repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
xolox/python-rotate-backups
|
rotate_backups/__init__.py
|
Location.match
|
python
|
def match(self, location):
    """Check if the given location "matches".

    :param location: The :class:`Location` object to try to match.
    :returns: :data:`True` if the two locations are on the same system and
              the :attr:`directory` can be matched as a filename pattern or
              is a literal match on the normalized pathname,
              :data:`False` otherwise.
    """
    if self.ssh_alias != location.ssh_alias:
        # Never match locations on other systems.
        return False
    elif self.have_wildcards:
        # Match filename patterns using fnmatch().
        return fnmatch.fnmatch(location.directory, self.directory)
    else:
        # Compare normalized directory pathnames. Use distinct local
        # names instead of rebinding `self` (the original shadowed the
        # instance with a string, which is confusing and error prone).
        our_directory = os.path.normpath(self.directory)
        their_directory = os.path.normpath(location.directory)
        return our_directory == their_directory
|
Check if the given location "matches".
:param location: The :class:`Location` object to try to match.
:returns: :data:`True` if the two locations are on the same system and
the :attr:`directory` can be matched as a filename pattern or
a literal match on the normalized pathname.
|
train
|
https://github.com/xolox/python-rotate-backups/blob/611c72b2806952bf2bb84c38a4b5f856ea334707/rotate_backups/__init__.py#L773-L792
| null |
class Location(PropertyManager):
    """:class:`Location` objects represent a root directory containing backups."""
    # NOTE(review): required_property/lazy_property come from the
    # property_manager package; the docstring-only bodies below are how
    # required properties are declared (the decorator supplies behavior).
    @required_property
    def context(self):
        """An execution context created using :mod:`executor.contexts`."""
    @required_property
    def directory(self):
        """The pathname of a directory containing backups (a string)."""
    @lazy_property
    def have_ionice(self):
        """:data:`True` when ionice_ is available, :data:`False` otherwise."""
        return self.context.have_ionice
    @lazy_property
    def have_wildcards(self):
        """:data:`True` if :attr:`directory` is a filename pattern, :data:`False` otherwise."""
        # NOTE(review): only '*' is treated as a wildcard marker here; '?'
        # and '[...]' patterns understood by fnmatch are not detected —
        # confirm this is intended.
        return '*' in self.directory
    @lazy_property
    def mount_point(self):
        """
        The pathname of the mount point of :attr:`directory` (a string or :data:`None`).
        If the ``stat --format=%m ...`` command that is used to determine the
        mount point fails, the value of this property defaults to :data:`None`.
        This enables graceful degradation on e.g. Mac OS X whose ``stat``
        implementation is rather bare bones compared to GNU/Linux.
        """
        try:
            # silent=True: the command is expected to fail on some platforms,
            # so its error output is suppressed.
            return self.context.capture('stat', '--format=%m', self.directory, silent=True)
        except ExternalCommandFailed:
            return None
    @lazy_property
    def is_remote(self):
        """:data:`True` if the location is remote, :data:`False` otherwise."""
        return isinstance(self.context, RemoteContext)
    @lazy_property
    def ssh_alias(self):
        """The SSH alias of a remote location (a string or :data:`None`)."""
        return self.context.ssh_alias if self.is_remote else None
    @property
    def key_properties(self):
        """
        A list of strings with the names of the :attr:`~custom_property.key` properties.
        Overrides :attr:`~property_manager.PropertyManager.key_properties` to
        customize the ordering of :class:`Location` objects so that they are
        ordered first by their :attr:`ssh_alias` and second by their
        :attr:`directory`.
        """
        return ['ssh_alias', 'directory'] if self.is_remote else ['directory']
    def ensure_exists(self):
        """Make sure the location exists."""
        if not self.context.is_directory(self.directory):
            # This can also happen when we don't have permission to one of the
            # parent directories so we'll point that out in the error message
            # when it seems applicable (so as not to confuse users).
            if self.context.have_superuser_privileges:
                msg = "The directory %s doesn't exist!"
                raise ValueError(msg % self)
            else:
                raise ValueError(compact("""
                    The directory {location} isn't accessible, most likely
                    because it doesn't exist or because of permissions. If
                    you're sure the directory exists you can use the
                    --use-sudo option.
                """, location=self))
    def ensure_readable(self):
        """Make sure the location exists and is readable."""
        # Existence is checked first so the error message distinguishes
        # "missing" from "unreadable".
        self.ensure_exists()
        if not self.context.is_readable(self.directory):
            if self.context.have_superuser_privileges:
                msg = "The directory %s isn't readable!"
                raise ValueError(msg % self)
            else:
                raise ValueError(compact("""
                    The directory {location} isn't readable, most likely
                    because of permissions. Consider using the --use-sudo
                    option.
                """, location=self))
    def ensure_writable(self):
        """Make sure the directory exists and is writable."""
        self.ensure_exists()
        if not self.context.is_writable(self.directory):
            if self.context.have_superuser_privileges:
                msg = "The directory %s isn't writable!"
                raise ValueError(msg % self)
            else:
                raise ValueError(compact("""
                    The directory {location} isn't writable, most likely due
                    to permissions. Consider using the --use-sudo option.
                """, location=self))
    def __str__(self):
        """Render a simple human readable representation of a location."""
        return '%s:%s' % (self.ssh_alias, self.directory) if self.ssh_alias else self.directory
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment._swap
|
python
|
def _swap(self):
'''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
self.ref_start, self.qry_start = self.qry_start, self.ref_start
self.ref_end, self.qry_end = self.qry_end, self.ref_end
self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
self.ref_length, self.qry_length = self.qry_length, self.ref_length
self.ref_name, self.qry_name = self.qry_name, self.ref_name
|
Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L52-L58
| null |
class Alignment:
    '''A single MUMmer alignment parsed from one line of ``show-coords -dTlro``
    output. All coordinates are stored 0-based (the input is 1-based).'''
    def __init__(self, line):
        '''Constructs Alignment object from a line of show-coords -dTlro'''
        # nucmer:
        # [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
        #1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
        # promer:
        #[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
        # 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
        fields = line.rstrip().split('\t')
        try:
            # Convert the 1-based MUMmer coordinates to 0-based.
            self.ref_start = int(fields[0]) - 1
            self.ref_end = int(fields[1]) - 1
            self.qry_start = int(fields[2]) - 1
            self.qry_end = int(fields[3]) - 1
            self.hit_length_ref = int(fields[4])
            self.hit_length_qry = int(fields[5])
            self.percent_identity = float(fields[6])
            if len(fields) >= 15:  # promer has more fields
                self.ref_length = int(fields[9])
                self.qry_length = int(fields[10])
                self.frame = int(fields[11])
                self.ref_name = fields[13]
                self.qry_name = fields[14]
            else:
                self.ref_length = int(fields[7])
                self.qry_length = int(fields[8])
                self.frame = int(fields[9])
                self.ref_name = fields[11]
                self.qry_name = fields[12]
        except (ValueError, IndexError) as e:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; chain the cause for easier debugging.
            raise Error('Error reading this nucmer line:\n' + line) from e
    def __eq__(self, other):
        return type(other) is type(self) and self.__dict__ == other.__dict__
    def __hash__(self):
        return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
    def qry_coords(self):
        '''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
        return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
    def ref_coords(self):
        '''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
        return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
    def on_same_strand(self):
        '''Returns true iff the direction of the alignment is the same in the reference and the query'''
        return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
    def is_self_hit(self):
        '''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
        return self.ref_name == self.qry_name \
                and self.ref_start == self.qry_start \
                and self.ref_end == self.qry_end \
                and self.percent_identity == 100
    def reverse_query(self):
        '''Changes the coordinates as if the query sequence has been reverse complemented'''
        self.qry_start = self.qry_length - self.qry_start - 1
        self.qry_end = self.qry_length - self.qry_end - 1
    def reverse_reference(self):
        '''Changes the coordinates as if the reference sequence has been reverse complemented'''
        self.ref_start = self.ref_length - self.ref_start - 1
        self.ref_end = self.ref_length - self.ref_end - 1
    def __str__(self):
        '''Returns a tab delimited string containing the values of this alignment object'''
        # Coordinates are converted back to the 1-based show-coords convention.
        return '\t'.join(str(x) for x in
            [self.ref_start + 1,
             self.ref_end + 1,
             self.qry_start + 1,
             self.qry_end + 1,
             self.hit_length_ref,
             self.hit_length_qry,
             '{0:.2f}'.format(self.percent_identity),
             self.ref_length,
             self.qry_length,
             self.frame,
             self.ref_name,
             self.qry_name])
    def to_msp_crunch(self):
        '''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
           1. score
           2. percent identity
           3. match start in the query sequence
           4. match end in the query sequence
           5. query sequence name
           6. subject sequence start
           7. subject sequence end
           8. subject sequence name'''
        # we don't know the alignment score. Estimate it. This approximates 1 for a match.
        aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
        return ' '.join(str(x) for x in [
            aln_score,
            '{0:.2f}'.format(self.percent_identity),
            self.qry_start + 1,
            self.qry_end + 1,
            self.qry_name,
            self.ref_start + 1,
            self.ref_end + 1,
            self.ref_name
        ])
    def intersects_variant(self, var):
        '''Returns true iff the given variant lies within both the reference
        and the query span of this alignment.'''
        var_ref_coords = sorted([var.ref_start, var.ref_end])
        var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
        var_qry_coords = sorted([var.qry_start, var.qry_end])
        var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
        return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
    def qry_coords_from_ref_coord(self, ref_coord, variant_list):
        '''Given a reference position and a list of variants ([variant.Variant]),
           works out the position in the query sequence, accounting for indels.
           Returns a tuple: (position, True|False), where second element is whether
           or not the ref_coord lies in an indel. If it is, then
           returns the corresponding start position
           of the indel in the query'''
        if self.ref_coords().distance_to_point(ref_coord) > 0:
            raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
        # Collect the indels that lie strictly before ref_coord; they shift
        # the query position by their length.
        indel_variant_indexes = []
        for i in range(len(variant_list)):
            if variant_list[i].var_type not in {variant.INS, variant.DEL}:
                continue
            if not self.intersects_variant(variant_list[i]):
                continue
            if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
                # ref_coord is inside this indel: report its query start.
                return variant_list[i].qry_start, True
            elif variant_list[i].ref_start < ref_coord:
                indel_variant_indexes.append(i)
        distance = ref_coord - min(self.ref_start, self.ref_end)
        for i in indel_variant_indexes:
            if variant_list[i].var_type == variant.INS:
                distance += len(variant_list[i].qry_base)
            else:
                assert variant_list[i].var_type == variant.DEL
                distance -= len(variant_list[i].ref_base)
        if self.on_same_strand():
            return min(self.qry_start, self.qry_end) + distance, False
        else:
            return max(self.qry_start, self.qry_end) - distance, False
    def ref_coords_from_qry_coord(self, qry_coord, variant_list):
        '''Given a query position and a list of variants ([variant.Variant]),
           works out the position in the ref sequence, accounting for indels.
           Returns a tuple: (position, True|False), where second element is whether
           or not the qry_coord lies in an indel. If it is, then
           returns the corresponding start position
           of the indel in the ref'''
        if self.qry_coords().distance_to_point(qry_coord) > 0:
            raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
        # Mirror image of qry_coords_from_ref_coord: deletions lengthen the
        # reference walk, insertions shorten it.
        indel_variant_indexes = []
        for i in range(len(variant_list)):
            if variant_list[i].var_type not in {variant.INS, variant.DEL}:
                continue
            if not self.intersects_variant(variant_list[i]):
                continue
            if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
                return variant_list[i].ref_start, True
            elif variant_list[i].qry_start < qry_coord:
                indel_variant_indexes.append(i)
        distance = qry_coord - min(self.qry_start, self.qry_end)
        for i in indel_variant_indexes:
            if variant_list[i].var_type == variant.DEL:
                distance += len(variant_list[i].ref_base)
            else:
                assert variant_list[i].var_type == variant.INS
                distance -= len(variant_list[i].qry_base)
        if self.on_same_strand():
            return min(self.ref_start, self.ref_end) + distance, False
        else:
            return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.qry_coords
|
python
|
def qry_coords(self):
    '''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
    # Order the endpoints so the interval is always (low, high), regardless of strand.
    low, high = sorted((self.qry_start, self.qry_end))
    return pyfastaq.intervals.Interval(low, high)
|
Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L61-L63
| null |
class Alignment:
    '''A single MUMmer alignment parsed from one line of ``show-coords -dTlro``
    output. All coordinates are stored 0-based (the input is 1-based).'''
    def __init__(self, line):
        '''Constructs Alignment object from a line of show-coords -dTlro'''
        # nucmer:
        # [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
        #1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
        # promer:
        #[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
        # 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
        fields = line.rstrip().split('\t')
        try:
            # Convert the 1-based MUMmer coordinates to 0-based.
            self.ref_start = int(fields[0]) - 1
            self.ref_end = int(fields[1]) - 1
            self.qry_start = int(fields[2]) - 1
            self.qry_end = int(fields[3]) - 1
            self.hit_length_ref = int(fields[4])
            self.hit_length_qry = int(fields[5])
            self.percent_identity = float(fields[6])
            if len(fields) >= 15:  # promer has more fields
                self.ref_length = int(fields[9])
                self.qry_length = int(fields[10])
                self.frame = int(fields[11])
                self.ref_name = fields[13]
                self.qry_name = fields[14]
            else:
                self.ref_length = int(fields[7])
                self.qry_length = int(fields[8])
                self.frame = int(fields[9])
                self.ref_name = fields[11]
                self.qry_name = fields[12]
        except (ValueError, IndexError) as e:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; chain the cause for easier debugging.
            raise Error('Error reading this nucmer line:\n' + line) from e
    def __eq__(self, other):
        return type(other) is type(self) and self.__dict__ == other.__dict__
    def __hash__(self):
        return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
    def _swap(self):
        '''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
        self.ref_start, self.qry_start = self.qry_start, self.ref_start
        self.ref_end, self.qry_end = self.qry_end, self.ref_end
        self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
        self.ref_length, self.qry_length = self.qry_length, self.ref_length
        self.ref_name, self.qry_name = self.qry_name, self.ref_name
    def ref_coords(self):
        '''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
        return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
    def on_same_strand(self):
        '''Returns true iff the direction of the alignment is the same in the reference and the query'''
        return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
    def is_self_hit(self):
        '''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
        return self.ref_name == self.qry_name \
                and self.ref_start == self.qry_start \
                and self.ref_end == self.qry_end \
                and self.percent_identity == 100
    def reverse_query(self):
        '''Changes the coordinates as if the query sequence has been reverse complemented'''
        self.qry_start = self.qry_length - self.qry_start - 1
        self.qry_end = self.qry_length - self.qry_end - 1
    def reverse_reference(self):
        '''Changes the coordinates as if the reference sequence has been reverse complemented'''
        self.ref_start = self.ref_length - self.ref_start - 1
        self.ref_end = self.ref_length - self.ref_end - 1
    def __str__(self):
        '''Returns a tab delimited string containing the values of this alignment object'''
        # Coordinates are converted back to the 1-based show-coords convention.
        return '\t'.join(str(x) for x in
            [self.ref_start + 1,
             self.ref_end + 1,
             self.qry_start + 1,
             self.qry_end + 1,
             self.hit_length_ref,
             self.hit_length_qry,
             '{0:.2f}'.format(self.percent_identity),
             self.ref_length,
             self.qry_length,
             self.frame,
             self.ref_name,
             self.qry_name])
    def to_msp_crunch(self):
        '''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
           1. score
           2. percent identity
           3. match start in the query sequence
           4. match end in the query sequence
           5. query sequence name
           6. subject sequence start
           7. subject sequence end
           8. subject sequence name'''
        # we don't know the alignment score. Estimate it. This approximates 1 for a match.
        aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
        return ' '.join(str(x) for x in [
            aln_score,
            '{0:.2f}'.format(self.percent_identity),
            self.qry_start + 1,
            self.qry_end + 1,
            self.qry_name,
            self.ref_start + 1,
            self.ref_end + 1,
            self.ref_name
        ])
    def intersects_variant(self, var):
        '''Returns true iff the given variant lies within both the reference
        and the query span of this alignment.'''
        var_ref_coords = sorted([var.ref_start, var.ref_end])
        var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
        var_qry_coords = sorted([var.qry_start, var.qry_end])
        var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
        return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
    def qry_coords_from_ref_coord(self, ref_coord, variant_list):
        '''Given a reference position and a list of variants ([variant.Variant]),
           works out the position in the query sequence, accounting for indels.
           Returns a tuple: (position, True|False), where second element is whether
           or not the ref_coord lies in an indel. If it is, then
           returns the corresponding start position
           of the indel in the query'''
        if self.ref_coords().distance_to_point(ref_coord) > 0:
            raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
        # Collect the indels that lie strictly before ref_coord; they shift
        # the query position by their length.
        indel_variant_indexes = []
        for i in range(len(variant_list)):
            if variant_list[i].var_type not in {variant.INS, variant.DEL}:
                continue
            if not self.intersects_variant(variant_list[i]):
                continue
            if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
                # ref_coord is inside this indel: report its query start.
                return variant_list[i].qry_start, True
            elif variant_list[i].ref_start < ref_coord:
                indel_variant_indexes.append(i)
        distance = ref_coord - min(self.ref_start, self.ref_end)
        for i in indel_variant_indexes:
            if variant_list[i].var_type == variant.INS:
                distance += len(variant_list[i].qry_base)
            else:
                assert variant_list[i].var_type == variant.DEL
                distance -= len(variant_list[i].ref_base)
        if self.on_same_strand():
            return min(self.qry_start, self.qry_end) + distance, False
        else:
            return max(self.qry_start, self.qry_end) - distance, False
    def ref_coords_from_qry_coord(self, qry_coord, variant_list):
        '''Given a query position and a list of variants ([variant.Variant]),
           works out the position in the ref sequence, accounting for indels.
           Returns a tuple: (position, True|False), where second element is whether
           or not the qry_coord lies in an indel. If it is, then
           returns the corresponding start position
           of the indel in the ref'''
        if self.qry_coords().distance_to_point(qry_coord) > 0:
            raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
        # Mirror image of qry_coords_from_ref_coord: deletions lengthen the
        # reference walk, insertions shorten it.
        indel_variant_indexes = []
        for i in range(len(variant_list)):
            if variant_list[i].var_type not in {variant.INS, variant.DEL}:
                continue
            if not self.intersects_variant(variant_list[i]):
                continue
            if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
                return variant_list[i].ref_start, True
            elif variant_list[i].qry_start < qry_coord:
                indel_variant_indexes.append(i)
        distance = qry_coord - min(self.qry_start, self.qry_end)
        for i in indel_variant_indexes:
            if variant_list[i].var_type == variant.DEL:
                distance += len(variant_list[i].ref_base)
            else:
                assert variant_list[i].var_type == variant.INS
                distance -= len(variant_list[i].qry_base)
        if self.on_same_strand():
            return min(self.ref_start, self.ref_end) + distance, False
        else:
            return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.ref_coords
|
python
|
def ref_coords(self):
    '''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
    # Order the endpoints so the interval is always (low, high), regardless of strand.
    low, high = sorted((self.ref_start, self.ref_end))
    return pyfastaq.intervals.Interval(low, high)
|
Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L66-L68
| null |
class Alignment:
    '''A single MUMmer alignment parsed from one line of ``show-coords -dTlro``
    output. All coordinates are stored 0-based (the input is 1-based).'''
    def __init__(self, line):
        '''Constructs Alignment object from a line of show-coords -dTlro'''
        # nucmer:
        # [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
        #1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
        # promer:
        #[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
        # 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
        fields = line.rstrip().split('\t')
        try:
            # Convert the 1-based MUMmer coordinates to 0-based.
            self.ref_start = int(fields[0]) - 1
            self.ref_end = int(fields[1]) - 1
            self.qry_start = int(fields[2]) - 1
            self.qry_end = int(fields[3]) - 1
            self.hit_length_ref = int(fields[4])
            self.hit_length_qry = int(fields[5])
            self.percent_identity = float(fields[6])
            if len(fields) >= 15:  # promer has more fields
                self.ref_length = int(fields[9])
                self.qry_length = int(fields[10])
                self.frame = int(fields[11])
                self.ref_name = fields[13]
                self.qry_name = fields[14]
            else:
                self.ref_length = int(fields[7])
                self.qry_length = int(fields[8])
                self.frame = int(fields[9])
                self.ref_name = fields[11]
                self.qry_name = fields[12]
        except (ValueError, IndexError) as e:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; chain the cause for easier debugging.
            raise Error('Error reading this nucmer line:\n' + line) from e
    def __eq__(self, other):
        return type(other) is type(self) and self.__dict__ == other.__dict__
    def __hash__(self):
        return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
    def _swap(self):
        '''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
        self.ref_start, self.qry_start = self.qry_start, self.ref_start
        self.ref_end, self.qry_end = self.qry_end, self.ref_end
        self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
        self.ref_length, self.qry_length = self.qry_length, self.ref_length
        self.ref_name, self.qry_name = self.qry_name, self.ref_name
    def qry_coords(self):
        '''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
        return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
    def on_same_strand(self):
        '''Returns true iff the direction of the alignment is the same in the reference and the query'''
        return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
    def is_self_hit(self):
        '''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
        return self.ref_name == self.qry_name \
                and self.ref_start == self.qry_start \
                and self.ref_end == self.qry_end \
                and self.percent_identity == 100
    def reverse_query(self):
        '''Changes the coordinates as if the query sequence has been reverse complemented'''
        self.qry_start = self.qry_length - self.qry_start - 1
        self.qry_end = self.qry_length - self.qry_end - 1
    def reverse_reference(self):
        '''Changes the coordinates as if the reference sequence has been reverse complemented'''
        self.ref_start = self.ref_length - self.ref_start - 1
        self.ref_end = self.ref_length - self.ref_end - 1
    def __str__(self):
        '''Returns a tab delimited string containing the values of this alignment object'''
        # Coordinates are converted back to the 1-based show-coords convention.
        return '\t'.join(str(x) for x in
            [self.ref_start + 1,
             self.ref_end + 1,
             self.qry_start + 1,
             self.qry_end + 1,
             self.hit_length_ref,
             self.hit_length_qry,
             '{0:.2f}'.format(self.percent_identity),
             self.ref_length,
             self.qry_length,
             self.frame,
             self.ref_name,
             self.qry_name])
    def to_msp_crunch(self):
        '''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
           1. score
           2. percent identity
           3. match start in the query sequence
           4. match end in the query sequence
           5. query sequence name
           6. subject sequence start
           7. subject sequence end
           8. subject sequence name'''
        # we don't know the alignment score. Estimate it. This approximates 1 for a match.
        aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
        return ' '.join(str(x) for x in [
            aln_score,
            '{0:.2f}'.format(self.percent_identity),
            self.qry_start + 1,
            self.qry_end + 1,
            self.qry_name,
            self.ref_start + 1,
            self.ref_end + 1,
            self.ref_name
        ])
    def intersects_variant(self, var):
        '''Returns true iff the given variant lies within both the reference
        and the query span of this alignment.'''
        var_ref_coords = sorted([var.ref_start, var.ref_end])
        var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
        var_qry_coords = sorted([var.qry_start, var.qry_end])
        var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
        return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
    def qry_coords_from_ref_coord(self, ref_coord, variant_list):
        '''Given a reference position and a list of variants ([variant.Variant]),
           works out the position in the query sequence, accounting for indels.
           Returns a tuple: (position, True|False), where second element is whether
           or not the ref_coord lies in an indel. If it is, then
           returns the corresponding start position
           of the indel in the query'''
        if self.ref_coords().distance_to_point(ref_coord) > 0:
            raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
        # Collect the indels that lie strictly before ref_coord; they shift
        # the query position by their length.
        indel_variant_indexes = []
        for i in range(len(variant_list)):
            if variant_list[i].var_type not in {variant.INS, variant.DEL}:
                continue
            if not self.intersects_variant(variant_list[i]):
                continue
            if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
                # ref_coord is inside this indel: report its query start.
                return variant_list[i].qry_start, True
            elif variant_list[i].ref_start < ref_coord:
                indel_variant_indexes.append(i)
        distance = ref_coord - min(self.ref_start, self.ref_end)
        for i in indel_variant_indexes:
            if variant_list[i].var_type == variant.INS:
                distance += len(variant_list[i].qry_base)
            else:
                assert variant_list[i].var_type == variant.DEL
                distance -= len(variant_list[i].ref_base)
        if self.on_same_strand():
            return min(self.qry_start, self.qry_end) + distance, False
        else:
            return max(self.qry_start, self.qry_end) - distance, False
    def ref_coords_from_qry_coord(self, qry_coord, variant_list):
        '''Given a query position and a list of variants ([variant.Variant]),
           works out the position in the ref sequence, accounting for indels.
           Returns a tuple: (position, True|False), where second element is whether
           or not the qry_coord lies in an indel. If it is, then
           returns the corresponding start position
           of the indel in the ref'''
        if self.qry_coords().distance_to_point(qry_coord) > 0:
            raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
        # Mirror image of qry_coords_from_ref_coord: deletions lengthen the
        # reference walk, insertions shorten it.
        indel_variant_indexes = []
        for i in range(len(variant_list)):
            if variant_list[i].var_type not in {variant.INS, variant.DEL}:
                continue
            if not self.intersects_variant(variant_list[i]):
                continue
            if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
                return variant_list[i].ref_start, True
            elif variant_list[i].qry_start < qry_coord:
                indel_variant_indexes.append(i)
        distance = qry_coord - min(self.qry_start, self.qry_end)
        for i in indel_variant_indexes:
            if variant_list[i].var_type == variant.DEL:
                distance += len(variant_list[i].ref_base)
            else:
                assert variant_list[i].var_type == variant.INS
                distance -= len(variant_list[i].qry_base)
        if self.on_same_strand():
            return min(self.ref_start, self.ref_end) + distance, False
        else:
            return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.on_same_strand
|
python
|
def on_same_strand(self):
    '''Returns true iff the direction of the alignment is the same in the reference and the query'''
    # Both spans must run in the same direction (both forward or both reversed).
    ref_is_forward = self.ref_start < self.ref_end
    qry_is_forward = self.qry_start < self.qry_end
    return ref_is_forward == qry_is_forward
|
Returns true iff the direction of the alignment is the same in the reference and the query
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L71-L73
| null |
class Alignment:
def __init__(self, line):
'''Constructs Alignment object from a line of show-coords -dTlro'''
# nucmer:
# [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
#1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
# promer:
#[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
# 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
fields = line.rstrip().split('\t')
try:
self.ref_start = int(fields[0]) - 1
self.ref_end = int(fields[1]) - 1
self.qry_start = int(fields[2]) - 1
self.qry_end = int(fields[3]) - 1
self.hit_length_ref = int(fields[4])
self.hit_length_qry = int(fields[5])
self.percent_identity = float(fields[6])
if len(fields) >= 15: # promer has more fields
self.ref_length = int(fields[9])
self.qry_length = int(fields[10])
self.frame = int(fields[11])
self.ref_name = fields[13]
self.qry_name = fields[14]
else:
self.ref_length = int(fields[7])
self.qry_length = int(fields[8])
self.frame = int(fields[9])
self.ref_name = fields[11]
self.qry_name = fields[12]
except:
raise Error('Error reading this nucmer line:\n' + line)
def __eq__(self, other):
return type(other) is type(self) and self.__dict__ == other.__dict__
def __hash__(self):
return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
def _swap(self):
'''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
self.ref_start, self.qry_start = self.qry_start, self.ref_start
self.ref_end, self.qry_end = self.qry_end, self.ref_end
self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
self.ref_length, self.qry_length = self.qry_length, self.ref_length
self.ref_name, self.qry_name = self.qry_name, self.ref_name
def qry_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
def ref_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
def is_self_hit(self):
'''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
return self.ref_name == self.qry_name \
and self.ref_start == self.qry_start \
and self.ref_end == self.qry_end \
and self.percent_identity == 100
def reverse_query(self):
'''Changes the coordinates as if the query sequence has been reverse complemented'''
self.qry_start = self.qry_length - self.qry_start - 1
self.qry_end = self.qry_length - self.qry_end - 1
def reverse_reference(self):
'''Changes the coordinates as if the reference sequence has been reverse complemented'''
self.ref_start = self.ref_length - self.ref_start - 1
self.ref_end = self.ref_length - self.ref_end - 1
def __str__(self):
'''Returns a tab delimited string containing the values of this alignment object'''
return '\t'.join(str(x) for x in
[self.ref_start + 1,
self.ref_end + 1,
self.qry_start + 1,
self.qry_end + 1,
self.hit_length_ref,
self.hit_length_qry,
'{0:.2f}'.format(self.percent_identity),
self.ref_length,
self.qry_length,
self.frame,
self.ref_name,
self.qry_name])
def to_msp_crunch(self):
'''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
1. score
2. percent identity
3. match start in the query sequence
4. match end in the query sequence
5. query sequence name
6. subject sequence start
7. subject sequence end
8. subject sequence name'''
# we don't know the alignment score. Estimate it. This approximates 1 for a match.
aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
return ' '.join(str(x) for x in [
aln_score,
'{0:.2f}'.format(self.percent_identity),
self.qry_start + 1,
self.qry_end + 1,
self.qry_name,
self.ref_start + 1,
self.ref_end + 1,
self.ref_name
])
def intersects_variant(self, var):
var_ref_coords = sorted([var.ref_start, var.ref_end])
var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
var_qry_coords = sorted([var.qry_start, var.qry_end])
var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
def qry_coords_from_ref_coord(self, ref_coord, variant_list):
'''Given a reference position and a list of variants ([variant.Variant]),
works out the position in the query sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the ref_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the query'''
if self.ref_coords().distance_to_point(ref_coord) > 0:
raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
return variant_list[i].qry_start, True
elif variant_list[i].ref_start < ref_coord:
indel_variant_indexes.append(i)
distance = ref_coord - min(self.ref_start, self.ref_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.INS:
distance += len(variant_list[i].qry_base)
else:
assert variant_list[i].var_type == variant.DEL
distance -= len(variant_list[i].ref_base)
if self.on_same_strand():
return min(self.qry_start, self.qry_end) + distance, False
else:
return max(self.qry_start, self.qry_end) - distance, False
def ref_coords_from_qry_coord(self, qry_coord, variant_list):
'''Given a qryerence position and a list of variants ([variant.Variant]),
works out the position in the ref sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the qry_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the ref'''
if self.qry_coords().distance_to_point(qry_coord) > 0:
raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
return variant_list[i].ref_start, True
elif variant_list[i].qry_start < qry_coord:
indel_variant_indexes.append(i)
distance = qry_coord - min(self.qry_start, self.qry_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.DEL:
distance += len(variant_list[i].ref_base)
else:
assert variant_list[i].var_type == variant.INS
distance -= len(variant_list[i].qry_base)
if self.on_same_strand():
return min(self.ref_start, self.ref_end) + distance, False
else:
return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.is_self_hit
|
python
|
def is_self_hit(self):
'''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
return self.ref_name == self.qry_name \
and self.ref_start == self.qry_start \
and self.ref_end == self.qry_end \
and self.percent_identity == 100
|
Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L76-L81
| null |
class Alignment:
def __init__(self, line):
'''Constructs Alignment object from a line of show-coords -dTlro'''
# nucmer:
# [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
#1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
# promer:
#[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
# 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
fields = line.rstrip().split('\t')
try:
self.ref_start = int(fields[0]) - 1
self.ref_end = int(fields[1]) - 1
self.qry_start = int(fields[2]) - 1
self.qry_end = int(fields[3]) - 1
self.hit_length_ref = int(fields[4])
self.hit_length_qry = int(fields[5])
self.percent_identity = float(fields[6])
if len(fields) >= 15: # promer has more fields
self.ref_length = int(fields[9])
self.qry_length = int(fields[10])
self.frame = int(fields[11])
self.ref_name = fields[13]
self.qry_name = fields[14]
else:
self.ref_length = int(fields[7])
self.qry_length = int(fields[8])
self.frame = int(fields[9])
self.ref_name = fields[11]
self.qry_name = fields[12]
except:
raise Error('Error reading this nucmer line:\n' + line)
def __eq__(self, other):
return type(other) is type(self) and self.__dict__ == other.__dict__
def __hash__(self):
return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
def _swap(self):
'''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
self.ref_start, self.qry_start = self.qry_start, self.ref_start
self.ref_end, self.qry_end = self.qry_end, self.ref_end
self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
self.ref_length, self.qry_length = self.qry_length, self.ref_length
self.ref_name, self.qry_name = self.qry_name, self.ref_name
def qry_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
def ref_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
def on_same_strand(self):
'''Returns true iff the direction of the alignment is the same in the reference and the query'''
return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
def reverse_query(self):
'''Changes the coordinates as if the query sequence has been reverse complemented'''
self.qry_start = self.qry_length - self.qry_start - 1
self.qry_end = self.qry_length - self.qry_end - 1
def reverse_reference(self):
'''Changes the coordinates as if the reference sequence has been reverse complemented'''
self.ref_start = self.ref_length - self.ref_start - 1
self.ref_end = self.ref_length - self.ref_end - 1
def __str__(self):
'''Returns a tab delimited string containing the values of this alignment object'''
return '\t'.join(str(x) for x in
[self.ref_start + 1,
self.ref_end + 1,
self.qry_start + 1,
self.qry_end + 1,
self.hit_length_ref,
self.hit_length_qry,
'{0:.2f}'.format(self.percent_identity),
self.ref_length,
self.qry_length,
self.frame,
self.ref_name,
self.qry_name])
def to_msp_crunch(self):
'''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
1. score
2. percent identity
3. match start in the query sequence
4. match end in the query sequence
5. query sequence name
6. subject sequence start
7. subject sequence end
8. subject sequence name'''
# we don't know the alignment score. Estimate it. This approximates 1 for a match.
aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
return ' '.join(str(x) for x in [
aln_score,
'{0:.2f}'.format(self.percent_identity),
self.qry_start + 1,
self.qry_end + 1,
self.qry_name,
self.ref_start + 1,
self.ref_end + 1,
self.ref_name
])
def intersects_variant(self, var):
var_ref_coords = sorted([var.ref_start, var.ref_end])
var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
var_qry_coords = sorted([var.qry_start, var.qry_end])
var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
def qry_coords_from_ref_coord(self, ref_coord, variant_list):
'''Given a reference position and a list of variants ([variant.Variant]),
works out the position in the query sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the ref_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the query'''
if self.ref_coords().distance_to_point(ref_coord) > 0:
raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
return variant_list[i].qry_start, True
elif variant_list[i].ref_start < ref_coord:
indel_variant_indexes.append(i)
distance = ref_coord - min(self.ref_start, self.ref_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.INS:
distance += len(variant_list[i].qry_base)
else:
assert variant_list[i].var_type == variant.DEL
distance -= len(variant_list[i].ref_base)
if self.on_same_strand():
return min(self.qry_start, self.qry_end) + distance, False
else:
return max(self.qry_start, self.qry_end) - distance, False
def ref_coords_from_qry_coord(self, qry_coord, variant_list):
'''Given a qryerence position and a list of variants ([variant.Variant]),
works out the position in the ref sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the qry_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the ref'''
if self.qry_coords().distance_to_point(qry_coord) > 0:
raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
return variant_list[i].ref_start, True
elif variant_list[i].qry_start < qry_coord:
indel_variant_indexes.append(i)
distance = qry_coord - min(self.qry_start, self.qry_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.DEL:
distance += len(variant_list[i].ref_base)
else:
assert variant_list[i].var_type == variant.INS
distance -= len(variant_list[i].qry_base)
if self.on_same_strand():
return min(self.ref_start, self.ref_end) + distance, False
else:
return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.reverse_query
|
python
|
def reverse_query(self):
'''Changes the coordinates as if the query sequence has been reverse complemented'''
self.qry_start = self.qry_length - self.qry_start - 1
self.qry_end = self.qry_length - self.qry_end - 1
|
Changes the coordinates as if the query sequence has been reverse complemented
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L84-L87
| null |
class Alignment:
def __init__(self, line):
'''Constructs Alignment object from a line of show-coords -dTlro'''
# nucmer:
# [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
#1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
# promer:
#[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
# 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
fields = line.rstrip().split('\t')
try:
self.ref_start = int(fields[0]) - 1
self.ref_end = int(fields[1]) - 1
self.qry_start = int(fields[2]) - 1
self.qry_end = int(fields[3]) - 1
self.hit_length_ref = int(fields[4])
self.hit_length_qry = int(fields[5])
self.percent_identity = float(fields[6])
if len(fields) >= 15: # promer has more fields
self.ref_length = int(fields[9])
self.qry_length = int(fields[10])
self.frame = int(fields[11])
self.ref_name = fields[13]
self.qry_name = fields[14]
else:
self.ref_length = int(fields[7])
self.qry_length = int(fields[8])
self.frame = int(fields[9])
self.ref_name = fields[11]
self.qry_name = fields[12]
except:
raise Error('Error reading this nucmer line:\n' + line)
def __eq__(self, other):
return type(other) is type(self) and self.__dict__ == other.__dict__
def __hash__(self):
return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
def _swap(self):
'''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
self.ref_start, self.qry_start = self.qry_start, self.ref_start
self.ref_end, self.qry_end = self.qry_end, self.ref_end
self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
self.ref_length, self.qry_length = self.qry_length, self.ref_length
self.ref_name, self.qry_name = self.qry_name, self.ref_name
def qry_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
def ref_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
def on_same_strand(self):
'''Returns true iff the direction of the alignment is the same in the reference and the query'''
return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
def is_self_hit(self):
'''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
return self.ref_name == self.qry_name \
and self.ref_start == self.qry_start \
and self.ref_end == self.qry_end \
and self.percent_identity == 100
def reverse_reference(self):
'''Changes the coordinates as if the reference sequence has been reverse complemented'''
self.ref_start = self.ref_length - self.ref_start - 1
self.ref_end = self.ref_length - self.ref_end - 1
def __str__(self):
'''Returns a tab delimited string containing the values of this alignment object'''
return '\t'.join(str(x) for x in
[self.ref_start + 1,
self.ref_end + 1,
self.qry_start + 1,
self.qry_end + 1,
self.hit_length_ref,
self.hit_length_qry,
'{0:.2f}'.format(self.percent_identity),
self.ref_length,
self.qry_length,
self.frame,
self.ref_name,
self.qry_name])
def to_msp_crunch(self):
'''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
1. score
2. percent identity
3. match start in the query sequence
4. match end in the query sequence
5. query sequence name
6. subject sequence start
7. subject sequence end
8. subject sequence name'''
# we don't know the alignment score. Estimate it. This approximates 1 for a match.
aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
return ' '.join(str(x) for x in [
aln_score,
'{0:.2f}'.format(self.percent_identity),
self.qry_start + 1,
self.qry_end + 1,
self.qry_name,
self.ref_start + 1,
self.ref_end + 1,
self.ref_name
])
def intersects_variant(self, var):
var_ref_coords = sorted([var.ref_start, var.ref_end])
var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
var_qry_coords = sorted([var.qry_start, var.qry_end])
var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
def qry_coords_from_ref_coord(self, ref_coord, variant_list):
'''Given a reference position and a list of variants ([variant.Variant]),
works out the position in the query sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the ref_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the query'''
if self.ref_coords().distance_to_point(ref_coord) > 0:
raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
return variant_list[i].qry_start, True
elif variant_list[i].ref_start < ref_coord:
indel_variant_indexes.append(i)
distance = ref_coord - min(self.ref_start, self.ref_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.INS:
distance += len(variant_list[i].qry_base)
else:
assert variant_list[i].var_type == variant.DEL
distance -= len(variant_list[i].ref_base)
if self.on_same_strand():
return min(self.qry_start, self.qry_end) + distance, False
else:
return max(self.qry_start, self.qry_end) - distance, False
def ref_coords_from_qry_coord(self, qry_coord, variant_list):
'''Given a qryerence position and a list of variants ([variant.Variant]),
works out the position in the ref sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the qry_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the ref'''
if self.qry_coords().distance_to_point(qry_coord) > 0:
raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
return variant_list[i].ref_start, True
elif variant_list[i].qry_start < qry_coord:
indel_variant_indexes.append(i)
distance = qry_coord - min(self.qry_start, self.qry_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.DEL:
distance += len(variant_list[i].ref_base)
else:
assert variant_list[i].var_type == variant.INS
distance -= len(variant_list[i].qry_base)
if self.on_same_strand():
return min(self.ref_start, self.ref_end) + distance, False
else:
return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.reverse_reference
|
python
|
def reverse_reference(self):
'''Changes the coordinates as if the reference sequence has been reverse complemented'''
self.ref_start = self.ref_length - self.ref_start - 1
self.ref_end = self.ref_length - self.ref_end - 1
|
Changes the coordinates as if the reference sequence has been reverse complemented
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L90-L93
| null |
class Alignment:
def __init__(self, line):
'''Constructs Alignment object from a line of show-coords -dTlro'''
# nucmer:
# [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
#1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
# promer:
#[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
# 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
fields = line.rstrip().split('\t')
try:
self.ref_start = int(fields[0]) - 1
self.ref_end = int(fields[1]) - 1
self.qry_start = int(fields[2]) - 1
self.qry_end = int(fields[3]) - 1
self.hit_length_ref = int(fields[4])
self.hit_length_qry = int(fields[5])
self.percent_identity = float(fields[6])
if len(fields) >= 15: # promer has more fields
self.ref_length = int(fields[9])
self.qry_length = int(fields[10])
self.frame = int(fields[11])
self.ref_name = fields[13]
self.qry_name = fields[14]
else:
self.ref_length = int(fields[7])
self.qry_length = int(fields[8])
self.frame = int(fields[9])
self.ref_name = fields[11]
self.qry_name = fields[12]
except:
raise Error('Error reading this nucmer line:\n' + line)
def __eq__(self, other):
return type(other) is type(self) and self.__dict__ == other.__dict__
def __hash__(self):
return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
def _swap(self):
'''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
self.ref_start, self.qry_start = self.qry_start, self.ref_start
self.ref_end, self.qry_end = self.qry_end, self.ref_end
self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
self.ref_length, self.qry_length = self.qry_length, self.ref_length
self.ref_name, self.qry_name = self.qry_name, self.ref_name
def qry_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
def ref_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
def on_same_strand(self):
'''Returns true iff the direction of the alignment is the same in the reference and the query'''
return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
def is_self_hit(self):
'''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
return self.ref_name == self.qry_name \
and self.ref_start == self.qry_start \
and self.ref_end == self.qry_end \
and self.percent_identity == 100
def reverse_query(self):
'''Changes the coordinates as if the query sequence has been reverse complemented'''
self.qry_start = self.qry_length - self.qry_start - 1
self.qry_end = self.qry_length - self.qry_end - 1
def __str__(self):
'''Returns a tab delimited string containing the values of this alignment object'''
return '\t'.join(str(x) for x in
[self.ref_start + 1,
self.ref_end + 1,
self.qry_start + 1,
self.qry_end + 1,
self.hit_length_ref,
self.hit_length_qry,
'{0:.2f}'.format(self.percent_identity),
self.ref_length,
self.qry_length,
self.frame,
self.ref_name,
self.qry_name])
def to_msp_crunch(self):
'''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
1. score
2. percent identity
3. match start in the query sequence
4. match end in the query sequence
5. query sequence name
6. subject sequence start
7. subject sequence end
8. subject sequence name'''
# we don't know the alignment score. Estimate it. This approximates 1 for a match.
aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
return ' '.join(str(x) for x in [
aln_score,
'{0:.2f}'.format(self.percent_identity),
self.qry_start + 1,
self.qry_end + 1,
self.qry_name,
self.ref_start + 1,
self.ref_end + 1,
self.ref_name
])
def intersects_variant(self, var):
var_ref_coords = sorted([var.ref_start, var.ref_end])
var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
var_qry_coords = sorted([var.qry_start, var.qry_end])
var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
def qry_coords_from_ref_coord(self, ref_coord, variant_list):
'''Given a reference position and a list of variants ([variant.Variant]),
works out the position in the query sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the ref_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the query'''
if self.ref_coords().distance_to_point(ref_coord) > 0:
raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
return variant_list[i].qry_start, True
elif variant_list[i].ref_start < ref_coord:
indel_variant_indexes.append(i)
distance = ref_coord - min(self.ref_start, self.ref_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.INS:
distance += len(variant_list[i].qry_base)
else:
assert variant_list[i].var_type == variant.DEL
distance -= len(variant_list[i].ref_base)
if self.on_same_strand():
return min(self.qry_start, self.qry_end) + distance, False
else:
return max(self.qry_start, self.qry_end) - distance, False
def ref_coords_from_qry_coord(self, qry_coord, variant_list):
'''Given a qryerence position and a list of variants ([variant.Variant]),
works out the position in the ref sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the qry_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the ref'''
if self.qry_coords().distance_to_point(qry_coord) > 0:
raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
return variant_list[i].ref_start, True
elif variant_list[i].qry_start < qry_coord:
indel_variant_indexes.append(i)
distance = qry_coord - min(self.qry_start, self.qry_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.DEL:
distance += len(variant_list[i].ref_base)
else:
assert variant_list[i].var_type == variant.INS
distance -= len(variant_list[i].qry_base)
if self.on_same_strand():
return min(self.ref_start, self.ref_end) + distance, False
else:
return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.to_msp_crunch
|
python
|
def to_msp_crunch(self):
'''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
1. score
2. percent identity
3. match start in the query sequence
4. match end in the query sequence
5. query sequence name
6. subject sequence start
7. subject sequence end
8. subject sequence name'''
# we don't know the alignment score. Estimate it. This approximates 1 for a match.
aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
return ' '.join(str(x) for x in [
aln_score,
'{0:.2f}'.format(self.percent_identity),
self.qry_start + 1,
self.qry_end + 1,
self.qry_name,
self.ref_start + 1,
self.ref_end + 1,
self.ref_name
])
|
Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
1. score
2. percent identity
3. match start in the query sequence
4. match end in the query sequence
5. query sequence name
6. subject sequence start
7. subject sequence end
8. subject sequence name
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L113-L136
| null |
class Alignment:
def __init__(self, line):
'''Constructs Alignment object from a line of show-coords -dTlro'''
# nucmer:
# [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
#1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
# promer:
#[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
# 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
fields = line.rstrip().split('\t')
try:
self.ref_start = int(fields[0]) - 1
self.ref_end = int(fields[1]) - 1
self.qry_start = int(fields[2]) - 1
self.qry_end = int(fields[3]) - 1
self.hit_length_ref = int(fields[4])
self.hit_length_qry = int(fields[5])
self.percent_identity = float(fields[6])
if len(fields) >= 15: # promer has more fields
self.ref_length = int(fields[9])
self.qry_length = int(fields[10])
self.frame = int(fields[11])
self.ref_name = fields[13]
self.qry_name = fields[14]
else:
self.ref_length = int(fields[7])
self.qry_length = int(fields[8])
self.frame = int(fields[9])
self.ref_name = fields[11]
self.qry_name = fields[12]
except:
raise Error('Error reading this nucmer line:\n' + line)
def __eq__(self, other):
return type(other) is type(self) and self.__dict__ == other.__dict__
def __hash__(self):
return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
def _swap(self):
'''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
self.ref_start, self.qry_start = self.qry_start, self.ref_start
self.ref_end, self.qry_end = self.qry_end, self.ref_end
self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
self.ref_length, self.qry_length = self.qry_length, self.ref_length
self.ref_name, self.qry_name = self.qry_name, self.ref_name
def qry_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
def ref_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
def on_same_strand(self):
'''Returns true iff the direction of the alignment is the same in the reference and the query'''
return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
def is_self_hit(self):
'''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
return self.ref_name == self.qry_name \
and self.ref_start == self.qry_start \
and self.ref_end == self.qry_end \
and self.percent_identity == 100
def reverse_query(self):
'''Changes the coordinates as if the query sequence has been reverse complemented'''
self.qry_start = self.qry_length - self.qry_start - 1
self.qry_end = self.qry_length - self.qry_end - 1
def reverse_reference(self):
'''Changes the coordinates as if the reference sequence has been reverse complemented'''
self.ref_start = self.ref_length - self.ref_start - 1
self.ref_end = self.ref_length - self.ref_end - 1
def __str__(self):
'''Returns a tab delimited string containing the values of this alignment object'''
return '\t'.join(str(x) for x in
[self.ref_start + 1,
self.ref_end + 1,
self.qry_start + 1,
self.qry_end + 1,
self.hit_length_ref,
self.hit_length_qry,
'{0:.2f}'.format(self.percent_identity),
self.ref_length,
self.qry_length,
self.frame,
self.ref_name,
self.qry_name])
def intersects_variant(self, var):
var_ref_coords = sorted([var.ref_start, var.ref_end])
var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
var_qry_coords = sorted([var.qry_start, var.qry_end])
var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
def qry_coords_from_ref_coord(self, ref_coord, variant_list):
'''Given a reference position and a list of variants ([variant.Variant]),
works out the position in the query sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the ref_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the query'''
if self.ref_coords().distance_to_point(ref_coord) > 0:
raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
return variant_list[i].qry_start, True
elif variant_list[i].ref_start < ref_coord:
indel_variant_indexes.append(i)
distance = ref_coord - min(self.ref_start, self.ref_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.INS:
distance += len(variant_list[i].qry_base)
else:
assert variant_list[i].var_type == variant.DEL
distance -= len(variant_list[i].ref_base)
if self.on_same_strand():
return min(self.qry_start, self.qry_end) + distance, False
else:
return max(self.qry_start, self.qry_end) - distance, False
def ref_coords_from_qry_coord(self, qry_coord, variant_list):
'''Given a qryerence position and a list of variants ([variant.Variant]),
works out the position in the ref sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the qry_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the ref'''
if self.qry_coords().distance_to_point(qry_coord) > 0:
raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
return variant_list[i].ref_start, True
elif variant_list[i].qry_start < qry_coord:
indel_variant_indexes.append(i)
distance = qry_coord - min(self.qry_start, self.qry_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.DEL:
distance += len(variant_list[i].ref_base)
else:
assert variant_list[i].var_type == variant.INS
distance -= len(variant_list[i].qry_base)
if self.on_same_strand():
return min(self.ref_start, self.ref_end) + distance, False
else:
return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/alignment.py
|
Alignment.qry_coords_from_ref_coord
|
python
|
def qry_coords_from_ref_coord(self, ref_coord, variant_list):
'''Given a reference position and a list of variants ([variant.Variant]),
works out the position in the query sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the ref_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the query'''
if self.ref_coords().distance_to_point(ref_coord) > 0:
raise Error('Cannot get query coord in qry_coords_from_ref_coord because given ref_coord ' + str(ref_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].ref_start <= ref_coord <= variant_list[i].ref_end:
return variant_list[i].qry_start, True
elif variant_list[i].ref_start < ref_coord:
indel_variant_indexes.append(i)
distance = ref_coord - min(self.ref_start, self.ref_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.INS:
distance += len(variant_list[i].qry_base)
else:
assert variant_list[i].var_type == variant.DEL
distance -= len(variant_list[i].ref_base)
if self.on_same_strand():
return min(self.qry_start, self.qry_end) + distance, False
else:
return max(self.qry_start, self.qry_end) - distance, False
|
Given a reference position and a list of variants ([variant.Variant]),
works out the position in the query sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the ref_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the query
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/alignment.py#L147-L181
|
[
"def ref_coords(self):\n '''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''\n return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))\n"
] |
class Alignment:
def __init__(self, line):
'''Constructs Alignment object from a line of show-coords -dTlro'''
# nucmer:
# [S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [LEN R] [LEN Q] [FRM] [TAGS]
#1162 25768 24536 4 24607 24533 99.32 640851 24536 1 -1 ref qry [CONTAINS]
# promer:
#[S1] [E1] [S2] [E2] [LEN 1] [LEN 2] [% IDY] [% SIM] [% STP] [LEN R] [LEN Q] [FRM] [TAGS]
# 1 1398 4891054 4892445 1398 1392 89.55 93.18 0.21 1398 5349013 1 1 ref qry [CONTAINED]
fields = line.rstrip().split('\t')
try:
self.ref_start = int(fields[0]) - 1
self.ref_end = int(fields[1]) - 1
self.qry_start = int(fields[2]) - 1
self.qry_end = int(fields[3]) - 1
self.hit_length_ref = int(fields[4])
self.hit_length_qry = int(fields[5])
self.percent_identity = float(fields[6])
if len(fields) >= 15: # promer has more fields
self.ref_length = int(fields[9])
self.qry_length = int(fields[10])
self.frame = int(fields[11])
self.ref_name = fields[13]
self.qry_name = fields[14]
else:
self.ref_length = int(fields[7])
self.qry_length = int(fields[8])
self.frame = int(fields[9])
self.ref_name = fields[11]
self.qry_name = fields[12]
except:
raise Error('Error reading this nucmer line:\n' + line)
def __eq__(self, other):
return type(other) is type(self) and self.__dict__ == other.__dict__
def __hash__(self):
return hash((self.ref_start, self.ref_end, self.qry_start, self.qry_end, self.hit_length_ref, self.hit_length_qry, self.percent_identity, self.ref_length, self.qry_length, self.frame, self.ref_name, self.qry_name))
def _swap(self):
'''Swaps the alignment so that the reference becomes the query and vice-versa. Swaps their names, coordinates etc. The frame is not changed'''
self.ref_start, self.qry_start = self.qry_start, self.ref_start
self.ref_end, self.qry_end = self.qry_end, self.ref_end
self.hit_length_ref, self.hit_length_qry = self.hit_length_qry, self.hit_length_ref
self.ref_length, self.qry_length = self.qry_length, self.ref_length
self.ref_name, self.qry_name = self.qry_name, self.ref_name
def qry_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the query sequence'''
return pyfastaq.intervals.Interval(min(self.qry_start, self.qry_end), max(self.qry_start, self.qry_end))
def ref_coords(self):
'''Returns a pyfastaq.intervals.Interval object of the start and end coordinates in the reference sequence'''
return pyfastaq.intervals.Interval(min(self.ref_start, self.ref_end), max(self.ref_start, self.ref_end))
def on_same_strand(self):
'''Returns true iff the direction of the alignment is the same in the reference and the query'''
return (self.ref_start < self.ref_end) == (self.qry_start < self.qry_end)
def is_self_hit(self):
'''Returns true iff the alignment is of a sequence to itself: names and all coordinates are the same and 100 percent identity'''
return self.ref_name == self.qry_name \
and self.ref_start == self.qry_start \
and self.ref_end == self.qry_end \
and self.percent_identity == 100
def reverse_query(self):
'''Changes the coordinates as if the query sequence has been reverse complemented'''
self.qry_start = self.qry_length - self.qry_start - 1
self.qry_end = self.qry_length - self.qry_end - 1
def reverse_reference(self):
'''Changes the coordinates as if the reference sequence has been reverse complemented'''
self.ref_start = self.ref_length - self.ref_start - 1
self.ref_end = self.ref_length - self.ref_end - 1
def __str__(self):
'''Returns a tab delimited string containing the values of this alignment object'''
return '\t'.join(str(x) for x in
[self.ref_start + 1,
self.ref_end + 1,
self.qry_start + 1,
self.qry_end + 1,
self.hit_length_ref,
self.hit_length_qry,
'{0:.2f}'.format(self.percent_identity),
self.ref_length,
self.qry_length,
self.frame,
self.ref_name,
self.qry_name])
def to_msp_crunch(self):
'''Returns the alignment as a line in MSPcrunch format. The columns are space-separated and are:
1. score
2. percent identity
3. match start in the query sequence
4. match end in the query sequence
5. query sequence name
6. subject sequence start
7. subject sequence end
8. subject sequence name'''
# we don't know the alignment score. Estimate it. This approximates 1 for a match.
aln_score = int(self.percent_identity * 0.005 * (self.hit_length_ref + self.hit_length_qry))
return ' '.join(str(x) for x in [
aln_score,
'{0:.2f}'.format(self.percent_identity),
self.qry_start + 1,
self.qry_end + 1,
self.qry_name,
self.ref_start + 1,
self.ref_end + 1,
self.ref_name
])
def intersects_variant(self, var):
var_ref_coords = sorted([var.ref_start, var.ref_end])
var_ref_coords = pyfastaq.intervals.Interval(var_ref_coords[0], var_ref_coords[1])
var_qry_coords = sorted([var.qry_start, var.qry_end])
var_qry_coords = pyfastaq.intervals.Interval(var_qry_coords[0], var_qry_coords[1])
return var_ref_coords.intersects(self.ref_coords()) and var_qry_coords.intersects(self.qry_coords())
def ref_coords_from_qry_coord(self, qry_coord, variant_list):
'''Given a qryerence position and a list of variants ([variant.Variant]),
works out the position in the ref sequence, accounting for indels.
Returns a tuple: (position, True|False), where second element is whether
or not the qry_coord lies in an indel. If it is, then
returns the corresponding start position
of the indel in the ref'''
if self.qry_coords().distance_to_point(qry_coord) > 0:
raise Error('Cannot get ref coord in ref_coords_from_qry_coord because given qry_coord ' + str(qry_coord) + ' does not lie in nucmer alignment:\n' + str(self))
indel_variant_indexes = []
for i in range(len(variant_list)):
if variant_list[i].var_type not in {variant.INS, variant.DEL}:
continue
if not self.intersects_variant(variant_list[i]):
continue
if variant_list[i].qry_start <= qry_coord <= variant_list[i].qry_end:
return variant_list[i].ref_start, True
elif variant_list[i].qry_start < qry_coord:
indel_variant_indexes.append(i)
distance = qry_coord - min(self.qry_start, self.qry_end)
for i in indel_variant_indexes:
if variant_list[i].var_type == variant.DEL:
distance += len(variant_list[i].ref_base)
else:
assert variant_list[i].var_type == variant.INS
distance -= len(variant_list[i].qry_base)
if self.on_same_strand():
return min(self.ref_start, self.ref_end) + distance, False
else:
return max(self.ref_start, self.ref_end) - distance, False
|
sanger-pathogens/pymummer
|
pymummer/nucmer.py
|
Runner._nucmer_command
|
python
|
def _nucmer_command(self, ref, qry, outprefix):
'''Construct the nucmer command'''
if self.use_promer:
command = 'promer'
else:
command = 'nucmer'
command += ' -p ' + outprefix
if self.breaklen is not None:
command += ' -b ' + str(self.breaklen)
if self.diagdiff is not None and not self.use_promer:
command += ' -D ' + str(self.diagdiff)
if self.diagfactor:
command += ' -d ' + str(self.diagfactor)
if self.maxgap:
command += ' -g ' + str(self.maxgap)
if self.maxmatch:
command += ' --maxmatch'
if self.mincluster is not None:
command += ' -c ' + str(self.mincluster)
if not self.simplify and not self.use_promer:
command += ' --nosimplify'
return command + ' ' + ref + ' ' + qry
|
Construct the nucmer command
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/nucmer.py#L53-L83
| null |
class Runner:
'''Handy reference for all the arguments needed for nucmer, delta-filter, show-coords, show-snps'''
def __init__(
self,
ref,
query,
outfile,
min_id=None,
min_length=None,
breaklen=None,
coords_header=True,
diagdiff=None,
diagfactor=None,
maxgap=None,
maxmatch=False,
mincluster=None,
simplify=True,
show_snps=False,
snps_header=True,
verbose=False,
promer=False,
show_snps_C=True,
):
self.qry = query
self.ref = ref
self.outfile = outfile
self.min_id = min_id
self.min_length = min_length
self.breaklen = breaklen
self.diagdiff = diagdiff
self.diagfactor = diagfactor
self.coords_header = coords_header
self.maxgap = maxgap
self.maxmatch = maxmatch
self.mincluster = mincluster
self.simplify = simplify
self.show_snps = show_snps
self.snps_header = snps_header
self.verbose = verbose
self.use_promer = promer
self.show_snps_C = show_snps_C
def _delta_filter_command(self, infile, outfile):
'''Construct delta-filter command'''
command = 'delta-filter'
if self.min_id is not None:
command += ' -i ' + str(self.min_id)
if self.min_length is not None:
command += ' -l ' + str(self.min_length)
return command + ' ' + infile + ' > ' + outfile
def _show_coords_command(self, infile, outfile):
'''Construct show-coords command'''
command = 'show-coords -dTlro'
if not self.coords_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _show_snps_command(self, infile, outfile):
command = 'show-snps -T' + ('C' if self.show_snps_C else '') + 'lr'
if not self.snps_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _write_script(self, script_name, ref, qry, outfile):
'''Write commands into a bash script'''
f = pyfastaq.utils.open_file_write(script_name)
print(self._nucmer_command(ref, qry, 'p'), file=f)
print(self._delta_filter_command('p.delta', 'p.delta.filter'), file=f)
print(self._show_coords_command('p.delta.filter', outfile), file=f)
if self.show_snps:
print(self._show_snps_command('p.delta.filter', outfile + '.snps'), file=f)
pyfastaq.utils.close(f)
def run(self):
'''
Change to a temp directory
Run bash script containing commands
Place results in specified output file
Clean up temp directory
'''
qry = os.path.abspath(self.qry)
ref = os.path.abspath(self.ref)
outfile = os.path.abspath(self.outfile)
tmpdir = tempfile.mkdtemp(prefix='tmp.run_nucmer.', dir=os.getcwd())
original_dir = os.getcwd()
os.chdir(tmpdir)
script = 'run_nucmer.sh'
self._write_script(script, ref, qry, outfile)
syscall.run('bash ' + script, verbose=self.verbose)
os.chdir(original_dir)
shutil.rmtree(tmpdir)
|
sanger-pathogens/pymummer
|
pymummer/nucmer.py
|
Runner._delta_filter_command
|
python
|
def _delta_filter_command(self, infile, outfile):
'''Construct delta-filter command'''
command = 'delta-filter'
if self.min_id is not None:
command += ' -i ' + str(self.min_id)
if self.min_length is not None:
command += ' -l ' + str(self.min_length)
return command + ' ' + infile + ' > ' + outfile
|
Construct delta-filter command
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/nucmer.py#L86-L96
| null |
class Runner:
'''Handy reference for all the arguments needed for nucmer, delta-filter, show-coords, show-snps'''
def __init__(
self,
ref,
query,
outfile,
min_id=None,
min_length=None,
breaklen=None,
coords_header=True,
diagdiff=None,
diagfactor=None,
maxgap=None,
maxmatch=False,
mincluster=None,
simplify=True,
show_snps=False,
snps_header=True,
verbose=False,
promer=False,
show_snps_C=True,
):
self.qry = query
self.ref = ref
self.outfile = outfile
self.min_id = min_id
self.min_length = min_length
self.breaklen = breaklen
self.diagdiff = diagdiff
self.diagfactor = diagfactor
self.coords_header = coords_header
self.maxgap = maxgap
self.maxmatch = maxmatch
self.mincluster = mincluster
self.simplify = simplify
self.show_snps = show_snps
self.snps_header = snps_header
self.verbose = verbose
self.use_promer = promer
self.show_snps_C = show_snps_C
def _nucmer_command(self, ref, qry, outprefix):
'''Construct the nucmer command'''
if self.use_promer:
command = 'promer'
else:
command = 'nucmer'
command += ' -p ' + outprefix
if self.breaklen is not None:
command += ' -b ' + str(self.breaklen)
if self.diagdiff is not None and not self.use_promer:
command += ' -D ' + str(self.diagdiff)
if self.diagfactor:
command += ' -d ' + str(self.diagfactor)
if self.maxgap:
command += ' -g ' + str(self.maxgap)
if self.maxmatch:
command += ' --maxmatch'
if self.mincluster is not None:
command += ' -c ' + str(self.mincluster)
if not self.simplify and not self.use_promer:
command += ' --nosimplify'
return command + ' ' + ref + ' ' + qry
def _show_coords_command(self, infile, outfile):
'''Construct show-coords command'''
command = 'show-coords -dTlro'
if not self.coords_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _show_snps_command(self, infile, outfile):
command = 'show-snps -T' + ('C' if self.show_snps_C else '') + 'lr'
if not self.snps_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _write_script(self, script_name, ref, qry, outfile):
'''Write commands into a bash script'''
f = pyfastaq.utils.open_file_write(script_name)
print(self._nucmer_command(ref, qry, 'p'), file=f)
print(self._delta_filter_command('p.delta', 'p.delta.filter'), file=f)
print(self._show_coords_command('p.delta.filter', outfile), file=f)
if self.show_snps:
print(self._show_snps_command('p.delta.filter', outfile + '.snps'), file=f)
pyfastaq.utils.close(f)
def run(self):
'''
Change to a temp directory
Run bash script containing commands
Place results in specified output file
Clean up temp directory
'''
qry = os.path.abspath(self.qry)
ref = os.path.abspath(self.ref)
outfile = os.path.abspath(self.outfile)
tmpdir = tempfile.mkdtemp(prefix='tmp.run_nucmer.', dir=os.getcwd())
original_dir = os.getcwd()
os.chdir(tmpdir)
script = 'run_nucmer.sh'
self._write_script(script, ref, qry, outfile)
syscall.run('bash ' + script, verbose=self.verbose)
os.chdir(original_dir)
shutil.rmtree(tmpdir)
|
sanger-pathogens/pymummer
|
pymummer/nucmer.py
|
Runner._show_coords_command
|
python
|
def _show_coords_command(self, infile, outfile):
'''Construct show-coords command'''
command = 'show-coords -dTlro'
if not self.coords_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
|
Construct show-coords command
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/nucmer.py#L99-L106
| null |
class Runner:
'''Handy reference for all the arguments needed for nucmer, delta-filter, show-coords, show-snps'''
def __init__(
self,
ref,
query,
outfile,
min_id=None,
min_length=None,
breaklen=None,
coords_header=True,
diagdiff=None,
diagfactor=None,
maxgap=None,
maxmatch=False,
mincluster=None,
simplify=True,
show_snps=False,
snps_header=True,
verbose=False,
promer=False,
show_snps_C=True,
):
self.qry = query
self.ref = ref
self.outfile = outfile
self.min_id = min_id
self.min_length = min_length
self.breaklen = breaklen
self.diagdiff = diagdiff
self.diagfactor = diagfactor
self.coords_header = coords_header
self.maxgap = maxgap
self.maxmatch = maxmatch
self.mincluster = mincluster
self.simplify = simplify
self.show_snps = show_snps
self.snps_header = snps_header
self.verbose = verbose
self.use_promer = promer
self.show_snps_C = show_snps_C
def _nucmer_command(self, ref, qry, outprefix):
'''Construct the nucmer command'''
if self.use_promer:
command = 'promer'
else:
command = 'nucmer'
command += ' -p ' + outprefix
if self.breaklen is not None:
command += ' -b ' + str(self.breaklen)
if self.diagdiff is not None and not self.use_promer:
command += ' -D ' + str(self.diagdiff)
if self.diagfactor:
command += ' -d ' + str(self.diagfactor)
if self.maxgap:
command += ' -g ' + str(self.maxgap)
if self.maxmatch:
command += ' --maxmatch'
if self.mincluster is not None:
command += ' -c ' + str(self.mincluster)
if not self.simplify and not self.use_promer:
command += ' --nosimplify'
return command + ' ' + ref + ' ' + qry
def _delta_filter_command(self, infile, outfile):
'''Construct delta-filter command'''
command = 'delta-filter'
if self.min_id is not None:
command += ' -i ' + str(self.min_id)
if self.min_length is not None:
command += ' -l ' + str(self.min_length)
return command + ' ' + infile + ' > ' + outfile
def _show_snps_command(self, infile, outfile):
command = 'show-snps -T' + ('C' if self.show_snps_C else '') + 'lr'
if not self.snps_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _write_script(self, script_name, ref, qry, outfile):
'''Write commands into a bash script'''
f = pyfastaq.utils.open_file_write(script_name)
print(self._nucmer_command(ref, qry, 'p'), file=f)
print(self._delta_filter_command('p.delta', 'p.delta.filter'), file=f)
print(self._show_coords_command('p.delta.filter', outfile), file=f)
if self.show_snps:
print(self._show_snps_command('p.delta.filter', outfile + '.snps'), file=f)
pyfastaq.utils.close(f)
def run(self):
'''
Change to a temp directory
Run bash script containing commands
Place results in specified output file
Clean up temp directory
'''
qry = os.path.abspath(self.qry)
ref = os.path.abspath(self.ref)
outfile = os.path.abspath(self.outfile)
tmpdir = tempfile.mkdtemp(prefix='tmp.run_nucmer.', dir=os.getcwd())
original_dir = os.getcwd()
os.chdir(tmpdir)
script = 'run_nucmer.sh'
self._write_script(script, ref, qry, outfile)
syscall.run('bash ' + script, verbose=self.verbose)
os.chdir(original_dir)
shutil.rmtree(tmpdir)
|
sanger-pathogens/pymummer
|
pymummer/nucmer.py
|
Runner._write_script
|
python
|
def _write_script(self, script_name, ref, qry, outfile):
'''Write commands into a bash script'''
f = pyfastaq.utils.open_file_write(script_name)
print(self._nucmer_command(ref, qry, 'p'), file=f)
print(self._delta_filter_command('p.delta', 'p.delta.filter'), file=f)
print(self._show_coords_command('p.delta.filter', outfile), file=f)
if self.show_snps:
print(self._show_snps_command('p.delta.filter', outfile + '.snps'), file=f)
pyfastaq.utils.close(f)
|
Write commands into a bash script
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/nucmer.py#L118-L126
|
[
"def _nucmer_command(self, ref, qry, outprefix):\n '''Construct the nucmer command'''\n if self.use_promer:\n command = 'promer'\n else:\n command = 'nucmer'\n\n command += ' -p ' + outprefix\n\n if self.breaklen is not None:\n command += ' -b ' + str(self.breaklen)\n\n if self.diagdiff is not None and not self.use_promer:\n command += ' -D ' + str(self.diagdiff)\n\n if self.diagfactor:\n command += ' -d ' + str(self.diagfactor)\n\n if self.maxgap:\n command += ' -g ' + str(self.maxgap)\n\n if self.maxmatch:\n command += ' --maxmatch'\n\n if self.mincluster is not None:\n command += ' -c ' + str(self.mincluster)\n\n if not self.simplify and not self.use_promer:\n \tcommand += ' --nosimplify'\n\n return command + ' ' + ref + ' ' + qry\n",
"def _delta_filter_command(self, infile, outfile):\n '''Construct delta-filter command'''\n command = 'delta-filter'\n\n if self.min_id is not None:\n command += ' -i ' + str(self.min_id)\n\n if self.min_length is not None:\n command += ' -l ' + str(self.min_length)\n\n return command + ' ' + infile + ' > ' + outfile\n",
"def _show_coords_command(self, infile, outfile):\n '''Construct show-coords command'''\n command = 'show-coords -dTlro'\n\n if not self.coords_header:\n command += ' -H'\n\n return command + ' ' + infile + ' > ' + outfile\n",
"def _show_snps_command(self, infile, outfile):\n command = 'show-snps -T' + ('C' if self.show_snps_C else '') + 'lr'\n\n if not self.snps_header:\n command += ' -H'\n\n return command + ' ' + infile + ' > ' + outfile\n"
] |
class Runner:
'''Handy reference for all the arguments needed for nucmer, delta-filter, show-coords, show-snps'''
def __init__(
self,
ref,
query,
outfile,
min_id=None,
min_length=None,
breaklen=None,
coords_header=True,
diagdiff=None,
diagfactor=None,
maxgap=None,
maxmatch=False,
mincluster=None,
simplify=True,
show_snps=False,
snps_header=True,
verbose=False,
promer=False,
show_snps_C=True,
):
self.qry = query
self.ref = ref
self.outfile = outfile
self.min_id = min_id
self.min_length = min_length
self.breaklen = breaklen
self.diagdiff = diagdiff
self.diagfactor = diagfactor
self.coords_header = coords_header
self.maxgap = maxgap
self.maxmatch = maxmatch
self.mincluster = mincluster
self.simplify = simplify
self.show_snps = show_snps
self.snps_header = snps_header
self.verbose = verbose
self.use_promer = promer
self.show_snps_C = show_snps_C
def _nucmer_command(self, ref, qry, outprefix):
'''Construct the nucmer command'''
if self.use_promer:
command = 'promer'
else:
command = 'nucmer'
command += ' -p ' + outprefix
if self.breaklen is not None:
command += ' -b ' + str(self.breaklen)
if self.diagdiff is not None and not self.use_promer:
command += ' -D ' + str(self.diagdiff)
if self.diagfactor:
command += ' -d ' + str(self.diagfactor)
if self.maxgap:
command += ' -g ' + str(self.maxgap)
if self.maxmatch:
command += ' --maxmatch'
if self.mincluster is not None:
command += ' -c ' + str(self.mincluster)
if not self.simplify and not self.use_promer:
command += ' --nosimplify'
return command + ' ' + ref + ' ' + qry
def _delta_filter_command(self, infile, outfile):
'''Construct delta-filter command'''
command = 'delta-filter'
if self.min_id is not None:
command += ' -i ' + str(self.min_id)
if self.min_length is not None:
command += ' -l ' + str(self.min_length)
return command + ' ' + infile + ' > ' + outfile
def _show_coords_command(self, infile, outfile):
'''Construct show-coords command'''
command = 'show-coords -dTlro'
if not self.coords_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _show_snps_command(self, infile, outfile):
command = 'show-snps -T' + ('C' if self.show_snps_C else '') + 'lr'
if not self.snps_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def run(self):
'''
Change to a temp directory
Run bash script containing commands
Place results in specified output file
Clean up temp directory
'''
qry = os.path.abspath(self.qry)
ref = os.path.abspath(self.ref)
outfile = os.path.abspath(self.outfile)
tmpdir = tempfile.mkdtemp(prefix='tmp.run_nucmer.', dir=os.getcwd())
original_dir = os.getcwd()
os.chdir(tmpdir)
script = 'run_nucmer.sh'
self._write_script(script, ref, qry, outfile)
syscall.run('bash ' + script, verbose=self.verbose)
os.chdir(original_dir)
shutil.rmtree(tmpdir)
|
sanger-pathogens/pymummer
|
pymummer/nucmer.py
|
Runner.run
|
python
|
def run(self):
'''
Change to a temp directory
Run bash script containing commands
Place results in specified output file
Clean up temp directory
'''
qry = os.path.abspath(self.qry)
ref = os.path.abspath(self.ref)
outfile = os.path.abspath(self.outfile)
tmpdir = tempfile.mkdtemp(prefix='tmp.run_nucmer.', dir=os.getcwd())
original_dir = os.getcwd()
os.chdir(tmpdir)
script = 'run_nucmer.sh'
self._write_script(script, ref, qry, outfile)
syscall.run('bash ' + script, verbose=self.verbose)
os.chdir(original_dir)
shutil.rmtree(tmpdir)
|
Change to a temp directory
Run bash script containing commands
Place results in specified output file
Clean up temp directory
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/nucmer.py#L129-L146
|
[
"def _write_script(self, script_name, ref, qry, outfile):\n '''Write commands into a bash script'''\n f = pyfastaq.utils.open_file_write(script_name)\n print(self._nucmer_command(ref, qry, 'p'), file=f)\n print(self._delta_filter_command('p.delta', 'p.delta.filter'), file=f)\n print(self._show_coords_command('p.delta.filter', outfile), file=f)\n if self.show_snps:\n print(self._show_snps_command('p.delta.filter', outfile + '.snps'), file=f)\n pyfastaq.utils.close(f)\n"
] |
class Runner:
'''Handy reference for all the arguments needed for nucmer, delta-filter, show-coords, show-snps'''
def __init__(
self,
ref,
query,
outfile,
min_id=None,
min_length=None,
breaklen=None,
coords_header=True,
diagdiff=None,
diagfactor=None,
maxgap=None,
maxmatch=False,
mincluster=None,
simplify=True,
show_snps=False,
snps_header=True,
verbose=False,
promer=False,
show_snps_C=True,
):
self.qry = query
self.ref = ref
self.outfile = outfile
self.min_id = min_id
self.min_length = min_length
self.breaklen = breaklen
self.diagdiff = diagdiff
self.diagfactor = diagfactor
self.coords_header = coords_header
self.maxgap = maxgap
self.maxmatch = maxmatch
self.mincluster = mincluster
self.simplify = simplify
self.show_snps = show_snps
self.snps_header = snps_header
self.verbose = verbose
self.use_promer = promer
self.show_snps_C = show_snps_C
def _nucmer_command(self, ref, qry, outprefix):
'''Construct the nucmer command'''
if self.use_promer:
command = 'promer'
else:
command = 'nucmer'
command += ' -p ' + outprefix
if self.breaklen is not None:
command += ' -b ' + str(self.breaklen)
if self.diagdiff is not None and not self.use_promer:
command += ' -D ' + str(self.diagdiff)
if self.diagfactor:
command += ' -d ' + str(self.diagfactor)
if self.maxgap:
command += ' -g ' + str(self.maxgap)
if self.maxmatch:
command += ' --maxmatch'
if self.mincluster is not None:
command += ' -c ' + str(self.mincluster)
if not self.simplify and not self.use_promer:
command += ' --nosimplify'
return command + ' ' + ref + ' ' + qry
def _delta_filter_command(self, infile, outfile):
'''Construct delta-filter command'''
command = 'delta-filter'
if self.min_id is not None:
command += ' -i ' + str(self.min_id)
if self.min_length is not None:
command += ' -l ' + str(self.min_length)
return command + ' ' + infile + ' > ' + outfile
def _show_coords_command(self, infile, outfile):
'''Construct show-coords command'''
command = 'show-coords -dTlro'
if not self.coords_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _show_snps_command(self, infile, outfile):
command = 'show-snps -T' + ('C' if self.show_snps_C else '') + 'lr'
if not self.snps_header:
command += ' -H'
return command + ' ' + infile + ' > ' + outfile
def _write_script(self, script_name, ref, qry, outfile):
'''Write commands into a bash script'''
f = pyfastaq.utils.open_file_write(script_name)
print(self._nucmer_command(ref, qry, 'p'), file=f)
print(self._delta_filter_command('p.delta', 'p.delta.filter'), file=f)
print(self._show_coords_command('p.delta.filter', outfile), file=f)
if self.show_snps:
print(self._show_snps_command('p.delta.filter', outfile + '.snps'), file=f)
pyfastaq.utils.close(f)
|
sanger-pathogens/pymummer
|
pymummer/variant.py
|
Variant.update_indel
|
python
|
def update_indel(self, nucmer_snp):
'''Indels are reported over multiple lines, 1 base insertion or deletion per line. This method extends the current variant by 1 base if it's an indel and adjacent to the new SNP and returns True. If the current variant is a SNP, does nothing and returns False'''
new_variant = Variant(nucmer_snp)
if self.var_type not in [INS, DEL] \
or self.var_type != new_variant.var_type \
or self.qry_name != new_variant.qry_name \
or self.ref_name != new_variant.ref_name \
or self.reverse != new_variant.reverse:
return False
if self.var_type == INS \
and self.ref_start == new_variant.ref_start \
and self.qry_end + 1 == new_variant.qry_start:
self.qry_base += new_variant.qry_base
self.qry_end += 1
return True
if self.var_type == DEL \
and self.qry_start == new_variant.qry_start \
and self.ref_end + 1 == new_variant.ref_start:
self.ref_base += new_variant.ref_base
self.ref_end += 1
return True
return False
|
Indels are reported over multiple lines, 1 base insertion or deletion per line. This method extends the current variant by 1 base if it's an indel and adjacent to the new SNP and returns True. If the current variant is a SNP, does nothing and returns False
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/variant.py#L62-L84
| null |
class Variant:
def __init__(self, snp):
'''Create a Variant object from a pymummer.snp.Snp object'''
if snp.ref_base == '.':
self.var_type = INS
self.qry_base = snp.qry_base
self.ref_base = '.'
elif snp.qry_base == '.':
self.var_type = DEL
self.qry_base = '.'
self.ref_base = snp.ref_base
elif '.' not in [snp.ref_base, snp.qry_base]:
self.var_type = SNP
self.ref_base = snp.ref_base
self.qry_base = snp.qry_base
else:
raise Error('Error constructing Variant from pymummer.snp.Snp:' + str(snp))
self.ref_start = snp.ref_pos
self.ref_end = snp.ref_pos
self.ref_length = snp.ref_length
self.ref_name = snp.ref_name
self.qry_start = snp.qry_pos
self.qry_end = snp.qry_pos
self.qry_length = snp.qry_length
self.qry_name = snp.qry_name
self.reverse = snp.reverse
def __eq__(self, other):
return type(other) is type(self) and self.__dict__ == other.__dict__
def __str__(self):
return '\t'.join([
str(self.ref_start + 1),
str(self.ref_end + 1),
str(self.ref_length),
str(self.ref_name),
self.ref_base,
str(self.qry_start + 1),
str(self.qry_end + 1),
str(self.qry_length),
str(self.qry_name),
self.qry_base,
'-1' if self.reverse else '1',
])
|
sanger-pathogens/pymummer
|
pymummer/coords_file.py
|
reader
|
python
|
def reader(fname):
'''Helper function to open the results file (coords file) and create alignment objects with the values in it'''
f = pyfastaq.utils.open_file_read(fname)
for line in f:
if line.startswith('[') or (not '\t' in line):
continue
yield alignment.Alignment(line)
pyfastaq.utils.close(f)
|
Helper function to open the results file (coords file) and create alignment objects with the values in it
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/coords_file.py#L6-L16
| null |
import pyfastaq
from pymummer import alignment
class Error (Exception): pass
def convert_to_msp_crunch(infile, outfile, ref_fai=None, qry_fai=None):
'''Converts a coords file to a file in MSPcrunch format (for use with ACT, most likely).
ACT ignores sequence names in the crunch file, and just looks at the numbers.
To make a compatible file, the coords all must be shifted appropriately, which
can be done by providing both the ref_fai and qry_fai options.
Both or neither of these must be used, otherwise an error will be thrown.'''
fai_files = {ref_fai, qry_fai}
if None in fai_files and len(fai_files) != 1:
print(fai_files)
raise Error('Error in convert_to_msp_crunch. Must use both of ref_fai and qry_fai, or neither of them')
if ref_fai is not None:
assert qry_fai is not None
ref_offsets = pyfastaq.tasks.length_offsets_from_fai(ref_fai)
qry_offsets = pyfastaq.tasks.length_offsets_from_fai(qry_fai)
file_reader = reader(infile)
f_out = pyfastaq.utils.open_file_write(outfile)
for aln in file_reader:
if ref_fai is not None:
aln.ref_start += ref_offsets[aln.ref_name]
aln.ref_end += ref_offsets[aln.ref_name]
aln.qry_start += qry_offsets[aln.qry_name]
aln.qry_end += qry_offsets[aln.qry_name]
print(aln.to_msp_crunch(), file=f_out)
pyfastaq.utils.close(f_out)
|
sanger-pathogens/pymummer
|
pymummer/coords_file.py
|
convert_to_msp_crunch
|
python
|
def convert_to_msp_crunch(infile, outfile, ref_fai=None, qry_fai=None):
'''Converts a coords file to a file in MSPcrunch format (for use with ACT, most likely).
ACT ignores sequence names in the crunch file, and just looks at the numbers.
To make a compatible file, the coords all must be shifted appropriately, which
can be done by providing both the ref_fai and qry_fai options.
Both or neither of these must be used, otherwise an error will be thrown.'''
fai_files = {ref_fai, qry_fai}
if None in fai_files and len(fai_files) != 1:
print(fai_files)
raise Error('Error in convert_to_msp_crunch. Must use both of ref_fai and qry_fai, or neither of them')
if ref_fai is not None:
assert qry_fai is not None
ref_offsets = pyfastaq.tasks.length_offsets_from_fai(ref_fai)
qry_offsets = pyfastaq.tasks.length_offsets_from_fai(qry_fai)
file_reader = reader(infile)
f_out = pyfastaq.utils.open_file_write(outfile)
for aln in file_reader:
if ref_fai is not None:
aln.ref_start += ref_offsets[aln.ref_name]
aln.ref_end += ref_offsets[aln.ref_name]
aln.qry_start += qry_offsets[aln.qry_name]
aln.qry_end += qry_offsets[aln.qry_name]
print(aln.to_msp_crunch(), file=f_out)
pyfastaq.utils.close(f_out)
|
Converts a coords file to a file in MSPcrunch format (for use with ACT, most likely).
ACT ignores sequence names in the crunch file, and just looks at the numbers.
To make a compatible file, the coords all must be shifted appropriately, which
can be done by providing both the ref_fai and qry_fai options.
Both or neither of these must be used, otherwise an error will be thrown.
|
train
|
https://github.com/sanger-pathogens/pymummer/blob/fd97bccfbae62719a7247473d73dd6733d4fa903/pymummer/coords_file.py#L19-L47
|
[
"def reader(fname):\n '''Helper function to open the results file (coords file) and create alignment objects with the values in it'''\n f = pyfastaq.utils.open_file_read(fname)\n\n for line in f:\n if line.startswith('[') or (not '\\t' in line):\n continue\n\n yield alignment.Alignment(line)\n\n pyfastaq.utils.close(f)\n"
] |
import pyfastaq
from pymummer import alignment
class Error (Exception): pass
def reader(fname):
'''Helper function to open the results file (coords file) and create alignment objects with the values in it'''
f = pyfastaq.utils.open_file_read(fname)
for line in f:
if line.startswith('[') or (not '\t' in line):
continue
yield alignment.Alignment(line)
pyfastaq.utils.close(f)
def convert_to_msp_crunch(infile, outfile, ref_fai=None, qry_fai=None):
'''Converts a coords file to a file in MSPcrunch format (for use with ACT, most likely).
ACT ignores sequence names in the crunch file, and just looks at the numbers.
To make a compatible file, the coords all must be shifted appropriately, which
can be done by providing both the ref_fai and qry_fai options.
Both or neither of these must be used, otherwise an error will be thrown.'''
fai_files = {ref_fai, qry_fai}
if None in fai_files and len(fai_files) != 1:
print(fai_files)
raise Error('Error in convert_to_msp_crunch. Must use both of ref_fai and qry_fai, or neither of them')
if ref_fai is not None:
assert qry_fai is not None
ref_offsets = pyfastaq.tasks.length_offsets_from_fai(ref_fai)
qry_offsets = pyfastaq.tasks.length_offsets_from_fai(qry_fai)
file_reader = reader(infile)
f_out = pyfastaq.utils.open_file_write(outfile)
for aln in file_reader:
if ref_fai is not None:
aln.ref_start += ref_offsets[aln.ref_name]
aln.ref_end += ref_offsets[aln.ref_name]
aln.qry_start += qry_offsets[aln.qry_name]
aln.qry_end += qry_offsets[aln.qry_name]
print(aln.to_msp_crunch(), file=f_out)
pyfastaq.utils.close(f_out)
|
xolox/python-qpass
|
setup.py
|
get_requirements
|
python
|
def get_requirements(*args):
requirements = set()
contents = get_contents(*args)
for line in contents.splitlines():
# Strip comments.
line = re.sub(r'^#.*|\s#.*', '', line)
# Ignore empty lines
if line and not line.isspace():
requirements.add(re.sub(r'\s+', '', line))
return sorted(requirements)
|
Get requirements from pip requirement files.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/setup.py#L44-L54
|
[
"def get_contents(*args):\n \"\"\"Get the contents of a file relative to the source distribution directory.\"\"\"\n with codecs.open(get_absolute_path(*args), 'r', 'UTF-8') as handle:\n return handle.read()\n"
] |
#!/usr/bin/env python
# Setup script for the `qpass' package.
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: April 26, 2018
# URL: https://github.com/xolox/python-qpass
"""
Setup script for the ``qpass`` package.
**python setup.py install**
Install from the working directory into the current Python environment.
**python setup.py sdist**
Build a source distribution archive.
**python setup.py bdist_wheel**
Build a wheel distribution archive.
"""
# Standard library modules.
import codecs
import os
import re
# De-facto standard solution for Python packaging.
from setuptools import find_packages, setup
def get_contents(*args):
"""Get the contents of a file relative to the source distribution directory."""
with codecs.open(get_absolute_path(*args), 'r', 'UTF-8') as handle:
return handle.read()
def get_version(*args):
"""Extract the version number from a Python module."""
contents = get_contents(*args)
metadata = dict(re.findall('__([a-z]+)__ = [\'"]([^\'"]+)', contents))
return metadata['version']
def get_absolute_path(*args):
"""Transform relative pathnames into absolute pathnames."""
return os.path.join(os.path.dirname(os.path.abspath(__file__)), *args)
setup(name='qpass',
version=get_version('qpass', '__init__.py'),
description="Frontend for pass (the standard unix password manager)",
long_description=get_contents('README.rst'),
url='https://github.com/xolox/python-qpass',
author="Peter Odding",
author_email='peter@peterodding.com',
license='MIT',
packages=find_packages(),
entry_points=dict(console_scripts=[
'qpass = qpass.cli:main',
]),
install_requires=get_requirements('requirements.txt'),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Security',
'Topic :: Security :: Cryptography',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System',
'Topic :: System :: Systems Administration',
'Topic :: Terminals',
'Topic :: Utilities',
])
|
xolox/python-qpass
|
qpass/__init__.py
|
create_fuzzy_pattern
|
python
|
def create_fuzzy_pattern(pattern):
return re.compile(".*".join(map(re.escape, pattern)), re.IGNORECASE)
|
Convert a string into a fuzzy regular expression pattern.
:param pattern: The input pattern (a string).
:returns: A compiled regular expression object.
This function works by adding ``.*`` between each of the characters in the
input pattern and compiling the resulting expression into a case
insensitive regular expression.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L431-L442
| null |
# qpass: Frontend for pass (the standard unix password manager).
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: December 3, 2018
# URL: https://github.com/xolox/python-qpass
"""
Frontend for pass_, the standard unix password manager.
.. _pass: https://www.passwordstore.org/
"""
# Standard library modules.
import fnmatch
import logging
import os
import platform
import re
# External dependencies.
from executor import execute
from executor.contexts import LocalContext
from humanfriendly import Timer, coerce_pattern, format_path, parse_path
from humanfriendly.terminal import HIGHLIGHT_COLOR, ansi_wrap, terminal_supports_colors
from humanfriendly.prompts import prompt_for_choice
from humanfriendly.text import concatenate, format, pluralize, split, trim_empty_lines
from natsort import natsort
from proc.gpg import get_gpg_variables
from property_manager import (
PropertyManager,
cached_property,
clear_property,
mutable_property,
required_property,
set_property,
)
from verboselogs import VerboseLogger
# Modules included in our package.
from qpass.exceptions import EmptyPasswordStoreError, MissingPasswordStoreError, NoMatchingPasswordError
# Public identifiers that require documentation.
__all__ = (
"DEFAULT_DIRECTORY",
"DIRECTORY_VARIABLE",
"AbstractPasswordStore",
"PasswordEntry",
"PasswordStore",
"QuickPass",
"__version__",
"create_fuzzy_pattern",
"logger",
)
# Semi-standard module versioning.
__version__ = "2.3"
DEFAULT_DIRECTORY = "~/.password-store"
"""
The default password storage directory (a string).
The value of :data:`DEFAULT_DIRECTORY` is normalized using
:func:`~humanfriendly.parse_path()`.
"""
DIRECTORY_VARIABLE = "PASSWORD_STORE_DIR"
"""The environment variable that sets the password storage directory (a string)."""
KEY_VALUE_PATTERN = re.compile(r"^(.+\S):\s+(\S.*)$")
"""A compiled regular expression to recognize "Key: Value" lines."""
# Initialize a logger for this module.
logger = VerboseLogger(__name__)
class AbstractPasswordStore(PropertyManager):
"""
Abstract Python API to query passwords managed by pass_.
This abstract base class has two concrete subclasses:
- The :class:`QuickPass` class manages multiple password stores as one.
- The :class:`PasswordStore` class manages a single password store.
"""
@property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
raise NotImplementedError()
@mutable_property(cached=True)
def exclude_list(self):
"""
A list of strings with filename patterns to ignore.
The :mod:`fnmatch` module is used for pattern matching. Filenames as
well as patterns are normalized to lowercase before pattern matching is
attempted.
"""
return []
@cached_property
def filtered_entries(self):
"""A list of :class:`PasswordEntry` objects that don't match the exclude list."""
return [
e for e in self.entries if not any(fnmatch.fnmatch(e.name.lower(), p.lower()) for p in self.exclude_list)
]
def fuzzy_search(self, *filters):
"""
Perform a "fuzzy" search that matches the given characters in the given order.
:param filters: The pattern(s) to search for.
:returns: The matched password names (a list of strings).
"""
matches = []
logger.verbose(
"Performing fuzzy search on %s (%s) ..", pluralize(len(filters), "pattern"), concatenate(map(repr, filters))
)
patterns = list(map(create_fuzzy_pattern, filters))
for entry in self.filtered_entries:
if all(p.search(entry.name) for p in patterns):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using fuzzy search.",
pluralize(len(matches), "password"),
)
return matches
def select_entry(self, *arguments):
"""
Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`).
"""
matches = self.smart_search(*arguments)
if len(matches) > 1:
logger.info("More than one match, prompting for choice ..")
labels = [entry.name for entry in matches]
return matches[labels.index(prompt_for_choice(labels))]
else:
logger.info("Matched one entry: %s", matches[0].name)
return matches[0]
def simple_search(self, *keywords):
"""
Perform a simple search for case insensitive substring matches.
:param keywords: The string(s) to search for.
:returns: The matched password names (a generator of strings).
Only passwords whose names matches *all* of the given keywords are
returned.
"""
matches = []
keywords = [kw.lower() for kw in keywords]
logger.verbose(
"Performing simple search on %s (%s) ..",
pluralize(len(keywords), "keyword"),
concatenate(map(repr, keywords)),
)
for entry in self.filtered_entries:
normalized = entry.name.lower()
if all(kw in normalized for kw in keywords):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using simple search.",
pluralize(len(matches), "password"),
)
return matches
def smart_search(self, *arguments):
"""
Perform a smart search on the given keywords or patterns.
:param arguments: The keywords or patterns to search for.
:returns: The matched password names (a list of strings).
:raises: The following exceptions can be raised:
- :exc:`.NoMatchingPasswordError` when no matching passwords are found.
- :exc:`.EmptyPasswordStoreError` when the password store is empty.
This method first tries :func:`simple_search()` and if that doesn't
produce any matches it will fall back to :func:`fuzzy_search()`. If no
matches are found an exception is raised (see above).
"""
matches = self.simple_search(*arguments)
if not matches:
logger.verbose("Falling back from substring search to fuzzy search ..")
matches = self.fuzzy_search(*arguments)
if not matches:
if len(self.filtered_entries) > 0:
raise NoMatchingPasswordError(
format("No passwords matched the given arguments! (%s)", concatenate(map(repr, arguments)))
)
else:
msg = "You don't have any passwords yet! (no *.gpg files found)"
raise EmptyPasswordStoreError(msg)
return matches
class QuickPass(AbstractPasswordStore):
"""
Python API to query multiple password stores as if they are one.
:see also: The :class:`PasswordStore` class to query a single password store.
"""
repr_properties = ["stores"]
"""The properties included in the output of :func:`repr()`."""
@cached_property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
passwords = []
for store in self.stores:
passwords.extend(store.entries)
return natsort(passwords, key=lambda e: e.name)
@mutable_property(cached=True)
def stores(self):
"""A list of :class:`PasswordStore` objects."""
return [PasswordStore()]
class PasswordStore(AbstractPasswordStore):
"""
Python API to query a single password store.
:see also: The :class:`QuickPass` class to query multiple password stores.
"""
repr_properties = ["directory", "entries"]
"""The properties included in the output of :func:`repr()`."""
@mutable_property(cached=True)
def context(self):
"""
An execution context created using :mod:`executor.contexts`.
The value of :attr:`context` defaults to a
:class:`~executor.contexts.LocalContext` object with the following
characteristics:
- The working directory of the execution context is set to the
value of :attr:`directory`.
- The environment variable given by :data:`DIRECTORY_VARIABLE` is set
to the value of :attr:`directory`.
:raises: :exc:`.MissingPasswordStoreError` when :attr:`directory`
doesn't exist.
"""
# Make sure the directory exists.
self.ensure_directory_exists()
# Prepare the environment variables.
environment = {DIRECTORY_VARIABLE: self.directory}
try:
# Try to enable the GPG agent in headless sessions.
environment.update(get_gpg_variables())
except Exception:
# If we failed then let's at least make sure that the
# $GPG_TTY environment variable is set correctly.
environment.update(GPG_TTY=execute("tty", capture=True, check=False, tty=True, silent=True))
return LocalContext(directory=self.directory, environment=environment)
@mutable_property(cached=True)
def directory(self):
"""
The pathname of the password storage directory (a string).
When the environment variable given by :data:`DIRECTORY_VARIABLE` is
set the value of that environment variable is used, otherwise
:data:`DEFAULT_DIRECTORY` is used. In either case the resulting
directory pathname is normalized using
:func:`~humanfriendly.parse_path()`.
When you set the :attr:`directory` property, the value you set will be
normalized using :func:`~humanfriendly.parse_path()` and the computed
value of the :attr:`context` property is cleared.
"""
return parse_path(os.environ.get(DIRECTORY_VARIABLE, DEFAULT_DIRECTORY))
@directory.setter
def directory(self, value):
"""Normalize the value of :attr:`directory` when it's set."""
# Normalize the value of `directory'.
set_property(self, "directory", parse_path(value))
# Clear the computed values of `context' and `entries'.
clear_property(self, "context")
clear_property(self, "entries")
@cached_property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
timer = Timer()
passwords = []
logger.info("Scanning %s ..", format_path(self.directory))
listing = self.context.capture("find", "-type", "f", "-name", "*.gpg", "-print0")
for filename in split(listing, "\0"):
basename, extension = os.path.splitext(filename)
if extension == ".gpg":
# We use os.path.normpath() to remove the leading `./' prefixes
# that `find' adds because it searches the working directory.
passwords.append(PasswordEntry(name=os.path.normpath(basename), store=self))
logger.verbose("Found %s in %s.", pluralize(len(passwords), "password"), timer)
return natsort(passwords, key=lambda e: e.name)
def ensure_directory_exists(self):
"""
Make sure :attr:`directory` exists.
:raises: :exc:`.MissingPasswordStoreError` when the password storage
directory doesn't exist.
"""
if not os.path.isdir(self.directory):
msg = "The password storage directory doesn't exist! (%s)"
raise MissingPasswordStoreError(msg % self.directory)
class PasswordEntry(PropertyManager):
""":class:`PasswordEntry` objects bind the name of a password to the store that contains the password."""
repr_properties = ["name"]
"""The properties included in the output of :func:`repr()`."""
@property
def context(self):
"""The :attr:`~PasswordStore.context` of :attr:`store`."""
return self.store.context
@required_property
def name(self):
"""The name of the password store entry (a string)."""
@cached_property
def password(self):
"""The password identified by :attr:`name` (a string)."""
return self.text.splitlines()[0]
@required_property
def store(self):
"""The :class:`PasswordStore` that contains the entry."""
@cached_property
def text(self):
"""The full text of the entry (a string)."""
return self.context.capture("pass", "show", self.name)
def copy_password(self):
"""Copy the password to the clipboard."""
self.context.execute("pass", "show", "--clip", self.name)
def format_text(self, include_password=True, use_colors=None, padding=True, filters=()):
"""
Format :attr:`text` for viewing on a terminal.
:param include_password: :data:`True` to include the password in the
formatted text, :data:`False` to exclude the
password from the formatted text.
:param use_colors: :data:`True` to use ANSI escape sequences,
:data:`False` otherwise. When this is :data:`None`
:func:`~humanfriendly.terminal.terminal_supports_colors()`
will be used to detect whether ANSI escape sequences
are supported.
:param padding: :data:`True` to add empty lines before and after the
entry and indent the entry's text with two spaces,
:data:`False` to skip the padding.
:param filters: An iterable of regular expression patterns (defaults to
an empty tuple). If a line in the entry's text matches
one of these patterns it won't be shown on the
terminal.
:returns: The formatted entry (a string).
"""
# Determine whether we can use ANSI escape sequences.
if use_colors is None:
use_colors = terminal_supports_colors()
# Extract the password (first line) from the entry.
lines = self.text.splitlines()
password = lines.pop(0).strip()
# Compile the given patterns to case insensitive regular expressions
# and use them to ignore lines that match any of the given filters.
patterns = [coerce_pattern(f, re.IGNORECASE) for f in filters]
lines = [l for l in lines if not any(p.search(l) for p in patterns)]
text = trim_empty_lines("\n".join(lines))
# Include the password in the formatted text?
if include_password:
text = "Password: %s\n%s" % (password, text)
# Add the name to the entry (only when there's something to show).
if text and not text.isspace():
title = " / ".join(split(self.name, "/"))
if use_colors:
title = ansi_wrap(title, bold=True)
text = "%s\n\n%s" % (title, text)
# Highlight the entry's text using ANSI escape sequences.
lines = []
for line in text.splitlines():
# Check for a "Key: Value" line.
match = KEY_VALUE_PATTERN.match(line)
if match:
key = "%s:" % match.group(1).strip()
value = match.group(2).strip()
if use_colors:
# Highlight the key.
key = ansi_wrap(key, color=HIGHLIGHT_COLOR)
# Underline hyperlinks in the value.
tokens = value.split()
for i in range(len(tokens)):
if "://" in tokens[i]:
tokens[i] = ansi_wrap(tokens[i], underline=True)
# Replace the line with a highlighted version.
line = key + " " + " ".join(tokens)
if padding:
line = " " + line
lines.append(line)
text = "\n".join(lines)
text = trim_empty_lines(text)
if text and padding:
text = "\n%s\n" % text
return text
def is_clipboard_supported():
"""
Check whether the clipboard is supported.
:returns: :data:`True` if the clipboard is supported, :data:`False` otherwise.
"""
return platform.system().lower() == "darwin" or bool(os.environ.get("DISPLAY"))
|
xolox/python-qpass
|
qpass/__init__.py
|
AbstractPasswordStore.filtered_entries
|
python
|
def filtered_entries(self):
return [
e for e in self.entries if not any(fnmatch.fnmatch(e.name.lower(), p.lower()) for p in self.exclude_list)
]
|
A list of :class:`PasswordEntry` objects that don't match the exclude list.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L104-L108
| null |
class AbstractPasswordStore(PropertyManager):
"""
Abstract Python API to query passwords managed by pass_.
This abstract base class has two concrete subclasses:
- The :class:`QuickPass` class manages multiple password stores as one.
- The :class:`PasswordStore` class manages a single password store.
"""
@property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
raise NotImplementedError()
@mutable_property(cached=True)
def exclude_list(self):
"""
A list of strings with filename patterns to ignore.
The :mod:`fnmatch` module is used for pattern matching. Filenames as
well as patterns are normalized to lowercase before pattern matching is
attempted.
"""
return []
@cached_property
def fuzzy_search(self, *filters):
"""
Perform a "fuzzy" search that matches the given characters in the given order.
:param filters: The pattern(s) to search for.
:returns: The matched password names (a list of strings).
"""
matches = []
logger.verbose(
"Performing fuzzy search on %s (%s) ..", pluralize(len(filters), "pattern"), concatenate(map(repr, filters))
)
patterns = list(map(create_fuzzy_pattern, filters))
for entry in self.filtered_entries:
if all(p.search(entry.name) for p in patterns):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using fuzzy search.",
pluralize(len(matches), "password"),
)
return matches
def select_entry(self, *arguments):
"""
Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`).
"""
matches = self.smart_search(*arguments)
if len(matches) > 1:
logger.info("More than one match, prompting for choice ..")
labels = [entry.name for entry in matches]
return matches[labels.index(prompt_for_choice(labels))]
else:
logger.info("Matched one entry: %s", matches[0].name)
return matches[0]
def simple_search(self, *keywords):
"""
Perform a simple search for case insensitive substring matches.
:param keywords: The string(s) to search for.
:returns: The matched password names (a generator of strings).
Only passwords whose names matches *all* of the given keywords are
returned.
"""
matches = []
keywords = [kw.lower() for kw in keywords]
logger.verbose(
"Performing simple search on %s (%s) ..",
pluralize(len(keywords), "keyword"),
concatenate(map(repr, keywords)),
)
for entry in self.filtered_entries:
normalized = entry.name.lower()
if all(kw in normalized for kw in keywords):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using simple search.",
pluralize(len(matches), "password"),
)
return matches
def smart_search(self, *arguments):
"""
Perform a smart search on the given keywords or patterns.
:param arguments: The keywords or patterns to search for.
:returns: The matched password names (a list of strings).
:raises: The following exceptions can be raised:
- :exc:`.NoMatchingPasswordError` when no matching passwords are found.
- :exc:`.EmptyPasswordStoreError` when the password store is empty.
This method first tries :func:`simple_search()` and if that doesn't
produce any matches it will fall back to :func:`fuzzy_search()`. If no
matches are found an exception is raised (see above).
"""
matches = self.simple_search(*arguments)
if not matches:
logger.verbose("Falling back from substring search to fuzzy search ..")
matches = self.fuzzy_search(*arguments)
if not matches:
if len(self.filtered_entries) > 0:
raise NoMatchingPasswordError(
format("No passwords matched the given arguments! (%s)", concatenate(map(repr, arguments)))
)
else:
msg = "You don't have any passwords yet! (no *.gpg files found)"
raise EmptyPasswordStoreError(msg)
return matches
|
xolox/python-qpass
|
qpass/__init__.py
|
AbstractPasswordStore.fuzzy_search
|
python
|
def fuzzy_search(self, *filters):
matches = []
logger.verbose(
"Performing fuzzy search on %s (%s) ..", pluralize(len(filters), "pattern"), concatenate(map(repr, filters))
)
patterns = list(map(create_fuzzy_pattern, filters))
for entry in self.filtered_entries:
if all(p.search(entry.name) for p in patterns):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using fuzzy search.",
pluralize(len(matches), "password"),
)
return matches
|
Perform a "fuzzy" search that matches the given characters in the given order.
:param filters: The pattern(s) to search for.
:returns: The matched password names (a list of strings).
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L110-L130
| null |
class AbstractPasswordStore(PropertyManager):
"""
Abstract Python API to query passwords managed by pass_.
This abstract base class has two concrete subclasses:
- The :class:`QuickPass` class manages multiple password stores as one.
- The :class:`PasswordStore` class manages a single password store.
"""
@property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
raise NotImplementedError()
@mutable_property(cached=True)
def exclude_list(self):
"""
A list of strings with filename patterns to ignore.
The :mod:`fnmatch` module is used for pattern matching. Filenames as
well as patterns are normalized to lowercase before pattern matching is
attempted.
"""
return []
@cached_property
def filtered_entries(self):
"""A list of :class:`PasswordEntry` objects that don't match the exclude list."""
return [
e for e in self.entries if not any(fnmatch.fnmatch(e.name.lower(), p.lower()) for p in self.exclude_list)
]
def select_entry(self, *arguments):
"""
Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`).
"""
matches = self.smart_search(*arguments)
if len(matches) > 1:
logger.info("More than one match, prompting for choice ..")
labels = [entry.name for entry in matches]
return matches[labels.index(prompt_for_choice(labels))]
else:
logger.info("Matched one entry: %s", matches[0].name)
return matches[0]
def simple_search(self, *keywords):
"""
Perform a simple search for case insensitive substring matches.
:param keywords: The string(s) to search for.
:returns: The matched password names (a generator of strings).
Only passwords whose names matches *all* of the given keywords are
returned.
"""
matches = []
keywords = [kw.lower() for kw in keywords]
logger.verbose(
"Performing simple search on %s (%s) ..",
pluralize(len(keywords), "keyword"),
concatenate(map(repr, keywords)),
)
for entry in self.filtered_entries:
normalized = entry.name.lower()
if all(kw in normalized for kw in keywords):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using simple search.",
pluralize(len(matches), "password"),
)
return matches
def smart_search(self, *arguments):
"""
Perform a smart search on the given keywords or patterns.
:param arguments: The keywords or patterns to search for.
:returns: The matched password names (a list of strings).
:raises: The following exceptions can be raised:
- :exc:`.NoMatchingPasswordError` when no matching passwords are found.
- :exc:`.EmptyPasswordStoreError` when the password store is empty.
This method first tries :func:`simple_search()` and if that doesn't
produce any matches it will fall back to :func:`fuzzy_search()`. If no
matches are found an exception is raised (see above).
"""
matches = self.simple_search(*arguments)
if not matches:
logger.verbose("Falling back from substring search to fuzzy search ..")
matches = self.fuzzy_search(*arguments)
if not matches:
if len(self.filtered_entries) > 0:
raise NoMatchingPasswordError(
format("No passwords matched the given arguments! (%s)", concatenate(map(repr, arguments)))
)
else:
msg = "You don't have any passwords yet! (no *.gpg files found)"
raise EmptyPasswordStoreError(msg)
return matches
|
xolox/python-qpass
|
qpass/__init__.py
|
AbstractPasswordStore.select_entry
|
python
|
def select_entry(self, *arguments):
matches = self.smart_search(*arguments)
if len(matches) > 1:
logger.info("More than one match, prompting for choice ..")
labels = [entry.name for entry in matches]
return matches[labels.index(prompt_for_choice(labels))]
else:
logger.info("Matched one entry: %s", matches[0].name)
return matches[0]
|
Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`).
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L132-L147
| null |
class AbstractPasswordStore(PropertyManager):
"""
Abstract Python API to query passwords managed by pass_.
This abstract base class has two concrete subclasses:
- The :class:`QuickPass` class manages multiple password stores as one.
- The :class:`PasswordStore` class manages a single password store.
"""
@property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
raise NotImplementedError()
@mutable_property(cached=True)
def exclude_list(self):
"""
A list of strings with filename patterns to ignore.
The :mod:`fnmatch` module is used for pattern matching. Filenames as
well as patterns are normalized to lowercase before pattern matching is
attempted.
"""
return []
@cached_property
def filtered_entries(self):
"""A list of :class:`PasswordEntry` objects that don't match the exclude list."""
return [
e for e in self.entries if not any(fnmatch.fnmatch(e.name.lower(), p.lower()) for p in self.exclude_list)
]
def fuzzy_search(self, *filters):
"""
Perform a "fuzzy" search that matches the given characters in the given order.
:param filters: The pattern(s) to search for.
:returns: The matched password names (a list of strings).
"""
matches = []
logger.verbose(
"Performing fuzzy search on %s (%s) ..", pluralize(len(filters), "pattern"), concatenate(map(repr, filters))
)
patterns = list(map(create_fuzzy_pattern, filters))
for entry in self.filtered_entries:
if all(p.search(entry.name) for p in patterns):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using fuzzy search.",
pluralize(len(matches), "password"),
)
return matches
def simple_search(self, *keywords):
"""
Perform a simple search for case insensitive substring matches.
:param keywords: The string(s) to search for.
:returns: The matched password names (a generator of strings).
Only passwords whose names matches *all* of the given keywords are
returned.
"""
matches = []
keywords = [kw.lower() for kw in keywords]
logger.verbose(
"Performing simple search on %s (%s) ..",
pluralize(len(keywords), "keyword"),
concatenate(map(repr, keywords)),
)
for entry in self.filtered_entries:
normalized = entry.name.lower()
if all(kw in normalized for kw in keywords):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using simple search.",
pluralize(len(matches), "password"),
)
return matches
def smart_search(self, *arguments):
"""
Perform a smart search on the given keywords or patterns.
:param arguments: The keywords or patterns to search for.
:returns: The matched password names (a list of strings).
:raises: The following exceptions can be raised:
- :exc:`.NoMatchingPasswordError` when no matching passwords are found.
- :exc:`.EmptyPasswordStoreError` when the password store is empty.
This method first tries :func:`simple_search()` and if that doesn't
produce any matches it will fall back to :func:`fuzzy_search()`. If no
matches are found an exception is raised (see above).
"""
matches = self.simple_search(*arguments)
if not matches:
logger.verbose("Falling back from substring search to fuzzy search ..")
matches = self.fuzzy_search(*arguments)
if not matches:
if len(self.filtered_entries) > 0:
raise NoMatchingPasswordError(
format("No passwords matched the given arguments! (%s)", concatenate(map(repr, arguments)))
)
else:
msg = "You don't have any passwords yet! (no *.gpg files found)"
raise EmptyPasswordStoreError(msg)
return matches
|
xolox/python-qpass
|
qpass/__init__.py
|
AbstractPasswordStore.simple_search
|
python
|
def simple_search(self, *keywords):
matches = []
keywords = [kw.lower() for kw in keywords]
logger.verbose(
"Performing simple search on %s (%s) ..",
pluralize(len(keywords), "keyword"),
concatenate(map(repr, keywords)),
)
for entry in self.filtered_entries:
normalized = entry.name.lower()
if all(kw in normalized for kw in keywords):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using simple search.",
pluralize(len(matches), "password"),
)
return matches
|
Perform a simple search for case insensitive substring matches.
:param keywords: The string(s) to search for.
:returns: The matched password names (a generator of strings).
Only passwords whose names matches *all* of the given keywords are
returned.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L149-L175
| null |
class AbstractPasswordStore(PropertyManager):
"""
Abstract Python API to query passwords managed by pass_.
This abstract base class has two concrete subclasses:
- The :class:`QuickPass` class manages multiple password stores as one.
- The :class:`PasswordStore` class manages a single password store.
"""
@property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
raise NotImplementedError()
@mutable_property(cached=True)
def exclude_list(self):
"""
A list of strings with filename patterns to ignore.
The :mod:`fnmatch` module is used for pattern matching. Filenames as
well as patterns are normalized to lowercase before pattern matching is
attempted.
"""
return []
@cached_property
def filtered_entries(self):
"""A list of :class:`PasswordEntry` objects that don't match the exclude list."""
return [
e for e in self.entries if not any(fnmatch.fnmatch(e.name.lower(), p.lower()) for p in self.exclude_list)
]
def fuzzy_search(self, *filters):
"""
Perform a "fuzzy" search that matches the given characters in the given order.
:param filters: The pattern(s) to search for.
:returns: The matched password names (a list of strings).
"""
matches = []
logger.verbose(
"Performing fuzzy search on %s (%s) ..", pluralize(len(filters), "pattern"), concatenate(map(repr, filters))
)
patterns = list(map(create_fuzzy_pattern, filters))
for entry in self.filtered_entries:
if all(p.search(entry.name) for p in patterns):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using fuzzy search.",
pluralize(len(matches), "password"),
)
return matches
def select_entry(self, *arguments):
"""
Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`).
"""
matches = self.smart_search(*arguments)
if len(matches) > 1:
logger.info("More than one match, prompting for choice ..")
labels = [entry.name for entry in matches]
return matches[labels.index(prompt_for_choice(labels))]
else:
logger.info("Matched one entry: %s", matches[0].name)
return matches[0]
def smart_search(self, *arguments):
"""
Perform a smart search on the given keywords or patterns.
:param arguments: The keywords or patterns to search for.
:returns: The matched password names (a list of strings).
:raises: The following exceptions can be raised:
- :exc:`.NoMatchingPasswordError` when no matching passwords are found.
- :exc:`.EmptyPasswordStoreError` when the password store is empty.
This method first tries :func:`simple_search()` and if that doesn't
produce any matches it will fall back to :func:`fuzzy_search()`. If no
matches are found an exception is raised (see above).
"""
matches = self.simple_search(*arguments)
if not matches:
logger.verbose("Falling back from substring search to fuzzy search ..")
matches = self.fuzzy_search(*arguments)
if not matches:
if len(self.filtered_entries) > 0:
raise NoMatchingPasswordError(
format("No passwords matched the given arguments! (%s)", concatenate(map(repr, arguments)))
)
else:
msg = "You don't have any passwords yet! (no *.gpg files found)"
raise EmptyPasswordStoreError(msg)
return matches
|
xolox/python-qpass
|
qpass/__init__.py
|
AbstractPasswordStore.smart_search
|
python
|
def smart_search(self, *arguments):
matches = self.simple_search(*arguments)
if not matches:
logger.verbose("Falling back from substring search to fuzzy search ..")
matches = self.fuzzy_search(*arguments)
if not matches:
if len(self.filtered_entries) > 0:
raise NoMatchingPasswordError(
format("No passwords matched the given arguments! (%s)", concatenate(map(repr, arguments)))
)
else:
msg = "You don't have any passwords yet! (no *.gpg files found)"
raise EmptyPasswordStoreError(msg)
return matches
|
Perform a smart search on the given keywords or patterns.
:param arguments: The keywords or patterns to search for.
:returns: The matched password names (a list of strings).
:raises: The following exceptions can be raised:
- :exc:`.NoMatchingPasswordError` when no matching passwords are found.
- :exc:`.EmptyPasswordStoreError` when the password store is empty.
This method first tries :func:`simple_search()` and if that doesn't
produce any matches it will fall back to :func:`fuzzy_search()`. If no
matches are found an exception is raised (see above).
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L177-L204
| null |
class AbstractPasswordStore(PropertyManager):
"""
Abstract Python API to query passwords managed by pass_.
This abstract base class has two concrete subclasses:
- The :class:`QuickPass` class manages multiple password stores as one.
- The :class:`PasswordStore` class manages a single password store.
"""
@property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
raise NotImplementedError()
@mutable_property(cached=True)
def exclude_list(self):
"""
A list of strings with filename patterns to ignore.
The :mod:`fnmatch` module is used for pattern matching. Filenames as
well as patterns are normalized to lowercase before pattern matching is
attempted.
"""
return []
@cached_property
def filtered_entries(self):
"""A list of :class:`PasswordEntry` objects that don't match the exclude list."""
return [
e for e in self.entries if not any(fnmatch.fnmatch(e.name.lower(), p.lower()) for p in self.exclude_list)
]
def fuzzy_search(self, *filters):
"""
Perform a "fuzzy" search that matches the given characters in the given order.
:param filters: The pattern(s) to search for.
:returns: The matched password names (a list of strings).
"""
matches = []
logger.verbose(
"Performing fuzzy search on %s (%s) ..", pluralize(len(filters), "pattern"), concatenate(map(repr, filters))
)
patterns = list(map(create_fuzzy_pattern, filters))
for entry in self.filtered_entries:
if all(p.search(entry.name) for p in patterns):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using fuzzy search.",
pluralize(len(matches), "password"),
)
return matches
def select_entry(self, *arguments):
"""
Select a password from the available choices.
:param arguments: Refer to :func:`smart_search()`.
:returns: The name of a password (a string) or :data:`None`
(when no password matched the given `arguments`).
"""
matches = self.smart_search(*arguments)
if len(matches) > 1:
logger.info("More than one match, prompting for choice ..")
labels = [entry.name for entry in matches]
return matches[labels.index(prompt_for_choice(labels))]
else:
logger.info("Matched one entry: %s", matches[0].name)
return matches[0]
def simple_search(self, *keywords):
"""
Perform a simple search for case insensitive substring matches.
:param keywords: The string(s) to search for.
:returns: The matched password names (a generator of strings).
Only passwords whose names matches *all* of the given keywords are
returned.
"""
matches = []
keywords = [kw.lower() for kw in keywords]
logger.verbose(
"Performing simple search on %s (%s) ..",
pluralize(len(keywords), "keyword"),
concatenate(map(repr, keywords)),
)
for entry in self.filtered_entries:
normalized = entry.name.lower()
if all(kw in normalized for kw in keywords):
matches.append(entry)
logger.log(
logging.INFO if matches else logging.VERBOSE,
"Matched %s using simple search.",
pluralize(len(matches), "password"),
)
return matches
|
xolox/python-qpass
|
qpass/__init__.py
|
QuickPass.entries
|
python
|
def entries(self):
passwords = []
for store in self.stores:
passwords.extend(store.entries)
return natsort(passwords, key=lambda e: e.name)
|
A list of :class:`PasswordEntry` objects.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L219-L224
| null |
class QuickPass(AbstractPasswordStore):
"""
Python API to query multiple password stores as if they are one.
:see also: The :class:`PasswordStore` class to query a single password store.
"""
repr_properties = ["stores"]
"""The properties included in the output of :func:`repr()`."""
@cached_property
@mutable_property(cached=True)
def stores(self):
"""A list of :class:`PasswordStore` objects."""
return [PasswordStore()]
|
xolox/python-qpass
|
qpass/__init__.py
|
PasswordStore.context
|
python
|
def context(self):
# Make sure the directory exists.
self.ensure_directory_exists()
# Prepare the environment variables.
environment = {DIRECTORY_VARIABLE: self.directory}
try:
# Try to enable the GPG agent in headless sessions.
environment.update(get_gpg_variables())
except Exception:
# If we failed then let's at least make sure that the
# $GPG_TTY environment variable is set correctly.
environment.update(GPG_TTY=execute("tty", capture=True, check=False, tty=True, silent=True))
return LocalContext(directory=self.directory, environment=environment)
|
An execution context created using :mod:`executor.contexts`.
The value of :attr:`context` defaults to a
:class:`~executor.contexts.LocalContext` object with the following
characteristics:
- The working directory of the execution context is set to the
value of :attr:`directory`.
- The environment variable given by :data:`DIRECTORY_VARIABLE` is set
to the value of :attr:`directory`.
:raises: :exc:`.MissingPasswordStoreError` when :attr:`directory`
doesn't exist.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L244-L272
| null |
class PasswordStore(AbstractPasswordStore):
"""
Python API to query a single password store.
:see also: The :class:`QuickPass` class to query multiple password stores.
"""
repr_properties = ["directory", "entries"]
"""The properties included in the output of :func:`repr()`."""
@mutable_property(cached=True)
@mutable_property(cached=True)
def directory(self):
"""
The pathname of the password storage directory (a string).
When the environment variable given by :data:`DIRECTORY_VARIABLE` is
set the value of that environment variable is used, otherwise
:data:`DEFAULT_DIRECTORY` is used. In either case the resulting
directory pathname is normalized using
:func:`~humanfriendly.parse_path()`.
When you set the :attr:`directory` property, the value you set will be
normalized using :func:`~humanfriendly.parse_path()` and the computed
value of the :attr:`context` property is cleared.
"""
return parse_path(os.environ.get(DIRECTORY_VARIABLE, DEFAULT_DIRECTORY))
@directory.setter
def directory(self, value):
"""Normalize the value of :attr:`directory` when it's set."""
# Normalize the value of `directory'.
set_property(self, "directory", parse_path(value))
# Clear the computed values of `context' and `entries'.
clear_property(self, "context")
clear_property(self, "entries")
@cached_property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
timer = Timer()
passwords = []
logger.info("Scanning %s ..", format_path(self.directory))
listing = self.context.capture("find", "-type", "f", "-name", "*.gpg", "-print0")
for filename in split(listing, "\0"):
basename, extension = os.path.splitext(filename)
if extension == ".gpg":
# We use os.path.normpath() to remove the leading `./' prefixes
# that `find' adds because it searches the working directory.
passwords.append(PasswordEntry(name=os.path.normpath(basename), store=self))
logger.verbose("Found %s in %s.", pluralize(len(passwords), "password"), timer)
return natsort(passwords, key=lambda e: e.name)
def ensure_directory_exists(self):
"""
Make sure :attr:`directory` exists.
:raises: :exc:`.MissingPasswordStoreError` when the password storage
directory doesn't exist.
"""
if not os.path.isdir(self.directory):
msg = "The password storage directory doesn't exist! (%s)"
raise MissingPasswordStoreError(msg % self.directory)
|
xolox/python-qpass
|
qpass/__init__.py
|
PasswordStore.directory
|
python
|
def directory(self, value):
# Normalize the value of `directory'.
set_property(self, "directory", parse_path(value))
# Clear the computed values of `context' and `entries'.
clear_property(self, "context")
clear_property(self, "entries")
|
Normalize the value of :attr:`directory` when it's set.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L292-L298
| null |
class PasswordStore(AbstractPasswordStore):
"""
Python API to query a single password store.
:see also: The :class:`QuickPass` class to query multiple password stores.
"""
repr_properties = ["directory", "entries"]
"""The properties included in the output of :func:`repr()`."""
@mutable_property(cached=True)
def context(self):
"""
An execution context created using :mod:`executor.contexts`.
The value of :attr:`context` defaults to a
:class:`~executor.contexts.LocalContext` object with the following
characteristics:
- The working directory of the execution context is set to the
value of :attr:`directory`.
- The environment variable given by :data:`DIRECTORY_VARIABLE` is set
to the value of :attr:`directory`.
:raises: :exc:`.MissingPasswordStoreError` when :attr:`directory`
doesn't exist.
"""
# Make sure the directory exists.
self.ensure_directory_exists()
# Prepare the environment variables.
environment = {DIRECTORY_VARIABLE: self.directory}
try:
# Try to enable the GPG agent in headless sessions.
environment.update(get_gpg_variables())
except Exception:
# If we failed then let's at least make sure that the
# $GPG_TTY environment variable is set correctly.
environment.update(GPG_TTY=execute("tty", capture=True, check=False, tty=True, silent=True))
return LocalContext(directory=self.directory, environment=environment)
@mutable_property(cached=True)
def directory(self):
"""
The pathname of the password storage directory (a string).
When the environment variable given by :data:`DIRECTORY_VARIABLE` is
set the value of that environment variable is used, otherwise
:data:`DEFAULT_DIRECTORY` is used. In either case the resulting
directory pathname is normalized using
:func:`~humanfriendly.parse_path()`.
When you set the :attr:`directory` property, the value you set will be
normalized using :func:`~humanfriendly.parse_path()` and the computed
value of the :attr:`context` property is cleared.
"""
return parse_path(os.environ.get(DIRECTORY_VARIABLE, DEFAULT_DIRECTORY))
@directory.setter
@cached_property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
timer = Timer()
passwords = []
logger.info("Scanning %s ..", format_path(self.directory))
listing = self.context.capture("find", "-type", "f", "-name", "*.gpg", "-print0")
for filename in split(listing, "\0"):
basename, extension = os.path.splitext(filename)
if extension == ".gpg":
# We use os.path.normpath() to remove the leading `./' prefixes
# that `find' adds because it searches the working directory.
passwords.append(PasswordEntry(name=os.path.normpath(basename), store=self))
logger.verbose("Found %s in %s.", pluralize(len(passwords), "password"), timer)
return natsort(passwords, key=lambda e: e.name)
def ensure_directory_exists(self):
"""
Make sure :attr:`directory` exists.
:raises: :exc:`.MissingPasswordStoreError` when the password storage
directory doesn't exist.
"""
if not os.path.isdir(self.directory):
msg = "The password storage directory doesn't exist! (%s)"
raise MissingPasswordStoreError(msg % self.directory)
|
xolox/python-qpass
|
qpass/__init__.py
|
PasswordStore.entries
|
python
|
def entries(self):
timer = Timer()
passwords = []
logger.info("Scanning %s ..", format_path(self.directory))
listing = self.context.capture("find", "-type", "f", "-name", "*.gpg", "-print0")
for filename in split(listing, "\0"):
basename, extension = os.path.splitext(filename)
if extension == ".gpg":
# We use os.path.normpath() to remove the leading `./' prefixes
# that `find' adds because it searches the working directory.
passwords.append(PasswordEntry(name=os.path.normpath(basename), store=self))
logger.verbose("Found %s in %s.", pluralize(len(passwords), "password"), timer)
return natsort(passwords, key=lambda e: e.name)
|
A list of :class:`PasswordEntry` objects.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L301-L314
| null |
class PasswordStore(AbstractPasswordStore):
"""
Python API to query a single password store.
:see also: The :class:`QuickPass` class to query multiple password stores.
"""
repr_properties = ["directory", "entries"]
"""The properties included in the output of :func:`repr()`."""
@mutable_property(cached=True)
def context(self):
"""
An execution context created using :mod:`executor.contexts`.
The value of :attr:`context` defaults to a
:class:`~executor.contexts.LocalContext` object with the following
characteristics:
- The working directory of the execution context is set to the
value of :attr:`directory`.
- The environment variable given by :data:`DIRECTORY_VARIABLE` is set
to the value of :attr:`directory`.
:raises: :exc:`.MissingPasswordStoreError` when :attr:`directory`
doesn't exist.
"""
# Make sure the directory exists.
self.ensure_directory_exists()
# Prepare the environment variables.
environment = {DIRECTORY_VARIABLE: self.directory}
try:
# Try to enable the GPG agent in headless sessions.
environment.update(get_gpg_variables())
except Exception:
# If we failed then let's at least make sure that the
# $GPG_TTY environment variable is set correctly.
environment.update(GPG_TTY=execute("tty", capture=True, check=False, tty=True, silent=True))
return LocalContext(directory=self.directory, environment=environment)
@mutable_property(cached=True)
def directory(self):
"""
The pathname of the password storage directory (a string).
When the environment variable given by :data:`DIRECTORY_VARIABLE` is
set the value of that environment variable is used, otherwise
:data:`DEFAULT_DIRECTORY` is used. In either case the resulting
directory pathname is normalized using
:func:`~humanfriendly.parse_path()`.
When you set the :attr:`directory` property, the value you set will be
normalized using :func:`~humanfriendly.parse_path()` and the computed
value of the :attr:`context` property is cleared.
"""
return parse_path(os.environ.get(DIRECTORY_VARIABLE, DEFAULT_DIRECTORY))
@directory.setter
def directory(self, value):
"""Normalize the value of :attr:`directory` when it's set."""
# Normalize the value of `directory'.
set_property(self, "directory", parse_path(value))
# Clear the computed values of `context' and `entries'.
clear_property(self, "context")
clear_property(self, "entries")
@cached_property
def ensure_directory_exists(self):
"""
Make sure :attr:`directory` exists.
:raises: :exc:`.MissingPasswordStoreError` when the password storage
directory doesn't exist.
"""
if not os.path.isdir(self.directory):
msg = "The password storage directory doesn't exist! (%s)"
raise MissingPasswordStoreError(msg % self.directory)
|
xolox/python-qpass
|
qpass/__init__.py
|
PasswordStore.ensure_directory_exists
|
python
|
def ensure_directory_exists(self):
if not os.path.isdir(self.directory):
msg = "The password storage directory doesn't exist! (%s)"
raise MissingPasswordStoreError(msg % self.directory)
|
Make sure :attr:`directory` exists.
:raises: :exc:`.MissingPasswordStoreError` when the password storage
directory doesn't exist.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L316-L325
| null |
class PasswordStore(AbstractPasswordStore):
"""
Python API to query a single password store.
:see also: The :class:`QuickPass` class to query multiple password stores.
"""
repr_properties = ["directory", "entries"]
"""The properties included in the output of :func:`repr()`."""
@mutable_property(cached=True)
def context(self):
"""
An execution context created using :mod:`executor.contexts`.
The value of :attr:`context` defaults to a
:class:`~executor.contexts.LocalContext` object with the following
characteristics:
- The working directory of the execution context is set to the
value of :attr:`directory`.
- The environment variable given by :data:`DIRECTORY_VARIABLE` is set
to the value of :attr:`directory`.
:raises: :exc:`.MissingPasswordStoreError` when :attr:`directory`
doesn't exist.
"""
# Make sure the directory exists.
self.ensure_directory_exists()
# Prepare the environment variables.
environment = {DIRECTORY_VARIABLE: self.directory}
try:
# Try to enable the GPG agent in headless sessions.
environment.update(get_gpg_variables())
except Exception:
# If we failed then let's at least make sure that the
# $GPG_TTY environment variable is set correctly.
environment.update(GPG_TTY=execute("tty", capture=True, check=False, tty=True, silent=True))
return LocalContext(directory=self.directory, environment=environment)
@mutable_property(cached=True)
def directory(self):
"""
The pathname of the password storage directory (a string).
When the environment variable given by :data:`DIRECTORY_VARIABLE` is
set the value of that environment variable is used, otherwise
:data:`DEFAULT_DIRECTORY` is used. In either case the resulting
directory pathname is normalized using
:func:`~humanfriendly.parse_path()`.
When you set the :attr:`directory` property, the value you set will be
normalized using :func:`~humanfriendly.parse_path()` and the computed
value of the :attr:`context` property is cleared.
"""
return parse_path(os.environ.get(DIRECTORY_VARIABLE, DEFAULT_DIRECTORY))
@directory.setter
def directory(self, value):
"""Normalize the value of :attr:`directory` when it's set."""
# Normalize the value of `directory'.
set_property(self, "directory", parse_path(value))
# Clear the computed values of `context' and `entries'.
clear_property(self, "context")
clear_property(self, "entries")
@cached_property
def entries(self):
"""A list of :class:`PasswordEntry` objects."""
timer = Timer()
passwords = []
logger.info("Scanning %s ..", format_path(self.directory))
listing = self.context.capture("find", "-type", "f", "-name", "*.gpg", "-print0")
for filename in split(listing, "\0"):
basename, extension = os.path.splitext(filename)
if extension == ".gpg":
# We use os.path.normpath() to remove the leading `./' prefixes
# that `find' adds because it searches the working directory.
passwords.append(PasswordEntry(name=os.path.normpath(basename), store=self))
logger.verbose("Found %s in %s.", pluralize(len(passwords), "password"), timer)
return natsort(passwords, key=lambda e: e.name)
|
xolox/python-qpass
|
qpass/__init__.py
|
PasswordEntry.format_text
|
python
|
def format_text(self, include_password=True, use_colors=None, padding=True, filters=()):
    """
    Format :attr:`text` for viewing on a terminal.

    :param include_password: :data:`True` to include the password (the first
                             line of the entry's text) in the formatted text,
                             :data:`False` to exclude it.
    :param use_colors: :data:`True` to use ANSI escape sequences,
                       :data:`False` otherwise. When this is :data:`None`
                       support for colors is detected automatically.
    :param padding: :data:`True` to add empty lines before and after the
                    entry and indent the entry's text, :data:`False` to
                    skip the padding.
    :param filters: An iterable of regular expression patterns (matched case
                    insensitively); lines of the entry's text that match any
                    of them won't be shown.
    :returns: The formatted entry (a string).
    """
    # Determine whether we can use ANSI escape sequences.
    if use_colors is None:
        use_colors = terminal_supports_colors()
    # Extract the password (first line) from the entry.
    lines = self.text.splitlines()
    password = lines.pop(0).strip()
    # Compile the given patterns to case insensitive regular expressions
    # and use them to ignore lines that match any of the given filters.
    patterns = [coerce_pattern(f, re.IGNORECASE) for f in filters]
    lines = [l for l in lines if not any(p.search(l) for p in patterns)]
    text = trim_empty_lines("\n".join(lines))
    # Include the password in the formatted text?
    if include_password:
        text = "Password: %s\n%s" % (password, text)
    # Add the name to the entry (only when there's something to show).
    if text and not text.isspace():
        title = " / ".join(split(self.name, "/"))
        if use_colors:
            title = ansi_wrap(title, bold=True)
        text = "%s\n\n%s" % (title, text)
    # Highlight the entry's text using ANSI escape sequences.
    lines = []
    for line in text.splitlines():
        # Check for a "Key: Value" line.
        match = KEY_VALUE_PATTERN.match(line)
        if match:
            key = "%s:" % match.group(1).strip()
            value = match.group(2).strip()
            if use_colors:
                # Highlight the key.
                key = ansi_wrap(key, color=HIGHLIGHT_COLOR)
                # Underline hyperlinks in the value.
                tokens = value.split()
                for i in range(len(tokens)):
                    if "://" in tokens[i]:
                        tokens[i] = ansi_wrap(tokens[i], underline=True)
                # Replace the line with a highlighted version.
                line = key + " " + " ".join(tokens)
        if padding:
            line = " " + line
        lines.append(line)
    text = "\n".join(lines)
    text = trim_empty_lines(text)
    if text and padding:
        text = "\n%s\n" % text
    return text
|
Format :attr:`text` for viewing on a terminal.
:param include_password: :data:`True` to include the password in the
formatted text, :data:`False` to exclude the
password from the formatted text.
:param use_colors: :data:`True` to use ANSI escape sequences,
:data:`False` otherwise. When this is :data:`None`
:func:`~humanfriendly.terminal.terminal_supports_colors()`
will be used to detect whether ANSI escape sequences
are supported.
:param padding: :data:`True` to add empty lines before and after the
entry and indent the entry's text with two spaces,
:data:`False` to skip the padding.
:param filters: An iterable of regular expression patterns (defaults to
an empty tuple). If a line in the entry's text matches
one of these patterns it won't be shown on the
terminal.
:returns: The formatted entry (a string).
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/__init__.py#L362-L428
| null |
class PasswordEntry(PropertyManager):
    """:class:`PasswordEntry` objects bind the name of a password to the store that contains the password."""

    repr_properties = ["name"]
    """The properties included in the output of :func:`repr()`."""

    @property
    def context(self):
        """The :attr:`~PasswordStore.context` of :attr:`store`."""
        return self.store.context

    @required_property
    def name(self):
        """The name of the password store entry (a string)."""

    @cached_property
    def password(self):
        """The password identified by :attr:`name` (a string)."""
        # By convention the password is stored on the first line of the entry.
        return self.text.splitlines()[0]

    @required_property
    def store(self):
        """The :class:`PasswordStore` that contains the entry."""

    @cached_property
    def text(self):
        """The full text of the entry (a string)."""
        # Delegate to `pass show' and capture its output.
        return self.context.capture("pass", "show", self.name)

    def copy_password(self):
        """Copy the password to the clipboard."""
        # The --clip option of `pass' takes care of the clipboard interaction.
        self.context.execute("pass", "show", "--clip", self.name)
|
xolox/python-qpass
|
qpass/cli.py
|
main
|
python
|
def main():
    """Command line interface for the ``qpass`` program."""
    # Initialize logging to the terminal.
    coloredlogs.install()
    # Prepare for command line argument parsing.
    action = show_matching_entry
    program_opts = dict(exclude_list=[])
    show_opts = dict(filters=[], use_clipboard=is_clipboard_supported())
    # Net effect of -v/-q options; a negative total also implies --quiet below.
    verbosity = 0
    # Parse the command line arguments.
    try:
        options, arguments = getopt.gnu_getopt(
            sys.argv[1:],
            "elnp:f:x:vqh",
            ["edit", "list", "no-clipboard", "password-store=", "filter=", "exclude=", "verbose", "quiet", "help"],
        )
        for option, value in options:
            if option in ("-e", "--edit"):
                action = edit_matching_entry
            elif option in ("-l", "--list"):
                action = list_matching_entries
            elif option in ("-n", "--no-clipboard"):
                show_opts["use_clipboard"] = False
            elif option in ("-p", "--password-store"):
                # Multiple -p options accumulate into a list of stores to search.
                stores = program_opts.setdefault("stores", [])
                stores.append(PasswordStore(directory=value))
            elif option in ("-f", "--filter"):
                show_opts["filters"].append(value)
            elif option in ("-x", "--exclude"):
                program_opts["exclude_list"].append(value)
            elif option in ("-v", "--verbose"):
                coloredlogs.increase_verbosity()
                verbosity += 1
            elif option in ("-q", "--quiet"):
                coloredlogs.decrease_verbosity()
                verbosity -= 1
            elif option in ("-h", "--help"):
                usage(__doc__)
                return
            else:
                raise Exception("Unhandled option! (programming error)")
        # Listing entries is the only action that works without keywords.
        if not (arguments or action == list_matching_entries):
            usage(__doc__)
            return
    except Exception as e:
        warning("Error: %s", e)
        sys.exit(1)
    # Execute the requested action.
    try:
        show_opts["quiet"] = verbosity < 0
        # Only show_matching_entry() accepts the display related options.
        kw = show_opts if action == show_matching_entry else {}
        action(QuickPass(**program_opts), arguments, **kw)
    except PasswordStoreError as e:
        # Known issues don't get a traceback.
        logger.error("%s", e)
        sys.exit(1)
    except KeyboardInterrupt:
        # If the user interrupted an interactive prompt they most likely did so
        # intentionally, so there's no point in generating more output here.
        sys.exit(1)
|
Command line interface for the ``qpass`` program.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/cli.py#L101-L160
|
[
"def is_clipboard_supported():\n \"\"\"\n Check whether the clipboard is supported.\n\n :returns: :data:`True` if the clipboard is supported, :data:`False` otherwise.\n \"\"\"\n return platform.system().lower() == \"darwin\" or bool(os.environ.get(\"DISPLAY\"))\n",
"def edit_matching_entry(program, arguments):\n \"\"\"Edit the matching entry.\"\"\"\n entry = program.select_entry(*arguments)\n entry.context.execute(\"pass\", \"edit\", entry.name)\n",
"def list_matching_entries(program, arguments):\n \"\"\"List the entries matching the given keywords/patterns.\"\"\"\n output(\"\\n\".join(entry.name for entry in program.smart_search(*arguments)))\n",
"def show_matching_entry(program, arguments, use_clipboard=True, quiet=False, filters=()):\n \"\"\"Show the matching entry on the terminal (and copy the password to the clipboard).\"\"\"\n entry = program.select_entry(*arguments)\n if not quiet:\n formatted_entry = entry.format_text(include_password=not use_clipboard, filters=filters)\n if formatted_entry and not formatted_entry.isspace():\n output(formatted_entry)\n if use_clipboard:\n entry.copy_password()\n"
] |
# qpass: Frontend for pass (the standard unix password manager).
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: December 3, 2018
# URL: https://github.com/xolox/python-qpass
"""
Usage: qpass [OPTIONS] KEYWORD..
Search your password store for the given keywords or patterns and copy the
password of the matching entry to the clipboard. When more than one entry
matches you will be prompted to select the password to copy.
If you provide more than one KEYWORD all of the given keywords must match,
in other words you're performing an AND search instead of an OR search.
Instead of matching on keywords you can also enter just a few of the characters
in the name of a password, as long as those characters are in the right order.
Some examples to make this more concrete:
- The pattern 'pe/zbx' will match the name 'Personal/Zabbix'.
- The pattern 'ba/cc' will match the name 'Bank accounts/Creditcard'.
When a password is copied to the clipboard, any text after the first line will
be shown on the terminal, to share any additional details about the password
entry (for example the associated username or email address). The -q, --quiet
option suppresses this text.
Supported options:
-e, --edit
Edit the matching entry instead of copying it to the clipboard.
-l, --list
List the matching entries on standard output.
-n, --no-clipboard
Don't copy the password of the matching entry to the clipboard, instead
show the password on the terminal (by default the password is copied to
the clipboard but not shown on the terminal).
-p, --password-store=DIRECTORY
Search the password store in DIRECTORY. If this option isn't given
the password store is located using the $PASSWORD_STORE_DIR
environment variable. If that environment variable isn't
set the directory ~/.password-store is used.
You can use the -p, --password-store option multiple times to search more
than one password store at the same time. No distinction is made between
passwords in different password stores, so the names of passwords need to
be recognizable and unique.
-f, --filter=PATTERN
Don't show lines in the additional details which match the case insensitive
regular expression given by PATTERN. This can be used to avoid revealing
sensitive details on the terminal. You can use this option more than once.
-x, --exclude=GLOB
Ignore passwords whose name matches the given GLOB filename pattern.
This argument can be repeated to add multiple exclude patterns.
-v, --verbose
Increase logging verbosity (can be repeated).
-q, --quiet
Decrease logging verbosity (can be repeated).
-h, --help
Show this message and exit.
"""
# Standard library modules.
import getopt
import logging
import sys
# External dependencies.
import coloredlogs
from humanfriendly.terminal import output, usage, warning
# Modules included in our package.
from qpass import PasswordStore, QuickPass, is_clipboard_supported
from qpass.exceptions import PasswordStoreError
# Public identifiers that require documentation.
__all__ = ("edit_matching_entry", "list_matching_entries", "logger", "main", "show_matching_entry")
# Initialize a logger for this module.
logger = logging.getLogger(__name__)
def edit_matching_entry(program, arguments):
    """Edit the entry that matches the given arguments."""
    selection = program.select_entry(*arguments)
    selection.context.execute("pass", "edit", selection.name)
def list_matching_entries(program, arguments):
    """List the entries matching the given keywords/patterns."""
    names = [entry.name for entry in program.smart_search(*arguments)]
    output("\n".join(names))
def show_matching_entry(program, arguments, use_clipboard=True, quiet=False, filters=()):
    """Show the matching entry on the terminal (and copy the password to the clipboard)."""
    entry = program.select_entry(*arguments)
    if not quiet:
        rendered = entry.format_text(include_password=not use_clipboard, filters=filters)
        has_details = bool(rendered) and not rendered.isspace()
        if has_details:
            output(rendered)
    if use_clipboard:
        entry.copy_password()
|
xolox/python-qpass
|
qpass/cli.py
|
edit_matching_entry
|
python
|
def edit_matching_entry(program, arguments):
    """Edit the entry matching the given keywords/patterns via ``pass edit``."""
    entry = program.select_entry(*arguments)
    entry.context.execute("pass", "edit", entry.name)
|
Edit the matching entry.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/cli.py#L163-L166
| null |
# qpass: Frontend for pass (the standard unix password manager).
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: December 3, 2018
# URL: https://github.com/xolox/python-qpass
"""
Usage: qpass [OPTIONS] KEYWORD..
Search your password store for the given keywords or patterns and copy the
password of the matching entry to the clipboard. When more than one entry
matches you will be prompted to select the password to copy.
If you provide more than one KEYWORD all of the given keywords must match,
in other words you're performing an AND search instead of an OR search.
Instead of matching on keywords you can also enter just a few of the characters
in the name of a password, as long as those characters are in the right order.
Some examples to make this more concrete:
- The pattern 'pe/zbx' will match the name 'Personal/Zabbix'.
- The pattern 'ba/cc' will match the name 'Bank accounts/Creditcard'.
When a password is copied to the clipboard, any text after the first line will
be shown on the terminal, to share any additional details about the password
entry (for example the associated username or email address). The -q, --quiet
option suppresses this text.
Supported options:
-e, --edit
Edit the matching entry instead of copying it to the clipboard.
-l, --list
List the matching entries on standard output.
-n, --no-clipboard
Don't copy the password of the matching entry to the clipboard, instead
show the password on the terminal (by default the password is copied to
the clipboard but not shown on the terminal).
-p, --password-store=DIRECTORY
Search the password store in DIRECTORY. If this option isn't given
the password store is located using the $PASSWORD_STORE_DIR
environment variable. If that environment variable isn't
set the directory ~/.password-store is used.
You can use the -p, --password-store option multiple times to search more
than one password store at the same time. No distinction is made between
passwords in different password stores, so the names of passwords need to
be recognizable and unique.
-f, --filter=PATTERN
Don't show lines in the additional details which match the case insensitive
regular expression given by PATTERN. This can be used to avoid revealing
sensitive details on the terminal. You can use this option more than once.
-x, --exclude=GLOB
Ignore passwords whose name matches the given GLOB filename pattern.
This argument can be repeated to add multiple exclude patterns.
-v, --verbose
Increase logging verbosity (can be repeated).
-q, --quiet
Decrease logging verbosity (can be repeated).
-h, --help
Show this message and exit.
"""
# Standard library modules.
import getopt
import logging
import sys
# External dependencies.
import coloredlogs
from humanfriendly.terminal import output, usage, warning
# Modules included in our package.
from qpass import PasswordStore, QuickPass, is_clipboard_supported
from qpass.exceptions import PasswordStoreError
# Public identifiers that require documentation.
__all__ = ("edit_matching_entry", "list_matching_entries", "logger", "main", "show_matching_entry")
# Initialize a logger for this module.
logger = logging.getLogger(__name__)
def main():
"""Command line interface for the ``qpass`` program."""
# Initialize logging to the terminal.
coloredlogs.install()
# Prepare for command line argument parsing.
action = show_matching_entry
program_opts = dict(exclude_list=[])
show_opts = dict(filters=[], use_clipboard=is_clipboard_supported())
verbosity = 0
# Parse the command line arguments.
try:
options, arguments = getopt.gnu_getopt(
sys.argv[1:],
"elnp:f:x:vqh",
["edit", "list", "no-clipboard", "password-store=", "filter=", "exclude=", "verbose", "quiet", "help"],
)
for option, value in options:
if option in ("-e", "--edit"):
action = edit_matching_entry
elif option in ("-l", "--list"):
action = list_matching_entries
elif option in ("-n", "--no-clipboard"):
show_opts["use_clipboard"] = False
elif option in ("-p", "--password-store"):
stores = program_opts.setdefault("stores", [])
stores.append(PasswordStore(directory=value))
elif option in ("-f", "--filter"):
show_opts["filters"].append(value)
elif option in ("-x", "--exclude"):
program_opts["exclude_list"].append(value)
elif option in ("-v", "--verbose"):
coloredlogs.increase_verbosity()
verbosity += 1
elif option in ("-q", "--quiet"):
coloredlogs.decrease_verbosity()
verbosity -= 1
elif option in ("-h", "--help"):
usage(__doc__)
return
else:
raise Exception("Unhandled option! (programming error)")
if not (arguments or action == list_matching_entries):
usage(__doc__)
return
except Exception as e:
warning("Error: %s", e)
sys.exit(1)
# Execute the requested action.
try:
show_opts["quiet"] = verbosity < 0
kw = show_opts if action == show_matching_entry else {}
action(QuickPass(**program_opts), arguments, **kw)
except PasswordStoreError as e:
# Known issues don't get a traceback.
logger.error("%s", e)
sys.exit(1)
except KeyboardInterrupt:
# If the user interrupted an interactive prompt they most likely did so
# intentionally, so there's no point in generating more output here.
sys.exit(1)
def list_matching_entries(program, arguments):
    """List the entries matching the given keywords/patterns."""
    # One entry name per line on standard output.
    output("\n".join(entry.name for entry in program.smart_search(*arguments)))

def show_matching_entry(program, arguments, use_clipboard=True, quiet=False, filters=()):
    """Show the matching entry on the terminal (and copy the password to the clipboard)."""
    entry = program.select_entry(*arguments)
    if not quiet:
        # Hide the password from the terminal when it's going to the clipboard.
        formatted_entry = entry.format_text(include_password=not use_clipboard, filters=filters)
        if formatted_entry and not formatted_entry.isspace():
            output(formatted_entry)
    if use_clipboard:
        entry.copy_password()
|
xolox/python-qpass
|
qpass/cli.py
|
list_matching_entries
|
python
|
def list_matching_entries(program, arguments):
    """Print the names of the entries matching the given keywords/patterns, one per line."""
    output("\n".join(entry.name for entry in program.smart_search(*arguments)))
|
List the entries matching the given keywords/patterns.
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/cli.py#L169-L171
| null |
# qpass: Frontend for pass (the standard unix password manager).
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: December 3, 2018
# URL: https://github.com/xolox/python-qpass
"""
Usage: qpass [OPTIONS] KEYWORD..
Search your password store for the given keywords or patterns and copy the
password of the matching entry to the clipboard. When more than one entry
matches you will be prompted to select the password to copy.
If you provide more than one KEYWORD all of the given keywords must match,
in other words you're performing an AND search instead of an OR search.
Instead of matching on keywords you can also enter just a few of the characters
in the name of a password, as long as those characters are in the right order.
Some examples to make this more concrete:
- The pattern 'pe/zbx' will match the name 'Personal/Zabbix'.
- The pattern 'ba/cc' will match the name 'Bank accounts/Creditcard'.
When a password is copied to the clipboard, any text after the first line will
be shown on the terminal, to share any additional details about the password
entry (for example the associated username or email address). The -q, --quiet
option suppresses this text.
Supported options:
-e, --edit
Edit the matching entry instead of copying it to the clipboard.
-l, --list
List the matching entries on standard output.
-n, --no-clipboard
Don't copy the password of the matching entry to the clipboard, instead
show the password on the terminal (by default the password is copied to
the clipboard but not shown on the terminal).
-p, --password-store=DIRECTORY
Search the password store in DIRECTORY. If this option isn't given
the password store is located using the $PASSWORD_STORE_DIR
environment variable. If that environment variable isn't
set the directory ~/.password-store is used.
You can use the -p, --password-store option multiple times to search more
than one password store at the same time. No distinction is made between
passwords in different password stores, so the names of passwords need to
be recognizable and unique.
-f, --filter=PATTERN
Don't show lines in the additional details which match the case insensitive
regular expression given by PATTERN. This can be used to avoid revealing
sensitive details on the terminal. You can use this option more than once.
-x, --exclude=GLOB
Ignore passwords whose name matches the given GLOB filename pattern.
This argument can be repeated to add multiple exclude patterns.
-v, --verbose
Increase logging verbosity (can be repeated).
-q, --quiet
Decrease logging verbosity (can be repeated).
-h, --help
Show this message and exit.
"""
# Standard library modules.
import getopt
import logging
import sys
# External dependencies.
import coloredlogs
from humanfriendly.terminal import output, usage, warning
# Modules included in our package.
from qpass import PasswordStore, QuickPass, is_clipboard_supported
from qpass.exceptions import PasswordStoreError
# Public identifiers that require documentation.
__all__ = ("edit_matching_entry", "list_matching_entries", "logger", "main", "show_matching_entry")
# Initialize a logger for this module.
logger = logging.getLogger(__name__)
def main():
"""Command line interface for the ``qpass`` program."""
# Initialize logging to the terminal.
coloredlogs.install()
# Prepare for command line argument parsing.
action = show_matching_entry
program_opts = dict(exclude_list=[])
show_opts = dict(filters=[], use_clipboard=is_clipboard_supported())
verbosity = 0
# Parse the command line arguments.
try:
options, arguments = getopt.gnu_getopt(
sys.argv[1:],
"elnp:f:x:vqh",
["edit", "list", "no-clipboard", "password-store=", "filter=", "exclude=", "verbose", "quiet", "help"],
)
for option, value in options:
if option in ("-e", "--edit"):
action = edit_matching_entry
elif option in ("-l", "--list"):
action = list_matching_entries
elif option in ("-n", "--no-clipboard"):
show_opts["use_clipboard"] = False
elif option in ("-p", "--password-store"):
stores = program_opts.setdefault("stores", [])
stores.append(PasswordStore(directory=value))
elif option in ("-f", "--filter"):
show_opts["filters"].append(value)
elif option in ("-x", "--exclude"):
program_opts["exclude_list"].append(value)
elif option in ("-v", "--verbose"):
coloredlogs.increase_verbosity()
verbosity += 1
elif option in ("-q", "--quiet"):
coloredlogs.decrease_verbosity()
verbosity -= 1
elif option in ("-h", "--help"):
usage(__doc__)
return
else:
raise Exception("Unhandled option! (programming error)")
if not (arguments or action == list_matching_entries):
usage(__doc__)
return
except Exception as e:
warning("Error: %s", e)
sys.exit(1)
# Execute the requested action.
try:
show_opts["quiet"] = verbosity < 0
kw = show_opts if action == show_matching_entry else {}
action(QuickPass(**program_opts), arguments, **kw)
except PasswordStoreError as e:
# Known issues don't get a traceback.
logger.error("%s", e)
sys.exit(1)
except KeyboardInterrupt:
# If the user interrupted an interactive prompt they most likely did so
# intentionally, so there's no point in generating more output here.
sys.exit(1)
def edit_matching_entry(program, arguments):
    """Edit the matching entry."""
    # Defer the actual editing to `pass edit'.
    entry = program.select_entry(*arguments)
    entry.context.execute("pass", "edit", entry.name)

def show_matching_entry(program, arguments, use_clipboard=True, quiet=False, filters=()):
    """Show the matching entry on the terminal (and copy the password to the clipboard)."""
    entry = program.select_entry(*arguments)
    if not quiet:
        # Hide the password from the terminal when it's going to the clipboard.
        formatted_entry = entry.format_text(include_password=not use_clipboard, filters=filters)
        if formatted_entry and not formatted_entry.isspace():
            output(formatted_entry)
    if use_clipboard:
        entry.copy_password()
|
xolox/python-qpass
|
qpass/cli.py
|
show_matching_entry
|
python
|
def show_matching_entry(program, arguments, use_clipboard=True, quiet=False, filters=()):
    """Show the matching entry on the terminal (and copy the password to the clipboard)."""
    entry = program.select_entry(*arguments)
    if not quiet:
        # Hide the password from the terminal when it's going to the clipboard.
        formatted_entry = entry.format_text(include_password=not use_clipboard, filters=filters)
        if formatted_entry and not formatted_entry.isspace():
            output(formatted_entry)
    if use_clipboard:
        entry.copy_password()
|
Show the matching entry on the terminal (and copy the password to the clipboard).
|
train
|
https://github.com/xolox/python-qpass/blob/43ce447b0904ff42a54b8f1dd4d2479f950f258f/qpass/cli.py#L174-L182
| null |
# qpass: Frontend for pass (the standard unix password manager).
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: December 3, 2018
# URL: https://github.com/xolox/python-qpass
"""
Usage: qpass [OPTIONS] KEYWORD..
Search your password store for the given keywords or patterns and copy the
password of the matching entry to the clipboard. When more than one entry
matches you will be prompted to select the password to copy.
If you provide more than one KEYWORD all of the given keywords must match,
in other words you're performing an AND search instead of an OR search.
Instead of matching on keywords you can also enter just a few of the characters
in the name of a password, as long as those characters are in the right order.
Some examples to make this more concrete:
- The pattern 'pe/zbx' will match the name 'Personal/Zabbix'.
- The pattern 'ba/cc' will match the name 'Bank accounts/Creditcard'.
When a password is copied to the clipboard, any text after the first line will
be shown on the terminal, to share any additional details about the password
entry (for example the associated username or email address). The -q, --quiet
option suppresses this text.
Supported options:
-e, --edit
Edit the matching entry instead of copying it to the clipboard.
-l, --list
List the matching entries on standard output.
-n, --no-clipboard
Don't copy the password of the matching entry to the clipboard, instead
show the password on the terminal (by default the password is copied to
the clipboard but not shown on the terminal).
-p, --password-store=DIRECTORY
Search the password store in DIRECTORY. If this option isn't given
the password store is located using the $PASSWORD_STORE_DIR
environment variable. If that environment variable isn't
set the directory ~/.password-store is used.
You can use the -p, --password-store option multiple times to search more
than one password store at the same time. No distinction is made between
passwords in different password stores, so the names of passwords need to
be recognizable and unique.
-f, --filter=PATTERN
Don't show lines in the additional details which match the case insensitive
regular expression given by PATTERN. This can be used to avoid revealing
sensitive details on the terminal. You can use this option more than once.
-x, --exclude=GLOB
Ignore passwords whose name matches the given GLOB filename pattern.
This argument can be repeated to add multiple exclude patterns.
-v, --verbose
Increase logging verbosity (can be repeated).
-q, --quiet
Decrease logging verbosity (can be repeated).
-h, --help
Show this message and exit.
"""
# Standard library modules.
import getopt
import logging
import sys
# External dependencies.
import coloredlogs
from humanfriendly.terminal import output, usage, warning
# Modules included in our package.
from qpass import PasswordStore, QuickPass, is_clipboard_supported
from qpass.exceptions import PasswordStoreError
# Public identifiers that require documentation.
__all__ = ("edit_matching_entry", "list_matching_entries", "logger", "main", "show_matching_entry")
# Initialize a logger for this module.
logger = logging.getLogger(__name__)
def main():
"""Command line interface for the ``qpass`` program."""
# Initialize logging to the terminal.
coloredlogs.install()
# Prepare for command line argument parsing.
action = show_matching_entry
program_opts = dict(exclude_list=[])
show_opts = dict(filters=[], use_clipboard=is_clipboard_supported())
verbosity = 0
# Parse the command line arguments.
try:
options, arguments = getopt.gnu_getopt(
sys.argv[1:],
"elnp:f:x:vqh",
["edit", "list", "no-clipboard", "password-store=", "filter=", "exclude=", "verbose", "quiet", "help"],
)
for option, value in options:
if option in ("-e", "--edit"):
action = edit_matching_entry
elif option in ("-l", "--list"):
action = list_matching_entries
elif option in ("-n", "--no-clipboard"):
show_opts["use_clipboard"] = False
elif option in ("-p", "--password-store"):
stores = program_opts.setdefault("stores", [])
stores.append(PasswordStore(directory=value))
elif option in ("-f", "--filter"):
show_opts["filters"].append(value)
elif option in ("-x", "--exclude"):
program_opts["exclude_list"].append(value)
elif option in ("-v", "--verbose"):
coloredlogs.increase_verbosity()
verbosity += 1
elif option in ("-q", "--quiet"):
coloredlogs.decrease_verbosity()
verbosity -= 1
elif option in ("-h", "--help"):
usage(__doc__)
return
else:
raise Exception("Unhandled option! (programming error)")
if not (arguments or action == list_matching_entries):
usage(__doc__)
return
except Exception as e:
warning("Error: %s", e)
sys.exit(1)
# Execute the requested action.
try:
show_opts["quiet"] = verbosity < 0
kw = show_opts if action == show_matching_entry else {}
action(QuickPass(**program_opts), arguments, **kw)
except PasswordStoreError as e:
# Known issues don't get a traceback.
logger.error("%s", e)
sys.exit(1)
except KeyboardInterrupt:
# If the user interrupted an interactive prompt they most likely did so
# intentionally, so there's no point in generating more output here.
sys.exit(1)
def edit_matching_entry(program, arguments):
    """Edit the matching entry."""
    # Defer the actual editing to `pass edit'.
    entry = program.select_entry(*arguments)
    entry.context.execute("pass", "edit", entry.name)

def list_matching_entries(program, arguments):
    """List the entries matching the given keywords/patterns."""
    # One entry name per line on standard output.
    output("\n".join(entry.name for entry in program.smart_search(*arguments)))
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
init_interface
|
python
|
def init_interface():
    """
    Grab the ~/.polyglot/.env file for variables.

    If you are running Polyglot v2 on this same machine
    then it should already exist. If not create it.
    """
    # Route stdout/stderr through the logger so stray prints end up in the log file.
    sys.stdout = LoggerWriter(LOGGER.debug)
    sys.stderr = LoggerWriter(LOGGER.error)
    # Turn UserWarning into an exception so a missing .env file can be caught below.
    warnings.simplefilter('error', UserWarning)
    try:
        load_dotenv(join(expanduser("~") + '/.polyglot/.env'))
    except (UserWarning) as err:
        LOGGER.warning('File does not exist: {}.'.format(join(expanduser("~") + '/.polyglot/.env')), exc_info=True)
        # sys.exit(1)
    warnings.resetwarnings()
    """
    If this NodeServer is co-resident with Polyglot it will receive a STDIN config on startup
    that looks like:
    {"token":"2cb40e507253fc8f4cbbe247089b28db79d859cbed700ec151",
    "mqttHost":"localhost","mqttPort":"1883","profileNum":"10"}
    """
    # Wait at most one second for the optional STDIN configuration.
    init = select.select([sys.stdin], [], [], 1)[0]
    if init:
        line = sys.stdin.readline()
        try:
            line = json.loads(line)
            # STDIN config overrides whatever came from the .env file.
            os.environ['PROFILE_NUM'] = line['profileNum']
            os.environ['MQTT_HOST'] = line['mqttHost']
            os.environ['MQTT_PORT'] = line['mqttPort']
            os.environ['TOKEN'] = line['token']
            LOGGER.info('Received Config from STDIN.')
        except (Exception) as err:
            # e = sys.exc_info()[0]
            LOGGER.error('Invalid formatted input. Skipping. %s', err, exc_info=True)
|
Grab the ~/.polyglot/.env file for variables
If you are running Polyglot v2 on this same machine
then it should already exist. If not create it.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L76-L112
| null |
#!/usr/bin/env python
"""
Python Interface for UDI Polyglot v2 NodeServers
by Einstein.42 (James Milne) milne.james@gmail.com
"""
from copy import deepcopy
from dotenv import load_dotenv
import json
import ssl
import logging
import logging.handlers
import __main__ as main
import markdown2
import os
from os.path import join, expanduser
import paho.mqtt.client as mqtt
try:
import queue
except ImportError:
import Queue as queue
import re
import sys
import select
from threading import Thread
import warnings
def warning_on_one_line(message, category, filename, lineno, file=None, line=None):
    """Render a warning as a single ``filename:lineno: Category: message`` line."""
    template = '{}:{}: {}: {}'
    return template.format(filename, lineno, category.__name__, message)
class LoggerWriter(object):
    """Minimal file-like object that forwards writes to a logging callable."""

    # Matches writes that consist solely of whitespace (hoisted so the
    # pattern is compiled once instead of on every write).
    _BLANK = re.compile(r'^\s*$')

    def __init__(self, level):
        # ``level`` is a callable such as a bound logger method.
        self.level = level

    def write(self, message):
        # Skip whitespace-only writes so bare newlines don't spam the log.
        if self._BLANK.match(message) is None:
            self.level(message.strip())

    def flush(self):
        # Required by the file-like API; nothing is buffered here.
        pass
def setup_log():
    """Configure and return the module logger, writing DEBUG output to ./logs/debug.log."""
    # Log Location
    # path = os.path.dirname(sys.argv[0])
    if not os.path.exists('./logs'):
        os.makedirs('./logs')
    log_filename = "./logs/debug.log"
    log_level = logging.DEBUG  # Could be e.g. "DEBUG" or "WARNING"
    # ### Logging Section ################################################################################
    logging.captureWarnings(True)
    logger = logging.getLogger(__name__)
    # Don't let records propagate to the root logger's handlers.
    logger.propagate = False
    warnlog = logging.getLogger('py.warnings')
    warnings.formatwarning = warning_on_one_line
    logger.setLevel(log_level)
    # Set the log level to LOG_LEVEL
    # Make a handler that writes to a file,
    # making a new file at midnight and keeping 30 backups
    handler = logging.handlers.TimedRotatingFileHandler(log_filename, when="midnight", backupCount=30)
    # Format each log message like this
    formatter = logging.Formatter('%(asctime)s [%(threadName)-10s] [%(levelname)-5s] %(message)s')
    # Attach the formatter to the handler
    handler.setFormatter(formatter)
    # Attach the handler to the logger
    logger.addHandler(handler)
    warnlog.addHandler(handler)
    return logger
LOGGER = setup_log()
def unload_interface():
    """Detach the interface's logging hooks.

    Restores the original stdout/stderr streams (in case they were replaced
    with :class:`LoggerWriter` adapters) and removes all handlers from the
    module logger so a NodeServer can shut down or reload cleanly.
    """
    sys.stdout = sys.__stdout__
    sys.stderr = sys.__stderr__
    LOGGER.handlers = []
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
    def addNode(self, node):
        """
        Add a node to the NodeServer.

        :param node: a Node instance (not a dict); its address, name, id,
                     primary, drivers and hint attributes are serialized
                     into the 'addnode' message sent to Polyglot.
        """
        LOGGER.info('Adding node {}({})'.format(node.name, node.address))
        message = {
            'addnode': {
                'nodes': [{
                    'address': node.address,
                    'name': node.name,
                    'node_def_id': node.id,
                    'primary': node.primary,
                    'drivers': node.drivers,
                    'hint': node.hint
                }]
            }
        }
        self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
    def removeNotice(self, data):
        """
        Remove a notice from the front-end for this NodeServer.

        (Docstring fixed: it previously said "Add custom notice",
        copy-pasted from addNotice.)

        :param data: Index of notices list to remove.
        """
        LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
        message = { 'removenotice': data }
        self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
class Node(object):
"""
Node Class for individual devices.
"""
def __init__(self, controller, primary, address, name):
try:
self.controller = controller
self.parent = self.controller
self.primary = primary
self.address = address
self.name = name
self.polyConfig = None
self.drivers = deepcopy(self.drivers)
self._drivers = deepcopy(self.drivers)
self.isPrimary = None
self.config = None
self.timeAdded = None
self.enabled = None
self.added = None
except (KeyError) as err:
LOGGER.error('Error Creating node: {}'.format(err), exc_info=True)
def _convertDrivers(self, drivers):
return deepcopy(drivers)
"""
if isinstance(drivers, list):
newFormat = {}
for driver in drivers:
newFormat[driver['driver']] = {}
newFormat[driver['driver']]['value'] = driver['value']
newFormat[driver['driver']]['uom'] = driver['uom']
return newFormat
else:
return deepcopy(drivers)
"""
def setDriver(self, driver, value, report=True, force=False, uom=None):
for d in self.drivers:
if d['driver'] == driver:
d['value'] = value
if uom is not None:
d['uom'] = uom
if report:
self.reportDriver(d, report, force)
break
def reportDriver(self, driver, report, force):
for d in self._drivers:
if (d['driver'] == driver['driver'] and
(str(d['value']) != str(driver['value']) or
d['uom'] != driver['uom'] or
force)):
LOGGER.info('Updating Driver {} - {}: {}, uom: {}'.format(self.address, driver['driver'], driver['value'], driver['uom']))
d['value'] = deepcopy(driver['value'])
if d['uom'] != driver['uom']:
d['uom'] = deepcopy(driver['uom'])
message = {
'status': {
'address': self.address,
'driver': driver['driver'],
'value': str(driver['value']),
'uom': driver['uom']
}
}
self.controller.poly.send(message)
break
def reportCmd(self, command, value=None, uom=None):
message = {
'command': {
'address': self.address,
'command': command
}
}
if value is not None and uom is not None:
message['command']['value'] = str(value)
message['command']['uom'] = uom
self.controller.poly.send(message)
def reportDrivers(self):
LOGGER.info('Updating All Drivers to ISY for {}({})'.format(self.name, self.address))
self.updateDrivers(self.drivers)
for driver in self.drivers:
message = {
'status': {
'address': self.address,
'driver': driver['driver'],
'value': driver['value'],
'uom': driver['uom']
}
}
self.controller.poly.send(message)
def updateDrivers(self, drivers):
self._drivers = deepcopy(drivers)
def query(self):
self.reportDrivers()
def status(self):
self.reportDrivers()
def runCmd(self, command):
if command['cmd'] in self.commands:
fun = self.commands[command['cmd']]
fun(self, command)
def start(self):
pass
def getDriver(self, dv):
for index, node in enumerate(self.controller.poly.config['nodes']):
if node['address'] == self.address:
for index, driver in enumerate(node['drivers']):
if driver['driver'] == dv:
return driver['value']
return None
def toJSON(self):
LOGGER.debug(json.dumps(self.__dict__))
def __rep__(self):
return self.toJSON()
id = ''
commands = {}
drivers = []
sends = {}
hint = [ 0, 0, 0, 0 ]
class Controller(Node):
"""
Controller Class for controller management. Superclass of Node
"""
__exists = False
def __init__(self, poly, name='Controller'):
if self.__exists:
warnings.warn('Only one Controller is allowed.')
return
try:
self.controller = self
self.parent = self.controller
self.poly = poly
self.poly.onConfig(self._gotConfig)
self.poly.onStop(self.stop)
self.name = name
self.address = 'controller'
self.primary = self.address
self._drivers = deepcopy(self.drivers)
self._nodes = {}
self.config = None
self.nodes = { self.address: self }
self._threads = {}
self._threads['input'] = Thread(target = self._parseInput, name = 'Controller')
self._threads['ns'] = Thread(target = self.start, name = 'NodeServer')
self.polyConfig = None
self.isPrimary = None
self.timeAdded = None
self.enabled = None
self.added = None
self.started = False
self.nodesAdding = []
# self._threads = []
self._startThreads()
except (KeyError) as err:
LOGGER.error('Error Creating node: {}'.format(err), exc_info=True)
def _gotConfig(self, config):
self.polyConfig = config
for node in config['nodes']:
self._nodes[node['address']] = node
if node['address'] in self.nodes:
n = self.nodes[node['address']]
n.updateDrivers(node['drivers'])
n.config = node
n.isPrimary = node['isprimary']
n.timeAdded = node['timeAdded']
n.enabled = node['enabled']
n.added = node['added']
if self.address not in self._nodes:
self.addNode(self)
LOGGER.info('Waiting on Controller node to be added.......')
if not self.started:
self.nodes[self.address] = self
self.started = True
# self.setDriver('ST', 1, True, True)
self._threads['ns'].start()
def _startThreads(self):
self._threads['input'].daemon = True
self._threads['ns'].daemon = True
self._threads['input'].start()
def _parseInput(self):
while True:
input = self.poly.inQueue.get()
for key in input:
if key == 'command':
if input[key]['address'] in self.nodes:
try:
self.nodes[input[key]['address']].runCmd(input[key])
except (Exception) as err:
LOGGER.error('_parseInput: failed {}.runCmd({}) {}'.format(input[key]['address'], input[key]['cmd'], err), exc_info=True)
else:
LOGGER.error('_parseInput: received command {} for a node that is not in memory: {}'.format(input[key]['cmd'], input[key]['address']))
elif key == 'result':
self._handleResult(input[key])
elif key == 'delete':
self._delete()
elif key == 'shortPoll':
self.shortPoll()
elif key == 'longPoll':
self.longPoll()
elif key == 'query':
if input[key]['address'] in self.nodes:
self.nodes[input[key]['address']].query()
elif input[key]['address'] == 'all':
self.query()
elif key == 'status':
if input[key]['address'] in self.nodes:
self.nodes[input[key]['address']].status()
elif input[key]['address'] == 'all':
self.status()
self.poly.inQueue.task_done()
def _handleResult(self, result):
# LOGGER.debug(self.nodesAdding)
try:
if 'addnode' in result:
if result['addnode']['success']:
if not result['addnode']['address'] == self.address:
self.nodes[result['addnode']['address']].start()
# self.nodes[result['addnode']['address']].reportDrivers()
if result['addnode']['address'] in self.nodesAdding:
self.nodesAdding.remove(result['addnode']['address'])
else:
del self.nodes[result['addnode']['address']]
except (KeyError, ValueError) as err:
LOGGER.error('handleResult: {}'.format(err), exc_info=True)
def _delete(self):
"""
Intermediate message that stops MQTT before sending to overrideable method for delete.
"""
self.poly.stop()
self.delete()
def _convertDrivers(self, drivers):
return deepcopy(drivers)
"""
if isinstance(drivers, list):
newFormat = {}
for driver in drivers:
newFormat[driver['driver']] = {}
newFormat[driver['driver']]['value'] = driver['value']
newFormat[driver['driver']]['uom'] = driver['uom']
return newFormat
else:
return deepcopy(drivers)
"""
def delete(self):
"""
Incoming delete message from Polyglot. This NodeServer is being deleted.
You have 5 seconds before the process is killed. Cleanup and disconnect.
"""
pass
"""
AddNode adds the class to self.nodes then sends the request to Polyglot
If update is True, overwrite the node in Polyglot
"""
def addNode(self, node, update=False):
if node.address in self._nodes:
node._drivers = self._nodes[node.address]['drivers']
for driver in node.drivers:
for existing in self._nodes[node.address]['drivers']:
if driver['driver'] == existing['driver']:
driver['value'] = existing['value']
# JIMBO SAYS NO
# driver['uom'] = existing['uom']
self.nodes[node.address] = node
# if node.address not in self._nodes or update:
self.nodesAdding.append(node.address)
self.poly.addNode(node)
# else:
# self.nodes[node.address].start()
return node
"""
Forces a full overwrite of the node
"""
def updateNode(self, node):
self.nodes[node.address] = node
self.nodesAdding.append(node.address)
self.poly.addNode(node)
def delNode(self, address):
"""
Just send it along if requested, should be able to delete the node even if it isn't
in our config anywhere. Usually used for normalization.
"""
if address in self.nodes:
del self.nodes[address]
self.poly.delNode(address)
def longPoll(self):
pass
def shortPoll(self):
pass
def query(self):
for node in self.nodes:
self.nodes[node].reportDrivers()
def status(self):
for node in self.nodes:
self.nodes[node].reportDrivers()
def runForever(self):
self._threads['input'].join()
def start(self):
pass
def saveCustomData(self, data):
if not isinstance(data, dict):
LOGGER.error('saveCustomData: data isn\'t a dictionary. Ignoring.')
else:
self.poly.saveCustomData(data)
def addCustomParam(self, data):
if not isinstance(data, dict):
LOGGER.error('addCustomParam: data isn\'t a dictionary. Ignoring.')
else:
newData = self.poly.config['customParams']
newData.update(data)
self.poly.saveCustomParams(newData)
def removeCustomParam(self, data):
try: # check whether python knows about 'basestring'
basestring
except NameError: # no, it doesn't (it's Python3); use 'str' instead
basestring = str
if not isinstance(data, basestring):
LOGGER.error('removeCustomParam: data isn\'t a string. Ignoring.')
else:
try:
newData = deepcopy(self.poly.config['customParams'])
newData.pop(data)
self.poly.saveCustomParams(newData)
except KeyError:
LOGGER.error('{} not found in customParams. Ignoring...'.format(data), exc_info=True)
def getCustomParam(self, data):
params = deepcopy(self.poly.config['customParams'])
return params.get(data)
def addNotice(self, data, key=None):
if not isinstance(data, dict):
self.poly.addNotice({ 'key': key, 'value': data})
else:
if 'value' in data:
self.poly.addNotice(data)
else:
for key, value in data.items():
self.poly.addNotice({ 'key': key, 'value': value })
def removeNotice(self, key):
data = { 'key': str(key) }
self.poly.removeNotice(data)
def getNotices(self):
return self.poly.config['notices']
def removeNoticesAll(self):
if type(self.poly.config['notices']) == dict:
for key in self.poly.config['notices'].keys():
self.removeNotice(key)
else:
if len(self.poly.config['notices']):
for i in range(len(self.poly.config['notices'])):
self.removeNotice(i)
def stop(self):
""" Called on nodeserver stop """
pass
id = 'controller'
commands = {}
drivers = [{'driver': 'ST', 'value': 0, 'uom': 2}]
if __name__ == "__main__":
sys.exit(0)
if hasattr(main, '__file__'):
init_interface()
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface._connect
|
python
|
def _connect(self, mqttc, userdata, flags, rc):
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
|
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L207-L239
| null |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface._message
|
python
|
def _message(self, mqttc, userdata, msg):
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
|
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L241-L272
| null |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface._disconnect
|
python
|
def _disconnect(self, mqttc, userdata, rc):
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
|
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L274-L292
| null |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
    """
    Send custom parameters descriptions to Polyglot to be used
    in front end UI configuration screen
    Accepts a single object or a list of objects with the following properties
        name - used as a key when data is sent from UI
        title - displayed in UI
        defaultValue - optionanl
        type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
               Defaults to 'STRING'
        desc - optional, shown in tooltip in UI
        isRequired - optional, True/False, when set, will not validate UI
                     input if it's empty
        isList - optional, True/False, if set this will be treated as list
                 of values or objects by UI
        params - optional, can contain a list of objects. If present, then
                 this (parent) is treated as object / list of objects by UI,
                 otherwise, it's treated as a single / list of single values
    """
    LOGGER.info('Sending typed parameters to Polyglot.')
    # Bug fix: `type(data) is not list` re-wrapped list *subclasses* into a
    # nested list; isinstance() accepts any list-like sequence derived from list.
    if not isinstance(data, list):
        data = [data]
    self.send({'typedparams': data})
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface._startMqtt
|
python
|
def _startMqtt(self):
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
|
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L313-L326
| null |
class Interface(object):
    # Markdown file (in the node server's working directory) rendered into the
    # Polyglot UI as configuration help for this node server.
    CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
    # NOTE(review): this string follows the first class attribute, so it is a
    # no-op expression rather than the class docstring (Interface.__doc__ is
    # not populated) -- consider moving it above CUSTOM_CONFIG_DOCS_FILE_NAME.
    """
    Polyglot Interface Class
    :param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
    """
    # pylint: disable=too-many-instance-attributes
    # pylint: disable=unused-argument
    # Class-level singleton guard: only one Interface may ever be created.
    __exists = False
    def __init__(self, envVar=None):
        if self.__exists:
            warnings.warn('Only one Interface is allowed.')
            return
        self.connected = False
        # Resolve the profile number: PROFILE_NUM env var first, then the
        # variable named by envVar (loaded from ~/.polyglot/.env).
        self.profileNum = os.environ.get("PROFILE_NUM")
        if self.profileNum is None:
            if envVar is not None:
                self.profileNum = os.environ.get(envVar)
        if self.profileNum is None:
            LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
            sys.exit(1)
        self.profileNum = str(self.profileNum)
        # MQTT topics used for Polyglot <-> node server messaging.
        self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
        self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
        self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
        self._threads = {}
        self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
        self._mqttc = mqtt.Client(envVar, True)
        # self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
        self._mqttc.on_connect = self._connect
        self._mqttc.on_message = self._message
        self._mqttc.on_subscribe = self._subscribe
        self._mqttc.on_disconnect = self._disconnect
        self._mqttc.on_publish = self._publish
        self._mqttc.on_log = self._log
        self.useSecure = True
        if 'USE_HTTPS' in os.environ:
            # NOTE(review): environment values are strings, so after this
            # assignment `self.useSecure is True` below is always False and
            # the TLS setup is skipped whenever USE_HTTPS is set -- confirm.
            self.useSecure = os.environ['USE_HTTPS']
        if self.useSecure is True:
            if 'MQTT_CERTPATH' in os.environ:
                self._mqttc.tls_set(
                    ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
                    certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
                    keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
                    tls_version=ssl.PROTOCOL_TLSv1_2)
            else:
                # Default certificate locations under the user's ~/.polyglot/ssl.
                self._mqttc.tls_set(
                    ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
                    certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
                    keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
                    tls_version=ssl.PROTOCOL_TLSv1_2
                )
        # self._mqttc.tls_insecure_set(True)
        # self._mqttc.enable_logger(logger=LOGGER)
        self.config = None
        # self.loop = asyncio.new_event_loop()
        self.loop = None
        self.inQueue = queue.Queue()
        # self.thread = Thread(target=self.start_loop)
        self.isyVersion = None
        # Broker location, overridable via MQTT_HOST / MQTT_PORT env vars.
        self._server = os.environ.get("MQTT_HOST") or 'localhost'
        self._port = os.environ.get("MQTT_PORT") or '1883'
        self.polyglotConnected = False
        self.__configObservers = []
        self.__stopObservers = []
        Interface.__exists = True
        self.custom_params_docs_file_sent = False
        self.custom_params_pending_docs = ''
    def onConfig(self, callback):
        """
        Gives the ability to bind any methods to be run when the config is received.
        """
        self.__configObservers.append(callback)
    def onStop(self, callback):
        """
        Gives the ability to bind any methods to be run when the stop command is received.
        """
        self.__stopObservers.append(callback)
    def _connect(self, mqttc, userdata, flags, rc):
        """
        The callback for when the client receives a CONNACK response from the server.
        Subscribing in on_connect() means that if we lose the connection and
        reconnect then subscriptions will be renewed.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param flags: The flags set on the connection.
        :param rc: Result code of connection, 0 = Success, anything else is a failure
        """
        if rc == 0:
            self.connected = True
            results = []
            LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
            # result, mid = self._mqttc.subscribe(self.topicInput)
            results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
            results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
            for (topic, (result, mid)) in results:
                if result == 0:
                    LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
                else:
                    LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
                    # If subscription fails, try to reconnect.
                    self._mqttc.reconnect()
            # Announce this node server as connected (retained so Polyglot sees
            # the state even after its own restart).
            self._mqttc.publish(self.topicSelfConnection, json.dumps(
                {
                    'connected': True,
                    'node': self.profileNum
                }), retain=True)
            LOGGER.info('Sent Connected message to Polyglot')
        else:
            LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
    def _message(self, mqttc, userdata, msg):
        """
        The callback for when a PUBLISH message is received from the server.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param msg: MQTT received message. Uses: msg.topic, msg.qos, msg.payload
        """
        try:
            inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
            parsed_msg = json.loads(msg.payload.decode('utf-8'))
            # Only process messages originating from Polyglot itself.
            if 'node' in parsed_msg:
                if parsed_msg['node'] != 'polyglot':
                    return
                del parsed_msg['node']
                for key in parsed_msg:
                    # LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
                    if key == 'config':
                        self.inConfig(parsed_msg[key])
                    elif key == 'connected':
                        self.polyglotConnected = parsed_msg[key]
                    elif key == 'stop':
                        LOGGER.debug('Received stop from Polyglot... Shutting Down.')
                        self.stop()
                    elif key in inputCmds:
                        self.input(parsed_msg)
                    else:
                        LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
        except (ValueError) as err:
            # NOTE(review): only ValueError (bad JSON/decode) is handled here;
            # any other exception propagates into the paho-mqtt loop -- confirm.
            LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
    def _disconnect(self, mqttc, userdata, rc):
        """
        The callback for when a DISCONNECT occurs.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param rc: Result code of connection, 0 = Graceful, anything else is unclean
        """
        self.connected = False
        if rc != 0:
            LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
            try:
                self._mqttc.reconnect()
            except Exception as ex:
                template = "An exception of type {0} occured. Arguments:\n{1!r}"
                message = template.format(type(ex).__name__, ex.args)
                LOGGER.error("MQTT Connection error: " + message)
        else:
            LOGGER.info("MQTT Graceful disconnection.")
    def _log(self, mqttc, userdata, level, string):
        """ Use for debugging MQTT Packets, disable for normal use, NOISY. """
        # LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
        pass
    def _subscribe(self, mqttc, userdata, mid, granted_qos):
        """ Callback for Subscribe message. Unused currently. """
        # LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
        pass
    def _publish(self, mqttc, userdata, mid):
        """ Callback for publish message. Unused currently. """
        # LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
        pass
    def start(self):
        """Start every worker thread (currently only the MQTT socket thread)."""
        for _, thread in self._threads.items():
            thread.start()
    def stop(self):
        """
        The client stop method. If the client is currently connected
        stop the thread and disconnect. Publish the disconnected
        message if clean shutdown.
        """
        # self.loop.call_soon_threadsafe(self.loop.stop)
        # self.loop.stop()
        # self._longPoll.cancel()
        # self._shortPoll.cancel()
        if self.connected:
            LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
            self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
            self._mqttc.loop_stop()
            self._mqttc.disconnect()
        try:
            # Run every observer registered via onStop().
            for watcher in self.__stopObservers:
                watcher()
        except KeyError as e:
            # NOTE(review): KeyError here can only come from inside an observer
            # callback; other exceptions propagate -- confirm intended.
            LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
    def send(self, message):
        """
        Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
        so this method is used to send commands to/from Polyglot and formats it for consumption
        """
        # NOTE(review): the `and self.connected` means a non-dict payload is
        # only rejected while connected; otherwise it falls through to the try
        # and is caught as a TypeError -- confirm this guard is intended.
        if not isinstance(message, dict) and self.connected:
            warnings.warn('payload not a dictionary')
            return False
        try:
            # Tag the message with our profile number so Polyglot can route it.
            message['node'] = self.profileNum
            self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
        except TypeError as err:
            LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
    def addNode(self, node):
        """
        Add a node to the NodeServer
        :param node: Node object with address, name, id (node_def_id), primary, drivers and hint attributes.
        """
        LOGGER.info('Adding node {}({})'.format(node.name, node.address))
        message = {
            'addnode': {
                'nodes': [{
                    'address': node.address,
                    'name': node.name,
                    'node_def_id': node.id,
                    'primary': node.primary,
                    'drivers': node.drivers,
                    'hint': node.hint
                }]
            }
        }
        self.send(message)
    def saveCustomData(self, data):
        """
        Send custom dictionary to Polyglot to save and be retrieved on startup.
        :param data: Dictionary of key value pairs to store in Polyglot database.
        """
        LOGGER.info('Sending customData to Polyglot.')
        message = { 'customdata': data }
        self.send(message)
    def saveCustomParams(self, data):
        """
        Send custom dictionary to Polyglot to save and be retrieved on startup.
        :param data: Dictionary of key value pairs to store in Polyglot database.
        """
        LOGGER.info('Sending customParams to Polyglot.')
        message = { 'customparams': data }
        self.send(message)
    def addNotice(self, data):
        """
        Add custom notice to front-end for this NodeServers
        :param data: String of characters to add as a notification in the front-end.
        """
        LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
        message = { 'addnotice': data }
        self.send(message)
    def removeNotice(self, data):
        """
        Remove a custom notice from the front-end for this NodeServer.
        :param data: Index of notices list to remove.
        """
        LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
        message = { 'removenotice': data }
        self.send(message)
    def restart(self):
        """
        Send a command to Polyglot to restart this NodeServer
        """
        LOGGER.info('Asking Polyglot to restart me.')
        message = {
            'restart': {}
        }
        self.send(message)
    def installprofile(self):
        """Ask Polyglot to (re)install this node server's profile on the ISY."""
        LOGGER.info('Sending Install Profile command to Polyglot.')
        message = { 'installprofile': { 'reboot': False } }
        self.send(message)
    def delNode(self, address):
        """
        Delete a node from the NodeServer
        :param address: Address of the node to remove.
        """
        LOGGER.info('Removing node {}'.format(address))
        message = {
            'removenode': {
                'address': address
            }
        }
        self.send(message)
    def getNode(self, address):
        """
        Get Node by Address of existing nodes.
        Returns the node dictionary, or False when not found or when the
        config has not been received yet.
        """
        try:
            for node in self.config['nodes']:
                if node['address'] == address:
                    return node
            return False
        except KeyError:
            LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
            return False
    def inConfig(self, config):
        """
        Save incoming config received from Polyglot to Interface.config and then do any functions
        that are waiting on the config to be received.
        """
        self.config = config
        self.isyVersion = config['isyVersion']
        try:
            # Notify onConfig() observers, then flush pending config docs.
            for watcher in self.__configObservers:
                watcher(config)
            self.send_custom_config_docs()
        except KeyError as e:
            # NOTE(review): also swallows KeyErrors raised inside observers.
            LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
    def input(self, command):
        """Queue an incoming command for processing by the node server."""
        self.inQueue.put(command)
    def supports_feature(self, feature):
        """Report whether this interface supports *feature*; always True here."""
        return True
    def get_md_file_data(self, fileName):
        """Render a markdown file to HTML; '' when the file does not exist."""
        data = ''
        if os.path.isfile(fileName):
            data = markdown2.markdown_path(fileName)
        return data
    def send_custom_config_docs(self):
        """Assemble and push the custom-configuration documentation to Polyglot."""
        data = ''
        if not self.custom_params_docs_file_sent:
            # First send: render the markdown help file shipped with the node server.
            data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
        else:
            # Subsequent sends: start from the document Polyglot already has.
            data = self.config.get('customParamsDoc', '')
        # send if we're sending new file or there are updates
        if (not self.custom_params_docs_file_sent or
            len(self.custom_params_pending_docs) > 0):
            data += self.custom_params_pending_docs
            self.custom_params_docs_file_sent = True
            self.custom_params_pending_docs = ''
            self.config['customParamsDoc'] = data
            self.send({ 'customparamsdoc': data })
    def add_custom_config_docs(self, data, clearCurrentData=False):
        """Append *data* to the pending config docs and push them to Polyglot."""
        if clearCurrentData:
            self.custom_params_docs_file_sent = False
        self.custom_params_pending_docs += data
        self.send_custom_config_docs()
    def save_typed_params(self, data):
        """
        Send custom parameters descriptions to Polyglot to be used
        in front end UI configuration screen
        Accepts list of objects with the followin properties
            name - used as a key when data is sent from UI
            title - displayed in UI
            defaultValue - optionanl
            type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
                   Defaults to 'STRING'
            desc - optional, shown in tooltip in UI
            isRequired - optional, True/False, when set, will not validate UI
                         input if it's empty
            isList - optional, True/False, if set this will be treated as list
                     of values or objects by UI
            params - optional, can contain a list of objects. If present, then
                     this (parent) is treated as object / list of objects by UI,
                     otherwise, it's treated as a single / list of single values
        """
        LOGGER.info('Sending typed parameters to Polyglot.')
        # NOTE(review): `type(data) is not list` re-wraps list subclasses.
        if type(data) is not list:
            data = [ data ]
        message = { 'typedparams': data }
        self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.stop
|
python
|
def stop(self):
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
|
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L328-L347
| null |
class Interface(object):
    # Markdown file (in the node server's working directory) rendered into the
    # Polyglot UI as configuration help for this node server.
    CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
    # NOTE(review): this string follows the first class attribute, so it is a
    # no-op expression rather than the class docstring (Interface.__doc__ is
    # not populated) -- consider moving it above CUSTOM_CONFIG_DOCS_FILE_NAME.
    """
    Polyglot Interface Class
    :param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
    """
    # pylint: disable=too-many-instance-attributes
    # pylint: disable=unused-argument
    # Class-level singleton guard: only one Interface may ever be created.
    __exists = False
    def __init__(self, envVar=None):
        if self.__exists:
            warnings.warn('Only one Interface is allowed.')
            return
        self.connected = False
        # Resolve the profile number: PROFILE_NUM env var first, then the
        # variable named by envVar (loaded from ~/.polyglot/.env).
        self.profileNum = os.environ.get("PROFILE_NUM")
        if self.profileNum is None:
            if envVar is not None:
                self.profileNum = os.environ.get(envVar)
        if self.profileNum is None:
            LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
            sys.exit(1)
        self.profileNum = str(self.profileNum)
        # MQTT topics used for Polyglot <-> node server messaging.
        self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
        self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
        self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
        self._threads = {}
        self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
        self._mqttc = mqtt.Client(envVar, True)
        # self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
        self._mqttc.on_connect = self._connect
        self._mqttc.on_message = self._message
        self._mqttc.on_subscribe = self._subscribe
        self._mqttc.on_disconnect = self._disconnect
        self._mqttc.on_publish = self._publish
        self._mqttc.on_log = self._log
        self.useSecure = True
        if 'USE_HTTPS' in os.environ:
            # NOTE(review): environment values are strings, so after this
            # assignment `self.useSecure is True` below is always False and
            # the TLS setup is skipped whenever USE_HTTPS is set -- confirm.
            self.useSecure = os.environ['USE_HTTPS']
        if self.useSecure is True:
            if 'MQTT_CERTPATH' in os.environ:
                self._mqttc.tls_set(
                    ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
                    certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
                    keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
                    tls_version=ssl.PROTOCOL_TLSv1_2)
            else:
                # Default certificate locations under the user's ~/.polyglot/ssl.
                self._mqttc.tls_set(
                    ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
                    certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
                    keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
                    tls_version=ssl.PROTOCOL_TLSv1_2
                )
        # self._mqttc.tls_insecure_set(True)
        # self._mqttc.enable_logger(logger=LOGGER)
        self.config = None
        # self.loop = asyncio.new_event_loop()
        self.loop = None
        self.inQueue = queue.Queue()
        # self.thread = Thread(target=self.start_loop)
        self.isyVersion = None
        # Broker location, overridable via MQTT_HOST / MQTT_PORT env vars.
        self._server = os.environ.get("MQTT_HOST") or 'localhost'
        self._port = os.environ.get("MQTT_PORT") or '1883'
        self.polyglotConnected = False
        self.__configObservers = []
        self.__stopObservers = []
        Interface.__exists = True
        self.custom_params_docs_file_sent = False
        self.custom_params_pending_docs = ''
    def onConfig(self, callback):
        """
        Gives the ability to bind any methods to be run when the config is received.
        """
        self.__configObservers.append(callback)
    def onStop(self, callback):
        """
        Gives the ability to bind any methods to be run when the stop command is received.
        """
        self.__stopObservers.append(callback)
    def _connect(self, mqttc, userdata, flags, rc):
        """
        The callback for when the client receives a CONNACK response from the server.
        Subscribing in on_connect() means that if we lose the connection and
        reconnect then subscriptions will be renewed.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param flags: The flags set on the connection.
        :param rc: Result code of connection, 0 = Success, anything else is a failure
        """
        if rc == 0:
            self.connected = True
            results = []
            LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
            # result, mid = self._mqttc.subscribe(self.topicInput)
            results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
            results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
            for (topic, (result, mid)) in results:
                if result == 0:
                    LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
                else:
                    LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
                    # If subscription fails, try to reconnect.
                    self._mqttc.reconnect()
            # Announce this node server as connected (retained so Polyglot sees
            # the state even after its own restart).
            self._mqttc.publish(self.topicSelfConnection, json.dumps(
                {
                    'connected': True,
                    'node': self.profileNum
                }), retain=True)
            LOGGER.info('Sent Connected message to Polyglot')
        else:
            LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
    def _message(self, mqttc, userdata, msg):
        """
        The callback for when a PUBLISH message is received from the server.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param msg: MQTT received message. Uses: msg.topic, msg.qos, msg.payload
        """
        try:
            inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
            parsed_msg = json.loads(msg.payload.decode('utf-8'))
            # Only process messages originating from Polyglot itself.
            if 'node' in parsed_msg:
                if parsed_msg['node'] != 'polyglot':
                    return
                del parsed_msg['node']
                for key in parsed_msg:
                    # LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
                    if key == 'config':
                        self.inConfig(parsed_msg[key])
                    elif key == 'connected':
                        self.polyglotConnected = parsed_msg[key]
                    elif key == 'stop':
                        LOGGER.debug('Received stop from Polyglot... Shutting Down.')
                        self.stop()
                    elif key in inputCmds:
                        self.input(parsed_msg)
                    else:
                        LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
        except (ValueError) as err:
            # NOTE(review): only ValueError (bad JSON/decode) is handled here;
            # any other exception propagates into the paho-mqtt loop -- confirm.
            LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
    def _disconnect(self, mqttc, userdata, rc):
        """
        The callback for when a DISCONNECT occurs.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param rc: Result code of connection, 0 = Graceful, anything else is unclean
        """
        self.connected = False
        if rc != 0:
            LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
            try:
                self._mqttc.reconnect()
            except Exception as ex:
                template = "An exception of type {0} occured. Arguments:\n{1!r}"
                message = template.format(type(ex).__name__, ex.args)
                LOGGER.error("MQTT Connection error: " + message)
        else:
            LOGGER.info("MQTT Graceful disconnection.")
    def _log(self, mqttc, userdata, level, string):
        """ Use for debugging MQTT Packets, disable for normal use, NOISY. """
        # LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
        pass
    def _subscribe(self, mqttc, userdata, mid, granted_qos):
        """ Callback for Subscribe message. Unused currently. """
        # LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
        pass
    def _publish(self, mqttc, userdata, mid):
        """ Callback for publish message. Unused currently. """
        # LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
        pass
    def start(self):
        """Start every worker thread (currently only the MQTT socket thread)."""
        for _, thread in self._threads.items():
            thread.start()
    def _startMqtt(self):
        """
        The client start method. Starts the thread for the MQTT Client
        and publishes the connected message.
        """
        LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
        try:
            # self._mqttc.connect_async(str(self._server), int(self._port), 10)
            # Blocking network loop; this method runs on the 'Interface' thread.
            self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
            self._mqttc.loop_forever()
        except Exception as ex:
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
    def send(self, message):
        """
        Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
        so this method is used to send commands to/from Polyglot and formats it for consumption
        """
        # NOTE(review): the `and self.connected` means a non-dict payload is
        # only rejected while connected; otherwise it falls through to the try
        # and is caught as a TypeError -- confirm this guard is intended.
        if not isinstance(message, dict) and self.connected:
            warnings.warn('payload not a dictionary')
            return False
        try:
            # Tag the message with our profile number so Polyglot can route it.
            message['node'] = self.profileNum
            self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
        except TypeError as err:
            LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
    def addNode(self, node):
        """
        Add a node to the NodeServer
        :param node: Node object with address, name, id (node_def_id), primary, drivers and hint attributes.
        """
        LOGGER.info('Adding node {}({})'.format(node.name, node.address))
        message = {
            'addnode': {
                'nodes': [{
                    'address': node.address,
                    'name': node.name,
                    'node_def_id': node.id,
                    'primary': node.primary,
                    'drivers': node.drivers,
                    'hint': node.hint
                }]
            }
        }
        self.send(message)
    def saveCustomData(self, data):
        """
        Send custom dictionary to Polyglot to save and be retrieved on startup.
        :param data: Dictionary of key value pairs to store in Polyglot database.
        """
        LOGGER.info('Sending customData to Polyglot.')
        message = { 'customdata': data }
        self.send(message)
    def saveCustomParams(self, data):
        """
        Send custom dictionary to Polyglot to save and be retrieved on startup.
        :param data: Dictionary of key value pairs to store in Polyglot database.
        """
        LOGGER.info('Sending customParams to Polyglot.')
        message = { 'customparams': data }
        self.send(message)
    def addNotice(self, data):
        """
        Add custom notice to front-end for this NodeServers
        :param data: String of characters to add as a notification in the front-end.
        """
        LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
        message = { 'addnotice': data }
        self.send(message)
    def removeNotice(self, data):
        """
        Remove a custom notice from the front-end for this NodeServer.
        :param data: Index of notices list to remove.
        """
        LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
        message = { 'removenotice': data }
        self.send(message)
    def restart(self):
        """
        Send a command to Polyglot to restart this NodeServer
        """
        LOGGER.info('Asking Polyglot to restart me.')
        message = {
            'restart': {}
        }
        self.send(message)
    def installprofile(self):
        """Ask Polyglot to (re)install this node server's profile on the ISY."""
        LOGGER.info('Sending Install Profile command to Polyglot.')
        message = { 'installprofile': { 'reboot': False } }
        self.send(message)
    def delNode(self, address):
        """
        Delete a node from the NodeServer
        :param address: Address of the node to remove.
        """
        LOGGER.info('Removing node {}'.format(address))
        message = {
            'removenode': {
                'address': address
            }
        }
        self.send(message)
    def getNode(self, address):
        """
        Get Node by Address of existing nodes.
        Returns the node dictionary, or False when not found or when the
        config has not been received yet.
        """
        try:
            for node in self.config['nodes']:
                if node['address'] == address:
                    return node
            return False
        except KeyError:
            LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
            return False
    def inConfig(self, config):
        """
        Save incoming config received from Polyglot to Interface.config and then do any functions
        that are waiting on the config to be received.
        """
        self.config = config
        self.isyVersion = config['isyVersion']
        try:
            # Notify onConfig() observers, then flush pending config docs.
            for watcher in self.__configObservers:
                watcher(config)
            self.send_custom_config_docs()
        except KeyError as e:
            # NOTE(review): also swallows KeyErrors raised inside observers.
            LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
    def input(self, command):
        """Queue an incoming command for processing by the node server."""
        self.inQueue.put(command)
    def supports_feature(self, feature):
        """Report whether this interface supports *feature*; always True here."""
        return True
    def get_md_file_data(self, fileName):
        """Render a markdown file to HTML; '' when the file does not exist."""
        data = ''
        if os.path.isfile(fileName):
            data = markdown2.markdown_path(fileName)
        return data
    def send_custom_config_docs(self):
        """Assemble and push the custom-configuration documentation to Polyglot."""
        data = ''
        if not self.custom_params_docs_file_sent:
            # First send: render the markdown help file shipped with the node server.
            data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
        else:
            # Subsequent sends: start from the document Polyglot already has.
            data = self.config.get('customParamsDoc', '')
        # send if we're sending new file or there are updates
        if (not self.custom_params_docs_file_sent or
            len(self.custom_params_pending_docs) > 0):
            data += self.custom_params_pending_docs
            self.custom_params_docs_file_sent = True
            self.custom_params_pending_docs = ''
            self.config['customParamsDoc'] = data
            self.send({ 'customparamsdoc': data })
    def add_custom_config_docs(self, data, clearCurrentData=False):
        """Append *data* to the pending config docs and push them to Polyglot."""
        if clearCurrentData:
            self.custom_params_docs_file_sent = False
        self.custom_params_pending_docs += data
        self.send_custom_config_docs()
    def save_typed_params(self, data):
        """
        Send custom parameters descriptions to Polyglot to be used
        in front end UI configuration screen
        Accepts list of objects with the followin properties
            name - used as a key when data is sent from UI
            title - displayed in UI
            defaultValue - optionanl
            type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
                   Defaults to 'STRING'
            desc - optional, shown in tooltip in UI
            isRequired - optional, True/False, when set, will not validate UI
                         input if it's empty
            isList - optional, True/False, if set this will be treated as list
                     of values or objects by UI
            params - optional, can contain a list of objects. If present, then
                     this (parent) is treated as object / list of objects by UI,
                     otherwise, it's treated as a single / list of single values
        """
        LOGGER.info('Sending typed parameters to Polyglot.')
        # NOTE(review): `type(data) is not list` re-wraps list subclasses.
        if type(data) is not list:
            data = [ data ]
        message = { 'typedparams': data }
        self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.send
|
python
|
def send(self, message):
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
|
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L349-L361
| null |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
    def _connect(self, mqttc, userdata, flags, rc):
        """
        The callback for when the client receives a CONNACK response from the server.
        Subscribing in on_connect() means that if we lose the connection and
        reconnect then subscriptions will be renewed.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param flags: The flags set on the connection.
        :param rc: Result code of connection, 0 = Success, anything else is a failure
        """
        if rc == 0:
            self.connected = True
            results = []
            LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
            # result, mid = self._mqttc.subscribe(self.topicInput)
            # Subscribe to both this NodeServer's input topic and the shared
            # Polyglot connection topic; record (topic, (result, mid)) pairs.
            results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
            results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
            for (topic, (result, mid)) in results:
                if result == 0:
                    LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
                else:
                    LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
                    # If subscription fails, try to reconnect.
                    self._mqttc.reconnect()
            # Announce this NodeServer as connected; retained so that late
            # subscribers still see the current connection state.
            self._mqttc.publish(self.topicSelfConnection, json.dumps(
                {
                    'connected': True,
                    'node': self.profileNum
                }), retain=True)
            LOGGER.info('Sent Connected message to Polyglot')
        else:
            LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
    def _message(self, mqttc, userdata, msg):
        """
        The callback for when a PUBLISH message is received from the server.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param flags: The flags set on the connection.
        :param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
        """
        try:
            inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
            parsed_msg = json.loads(msg.payload.decode('utf-8'))
            if 'node' in parsed_msg:
                # Only act on messages originating from Polyglot itself.
                if parsed_msg['node'] != 'polyglot':
                    return
                del parsed_msg['node']
                for key in parsed_msg:
                    # LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
                    if key == 'config':
                        self.inConfig(parsed_msg[key])
                    elif key == 'connected':
                        self.polyglotConnected = parsed_msg[key]
                    elif key == 'stop':
                        LOGGER.debug('Received stop from Polyglot... Shutting Down.')
                        self.stop()
                    elif key in inputCmds:
                        # Poll/command style messages are queued for the
                        # NodeServer's processing loop (see input()).
                        self.input(parsed_msg)
                    else:
                        LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
        except (ValueError) as err:
            # json.loads raises ValueError (JSONDecodeError) on bad payloads.
            LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
    def _disconnect(self, mqttc, userdata, rc):
        """
        The callback for when a DISCONNECT occurs.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param rc: Result code of connection, 0 = Graceful, anything else is unclean
        """
        self.connected = False
        if rc != 0:
            LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
            try:
                self._mqttc.reconnect()
            except Exception as ex:
                # Broad catch is deliberate: a reconnect failure is logged
                # rather than allowed to kill the MQTT network thread.
                template = "An exception of type {0} occured. Arguments:\n{1!r}"
                message = template.format(type(ex).__name__, ex.args)
                LOGGER.error("MQTT Connection error: " + message)
        else:
            LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
    def _startMqtt(self):
        """
        The client start method. Starts the thread for the MQTT Client
        and publishes the connected message.
        """
        LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
        try:
            # self._mqttc.connect_async(str(self._server), int(self._port), 10)
            # connect_async + loop_forever: paho retries the initial connect
            # itself, and this call blocks until disconnect() is requested.
            self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
            self._mqttc.loop_forever()
        except Exception as ex:
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
    def stop(self):
        """
        The client stop method. If the client is currently connected
        stop the thread and disconnect. Publish the disconnected
        message if clean shutdown.
        """
        # self.loop.call_soon_threadsafe(self.loop.stop)
        # self.loop.stop()
        # self._longPoll.cancel()
        # self._shortPoll.cancel()
        if self.connected:
            LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
            # Retained 'connected: False' so Polyglot sees the clean shutdown.
            self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
            self._mqttc.loop_stop()
            self._mqttc.disconnect()
        try:
            # Run user-registered shutdown hooks (see onStop()).
            for watcher in self.__stopObservers:
                watcher()
        except KeyError as e:
            # NOTE(review): KeyError looks copied from inConfig(); hooks that
            # raise anything else will propagate — confirm intent.
            LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
    def send_custom_config_docs(self):
        """Assemble the custom-parameters documentation and send it to Polyglot.

        First call: renders POLYGLOT_CONFIG.md to HTML. Later calls: reuses
        the blob Polyglot already stored in the config. Pending additions
        queued by add_custom_config_docs() are appended before sending.
        """
        data = ''
        if not self.custom_params_docs_file_sent:
            # First send (or a forced refresh): render the markdown file.
            data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
        else:
            # Re-send: start from the copy Polyglot already holds.
            data = self.config.get('customParamsDoc', '')
        # send if we're sending new file or there are updates
        if (not self.custom_params_docs_file_sent or
            len(self.custom_params_pending_docs) > 0):
            data += self.custom_params_pending_docs
            self.custom_params_docs_file_sent = True
            self.custom_params_pending_docs = ''
            self.config['customParamsDoc'] = data
            self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.addNode
|
python
|
def addNode(self, node):
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
|
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L363-L382
|
[
"def send(self, message):\n \"\"\"\n Formatted Message to send to Polyglot. Connection messages are sent automatically from this module\n so this method is used to send commands to/from Polyglot and formats it for consumption\n \"\"\"\n if not isinstance(message, dict) and self.connected:\n warnings.warn('payload not a dictionary')\n return False\n try:\n message['node'] = self.profileNum\n self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)\n except TypeError as err:\n LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
    def __init__(self, envVar=None):
        """Set up the MQTT client, topics and state; does not connect yet.

        :param envVar: Name of the ~/.polyglot/.env variable holding this
                       NodeServer's profile number (fallback when the
                       PROFILE_NUM environment variable is unset).
        """
        if self.__exists:
            # Enforce the singleton: a second Interface is refused.
            warnings.warn('Only one Interface is allowed.')
            return
        self.connected = False
        self.profileNum = os.environ.get("PROFILE_NUM")
        if self.profileNum is None:
            if envVar is not None:
                self.profileNum = os.environ.get(envVar)
        if self.profileNum is None:
            LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
            sys.exit(1)
        self.profileNum = str(self.profileNum)
        self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
        self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
        self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
        self._threads = {}
        self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
        self._mqttc = mqtt.Client(envVar, True)
        # self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
        self._mqttc.on_connect = self._connect
        self._mqttc.on_message = self._message
        self._mqttc.on_subscribe = self._subscribe
        self._mqttc.on_disconnect = self._disconnect
        self._mqttc.on_publish = self._publish
        self._mqttc.on_log = self._log
        self.useSecure = True
        if 'USE_HTTPS' in os.environ:
            # NOTE(review): os.environ values are strings, so the
            # 'is True' test below is False whenever USE_HTTPS is set,
            # silently disabling TLS — confirm intended behavior.
            self.useSecure = os.environ['USE_HTTPS']
        if self.useSecure is True:
            if 'MQTT_CERTPATH' in os.environ:
                self._mqttc.tls_set(
                    ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
                    certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
                    keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
                    tls_version=ssl.PROTOCOL_TLSv1_2)
            else:
                # Default certificate locations under ~/.polyglot/ssl.
                self._mqttc.tls_set(
                    ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
                    certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
                    keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
                    tls_version=ssl.PROTOCOL_TLSv1_2
                )
        # self._mqttc.tls_insecure_set(True)
        # self._mqttc.enable_logger(logger=LOGGER)
        self.config = None
        # self.loop = asyncio.new_event_loop()
        self.loop = None
        self.inQueue = queue.Queue()
        # self.thread = Thread(target=self.start_loop)
        self.isyVersion = None
        # Environment overrides with local defaults for the broker address.
        self._server = os.environ.get("MQTT_HOST") or 'localhost'
        self._port = os.environ.get("MQTT_PORT") or '1883'
        self.polyglotConnected = False
        self.__configObservers = []
        self.__stopObservers = []
        Interface.__exists = True
        self.custom_params_docs_file_sent = False
        self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
    def _connect(self, mqttc, userdata, flags, rc):
        """
        The callback for when the client receives a CONNACK response from the server.
        Subscribing in on_connect() means that if we lose the connection and
        reconnect then subscriptions will be renewed.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param flags: The flags set on the connection.
        :param rc: Result code of connection, 0 = Success, anything else is a failure
        """
        if rc == 0:
            self.connected = True
            results = []
            LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
            # result, mid = self._mqttc.subscribe(self.topicInput)
            # Subscribe to both this NodeServer's input topic and the shared
            # Polyglot connection topic; record (topic, (result, mid)) pairs.
            results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
            results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
            for (topic, (result, mid)) in results:
                if result == 0:
                    LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
                else:
                    LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
                    # If subscription fails, try to reconnect.
                    self._mqttc.reconnect()
            # Announce this NodeServer as connected; retained so that late
            # subscribers still see the current connection state.
            self._mqttc.publish(self.topicSelfConnection, json.dumps(
                {
                    'connected': True,
                    'node': self.profileNum
                }), retain=True)
            LOGGER.info('Sent Connected message to Polyglot')
        else:
            LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
    def _message(self, mqttc, userdata, msg):
        """
        The callback for when a PUBLISH message is received from the server.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param flags: The flags set on the connection.
        :param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
        """
        try:
            inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
            parsed_msg = json.loads(msg.payload.decode('utf-8'))
            if 'node' in parsed_msg:
                # Only act on messages originating from Polyglot itself.
                if parsed_msg['node'] != 'polyglot':
                    return
                del parsed_msg['node']
                for key in parsed_msg:
                    # LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
                    if key == 'config':
                        self.inConfig(parsed_msg[key])
                    elif key == 'connected':
                        self.polyglotConnected = parsed_msg[key]
                    elif key == 'stop':
                        LOGGER.debug('Received stop from Polyglot... Shutting Down.')
                        self.stop()
                    elif key in inputCmds:
                        # Poll/command style messages are queued for the
                        # NodeServer's processing loop (see input()).
                        self.input(parsed_msg)
                    else:
                        LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
        except (ValueError) as err:
            # json.loads raises ValueError (JSONDecodeError) on bad payloads.
            LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
    def _disconnect(self, mqttc, userdata, rc):
        """
        The callback for when a DISCONNECT occurs.
        :param mqttc: The client instance for this callback
        :param userdata: The private userdata for the mqtt client. Not used in Polyglot
        :param rc: Result code of connection, 0 = Graceful, anything else is unclean
        """
        self.connected = False
        if rc != 0:
            LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
            try:
                self._mqttc.reconnect()
            except Exception as ex:
                # Broad catch is deliberate: a reconnect failure is logged
                # rather than allowed to kill the MQTT network thread.
                template = "An exception of type {0} occured. Arguments:\n{1!r}"
                message = template.format(type(ex).__name__, ex.args)
                LOGGER.error("MQTT Connection error: " + message)
        else:
            LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
    def _startMqtt(self):
        """
        The client start method. Starts the thread for the MQTT Client
        and publishes the connected message.
        """
        LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
        try:
            # self._mqttc.connect_async(str(self._server), int(self._port), 10)
            # connect_async + loop_forever: paho retries the initial connect
            # itself, and this call blocks until disconnect() is requested.
            self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
            self._mqttc.loop_forever()
        except Exception as ex:
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
    def stop(self):
        """
        The client stop method. If the client is currently connected
        stop the thread and disconnect. Publish the disconnected
        message if clean shutdown.
        """
        # self.loop.call_soon_threadsafe(self.loop.stop)
        # self.loop.stop()
        # self._longPoll.cancel()
        # self._shortPoll.cancel()
        if self.connected:
            LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
            # Retained 'connected: False' so Polyglot sees the clean shutdown.
            self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
            self._mqttc.loop_stop()
            self._mqttc.disconnect()
        try:
            # Run user-registered shutdown hooks (see onStop()).
            for watcher in self.__stopObservers:
                watcher()
        except KeyError as e:
            # NOTE(review): KeyError looks copied from inConfig(); hooks that
            # raise anything else will propagate — confirm intent.
            LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
    def send_custom_config_docs(self):
        """Assemble the custom-parameters documentation and send it to Polyglot.

        First call: renders POLYGLOT_CONFIG.md to HTML. Later calls: reuses
        the blob Polyglot already stored in the config. Pending additions
        queued by add_custom_config_docs() are appended before sending.
        """
        data = ''
        if not self.custom_params_docs_file_sent:
            # First send (or a forced refresh): render the markdown file.
            data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
        else:
            # Re-send: start from the copy Polyglot already holds.
            data = self.config.get('customParamsDoc', '')
        # send if we're sending new file or there are updates
        if (not self.custom_params_docs_file_sent or
            len(self.custom_params_pending_docs) > 0):
            data += self.custom_params_pending_docs
            self.custom_params_docs_file_sent = True
            self.custom_params_pending_docs = ''
            self.config['customParamsDoc'] = data
            self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.saveCustomData
|
python
|
def saveCustomData(self, data):
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
|
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L384-L392
|
[
"def send(self, message):\n \"\"\"\n Formatted Message to send to Polyglot. Connection messages are sent automatically from this module\n so this method is used to send commands to/from Polyglot and formats it for consumption\n \"\"\"\n if not isinstance(message, dict) and self.connected:\n warnings.warn('payload not a dictionary')\n return False\n try:\n message['node'] = self.profileNum\n self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)\n except TypeError as err:\n LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.saveCustomParams
|
python
|
def saveCustomParams(self, data):
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
|
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L394-L402
|
[
"def send(self, message):\n \"\"\"\n Formatted Message to send to Polyglot. Connection messages are sent automatically from this module\n so this method is used to send commands to/from Polyglot and formats it for consumption\n \"\"\"\n if not isinstance(message, dict) and self.connected:\n warnings.warn('payload not a dictionary')\n return False\n try:\n message['node'] = self.profileNum\n self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)\n except TypeError as err:\n LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
    """
    Build the MQTT client, topics and internal state; does not connect
    (connection happens on :meth:`start`).

    :param envVar: Name of the environment variable holding this NodeServer's
        profile number, used only when PROFILE_NUM is not set.
    """
    # Enforce the singleton: a second Interface only warns and bails out.
    if self.__exists:
        warnings.warn('Only one Interface is allowed.')
        return
    self.connected = False
    # Profile number: PROFILE_NUM wins, then the caller-named env var.
    self.profileNum = os.environ.get("PROFILE_NUM")
    if self.profileNum is None:
        if envVar is not None:
            self.profileNum = os.environ.get(envVar)
    if self.profileNum is None:
        LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
        sys.exit(1)
    self.profileNum = str(self.profileNum)
    # MQTT topics for Polyglot <-> NodeServer traffic.
    self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
    self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
    self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
    self._threads = {}
    self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
    self._mqttc = mqtt.Client(envVar, True)
    # self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
    self._mqttc.on_connect = self._connect
    self._mqttc.on_message = self._message
    self._mqttc.on_subscribe = self._subscribe
    self._mqttc.on_disconnect = self._disconnect
    self._mqttc.on_publish = self._publish
    self._mqttc.on_log = self._log
    # TLS is on by default; USE_HTTPS may override (NOTE(review): the env value
    # is a string, so the `is True` check below only passes for the default —
    # confirm intended).
    self.useSecure = True
    if 'USE_HTTPS' in os.environ:
        self.useSecure = os.environ['USE_HTTPS']
    if self.useSecure is True:
        # Client certificates: explicit MQTT_CERTPATH wins, else ~/.polyglot/ssl.
        if 'MQTT_CERTPATH' in os.environ:
            self._mqttc.tls_set(
                ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
                certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
                keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
                tls_version=ssl.PROTOCOL_TLSv1_2)
        else:
            self._mqttc.tls_set(
                ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
                certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
                keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
                tls_version=ssl.PROTOCOL_TLSv1_2
            )
    # self._mqttc.tls_insecure_set(True)
    # self._mqttc.enable_logger(logger=LOGGER)
    self.config = None
    # self.loop = asyncio.new_event_loop()
    self.loop = None
    self.inQueue = queue.Queue()
    # self.thread = Thread(target=self.start_loop)
    self.isyVersion = None
    self._server = os.environ.get("MQTT_HOST") or 'localhost'
    self._port = os.environ.get("MQTT_PORT") or '1883'
    self.polyglotConnected = False
    self.__configObservers = []
    self.__stopObservers = []
    Interface.__exists = True
    self.custom_params_docs_file_sent = False
    self.custom_params_pending_docs = ''
def onConfig(self, callback):
    """
    Gives the ability to bind any methods to be run when the config is received.

    :param callback: Callable invoked with the config dict on every update
        (see :meth:`inConfig`).
    """
    self.__configObservers.append(callback)
def onStop(self, callback):
    """
    Gives the ability to bind any methods to be run when the stop command is received.

    :param callback: Zero-argument callable invoked from :meth:`stop`.
    """
    self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
    """
    The callback for when the client receives a CONNACK response from the server.
    Subscribing in on_connect() means that if we lose the connection and
    reconnect then subscriptions will be renewed.

    :param mqttc: The client instance for this callback
    :param userdata: The private userdata for the mqtt client. Not used in Polyglot
    :param flags: The flags set on the connection.
    :param rc: Result code of connection, 0 = Success, anything else is a failure
    """
    if rc == 0:
        self.connected = True
        results = []
        LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
        # result, mid = self._mqttc.subscribe(self.topicInput)
        # Subscribe to our own input topic and Polyglot's connection topic;
        # collect (topic, (result, mid)) pairs for the logging loop below.
        results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
        results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
        for (topic, (result, mid)) in results:
            if result == 0:
                LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
            else:
                LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
                # If subscription fails, try to reconnect.
                self._mqttc.reconnect()
        # Retained message so Polyglot sees us as connected even if it reads
        # the topic after this publish.
        self._mqttc.publish(self.topicSelfConnection, json.dumps(
            {
                'connected': True,
                'node': self.profileNum
            }), retain=True)
        LOGGER.info('Sent Connected message to Polyglot')
    else:
        LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
    """
    The callback for when a PUBLISH message is received from the server.

    :param mqttc: The client instance for this callback
    :param userdata: The private userdata for the mqtt client. Not used in Polyglot
    :param msg: MQTT message object. Uses: msg.topic, msg.qos, msg.payload
    """
    try:
        # Keys whose payloads are forwarded verbatim to the NodeServer queue.
        inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
        parsed_msg = json.loads(msg.payload.decode('utf-8'))
        if 'node' in parsed_msg:
            # Only act on messages addressed from Polyglot itself; drop the
            # routing key before dispatching on the remaining keys.
            if parsed_msg['node'] != 'polyglot':
                return
            del parsed_msg['node']
        for key in parsed_msg:
            # LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
            if key == 'config':
                self.inConfig(parsed_msg[key])
            elif key == 'connected':
                self.polyglotConnected = parsed_msg[key]
            elif key == 'stop':
                LOGGER.debug('Received stop from Polyglot... Shutting Down.')
                self.stop()
            elif key in inputCmds:
                self.input(parsed_msg)
            else:
                LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
    except (ValueError) as err:
        # json.loads raises ValueError (JSONDecodeError) on malformed payloads.
        LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
    """
    The callback for when a DISCONNECT occurs.

    :param mqttc: The client instance for this callback
    :param userdata: The private userdata for the mqtt client. Not used in Polyglot
    :param rc: Result code of connection, 0 = Graceful, anything else is unclean
    """
    self.connected = False
    if rc != 0:
        LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
        try:
            self._mqttc.reconnect()
        except Exception as ex:
            # Fixed "occured" -> "occurred" so the message matches _startMqtt's.
            template = "An exception of type {0} occurred. Arguments:\n{1!r}"
            message = template.format(type(ex).__name__, ex.args)
            LOGGER.error("MQTT Connection error: " + message)
    else:
        LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
    """ Use for debugging MQTT Packets, disable for normal use, NOISY. """
    # Intentionally a no-op; uncomment the line below to trace raw MQTT logs.
    # LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
    pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
    """ Callback for Subscribe message. Unused currently. """
    # LOGGER.info("MQTT Subscribed Successfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
    pass
def _publish(self, mqttc, userdata, mid):
    """ Callback for publish message. Unused currently. """
    # No-op: publish confirmations are not tracked.
    # LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
    pass
def start(self):
    """Kick off every worker thread registered on this interface."""
    for worker in self._threads.values():
        worker.start()
def _startMqtt(self):
    """
    The client start method. Starts the thread for the MQTT Client
    and publishes the connected message.
    """
    LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
    try:
        # self._mqttc.connect_async(str(self._server), int(self._port), 10)
        # loop_forever() blocks this thread until disconnect; keepalive is 10s.
        self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
        self._mqttc.loop_forever()
    except Exception as ex:
        template = "An exception of type {0} occurred. Arguments:\n{1!r}"
        message = template.format(type(ex).__name__, ex.args)
        LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
    """
    The client stop method. If the client is currently connected
    stop the thread and disconnect. Publish the disconnected
    message if clean shutdown, then run every registered stop observer.
    """
    if self.connected:
        LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
        # Retained "connected: False" lets Polyglot see the clean shutdown.
        self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
        self._mqttc.loop_stop()
        self._mqttc.disconnect()
    try:
        for watcher in self.__stopObservers:
            watcher()
    except KeyError as e:
        # Fixed log text: it previously said "gotConfig" (copy/paste from inConfig).
        LOGGER.exception('KeyError in stop watcher: {}'.format(e), exc_info=True)
def send(self, message):
    """
    Formatted Message to send to Polyglot. Connection messages are sent
    automatically from this module so this method is used to send commands
    to/from Polyglot and formats it for consumption.

    :param message: dict payload; the profile number is stamped in as 'node'.
    :returns: False when *message* is not a dict, otherwise None.
    """
    # Bug fix: the old guard was `not isinstance(...) and self.connected`, so a
    # non-dict payload slipped through to publish() whenever we were offline.
    if not isinstance(message, dict):
        warnings.warn('payload not a dictionary')
        return False
    try:
        message['node'] = self.profileNum
        self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
    except TypeError as err:
        LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
    """
    Add a node to the NodeServer.

    :param node: Node object providing address, name, id, primary, drivers
        and hint attributes.
    """
    LOGGER.info('Adding node {}({})'.format(node.name, node.address))
    node_payload = {
        'address': node.address,
        'name': node.name,
        'node_def_id': node.id,
        'primary': node.primary,
        'drivers': node.drivers,
        'hint': node.hint
    }
    self.send({'addnode': {'nodes': [node_payload]}})
def saveCustomData(self, data):
    """
    Send a custom dictionary to Polyglot to persist and retrieve on startup.

    :param data: Dictionary of key/value pairs to store in the Polyglot database.
    """
    LOGGER.info('Sending customData to Polyglot.')
    self.send({'customdata': data})
def addNotice(self, data):
    """
    Add a custom notice to the front-end for this NodeServer.

    :param data: Notification text to display in the front-end.
    """
    LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
    self.send({'addnotice': data})
def removeNotice(self, data):
    """
    Remove a custom notice from the front-end for this NodeServer.

    :param data: Index of notices list to remove.
    """
    LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
    message = { 'removenotice': data }
    self.send(message)
def restart(self):
    """Ask Polyglot to restart this NodeServer."""
    LOGGER.info('Asking Polyglot to restart me.')
    self.send({'restart': {}})
def installprofile(self):
    """Ask Polyglot to (re)install this NodeServer's profile on the ISY."""
    LOGGER.info('Sending Install Profile command to Polyglot.')
    # 'reboot': False — do not restart the ISY after installing the profile.
    message = { 'installprofile': { 'reboot': False } }
    self.send(message)
def delNode(self, address):
    """
    Delete a node from the NodeServer.

    :param address: Address string of the node to remove.
    """
    LOGGER.info('Removing node {}'.format(address))
    self.send({'removenode': {'address': address}})
def getNode(self, address):
    """
    Get Node by Address of existing nodes.

    :param address: Address string to look up.
    :returns: The matching node dict, or False when absent or when the
        config has not been received yet.
    """
    try:
        for candidate in self.config['nodes']:
            if candidate['address'] == address:
                return candidate
    except KeyError:
        LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
        return False
    return False
def inConfig(self, config):
    """
    Save incoming config received from Polyglot to Interface.config and then do any functions
    that are waiting on the config to be received.

    :param config: dict from Polyglot; expected to contain 'isyVersion'
        (raises KeyError here if absent).
    """
    self.config = config
    self.isyVersion = config['isyVersion']
    try:
        # Notify every onConfig() observer, then refresh the custom-params
        # documentation shown in the front-end.
        for watcher in self.__configObservers:
            watcher(config)
        self.send_custom_config_docs()
    except KeyError as e:
        LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
    """Queue an inbound command from Polyglot for the NodeServer to process."""
    self.inQueue.put(command)
def supports_feature(self, feature):
    """Report whether *feature* is supported; this interface accepts all features."""
    return True
def get_md_file_data(self, fileName):
    """Render the markdown file *fileName* to HTML; return '' when it is missing."""
    if not os.path.isfile(fileName):
        return ''
    return markdown2.markdown_path(fileName)
def send_custom_config_docs(self):
    """Publish the custom-params markdown docs (base file plus pending additions) to Polyglot."""
    data = ''
    if not self.custom_params_docs_file_sent:
        # First send: load the NodeServer's POLYGLOT_CONFIG.md from disk.
        data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
    else:
        # Later sends: start from what the config already holds.
        data = self.config.get('customParamsDoc', '')
    # send if we're sending new file or there are updates
    if (not self.custom_params_docs_file_sent or
            len(self.custom_params_pending_docs) > 0):
        data += self.custom_params_pending_docs
        self.custom_params_docs_file_sent = True
        self.custom_params_pending_docs = ''
        self.config['customParamsDoc'] = data
        self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
    """
    Append markdown *data* to the pending custom-params docs and republish them.

    :param clearCurrentData: when True, forget that the docs file was already
        sent so the base file is re-read and re-sent.
    """
    self.custom_params_pending_docs += data
    if clearCurrentData:
        self.custom_params_docs_file_sent = False
    self.send_custom_config_docs()
def save_typed_params(self, data):
    """
    Send custom parameter descriptions to Polyglot to be used
    in the front-end UI configuration screen.

    Accepts a single object or a list of objects with the following properties:
        name - used as a key when data is sent from UI
        title - displayed in UI
        defaultValue - optional
        type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
            Defaults to 'STRING'
        desc - optional, shown in tooltip in UI
        isRequired - optional, True/False; when set, UI will not validate
            empty input
        isList - optional, True/False; if set this will be treated as list
            of values or objects by UI
        params - optional, can contain a list of objects. If present, then
            this (parent) is treated as object / list of objects by UI,
            otherwise, it's treated as a single / list of single values
    """
    LOGGER.info('Sending typed parameters to Polyglot.')
    # isinstance is the idiomatic type test and also accepts list subclasses.
    if not isinstance(data, list):
        data = [data]
    self.send({'typedparams': data})
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.addNotice
|
python
|
def addNotice(self, data):
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
|
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L404-L412
|
[
"def send(self, message):\n \"\"\"\n Formatted Message to send to Polyglot. Connection messages are sent automatically from this module\n so this method is used to send commands to/from Polyglot and formats it for consumption\n \"\"\"\n if not isinstance(message, dict) and self.connected:\n warnings.warn('payload not a dictionary')\n return False\n try:\n message['node'] = self.profileNum\n self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)\n except TypeError as err:\n LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.removeNotice
|
python
|
def removeNotice(self, data):
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
|
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L414-L422
|
[
"def send(self, message):\n \"\"\"\n Formatted Message to send to Polyglot. Connection messages are sent automatically from this module\n so this method is used to send commands to/from Polyglot and formats it for consumption\n \"\"\"\n if not isinstance(message, dict) and self.connected:\n warnings.warn('payload not a dictionary')\n return False\n try:\n message['node'] = self.profileNum\n self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)\n except TypeError as err:\n LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.delNode
|
python
|
def delNode(self, address):
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
|
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L439-L451
|
[
"def send(self, message):\n \"\"\"\n Formatted Message to send to Polyglot. Connection messages are sent automatically from this module\n so this method is used to send commands to/from Polyglot and formats it for consumption\n \"\"\"\n if not isinstance(message, dict) and self.connected:\n warnings.warn('payload not a dictionary')\n return False\n try:\n message['node'] = self.profileNum\n self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)\n except TypeError as err:\n LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.getNode
|
python
|
def getNode(self, address):
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
|
Get Node by Address of existing nodes.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L453-L464
| null |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.inConfig
|
python
|
def inConfig(self, config):
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
|
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L466-L480
|
[
"def send_custom_config_docs(self):\n data = ''\n if not self.custom_params_docs_file_sent:\n data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)\n else:\n data = self.config.get('customParamsDoc', '')\n\n # send if we're sending new file or there are updates\n if (not self.custom_params_docs_file_sent or\n len(self.custom_params_pending_docs) > 0):\n data += self.custom_params_pending_docs\n self.custom_params_docs_file_sent = True\n self.custom_params_pending_docs = ''\n\n self.config['customParamsDoc'] = data\n self.send({ 'customparamsdoc': data })\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
def save_typed_params(self, data):
"""
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
"""
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Interface.save_typed_params
|
python
|
def save_typed_params(self, data):
LOGGER.info('Sending typed parameters to Polyglot.')
if type(data) is not list:
data = [ data ]
message = { 'typedparams': data }
self.send(message)
|
Send custom parameters descriptions to Polyglot to be used
in front end UI configuration screen
Accepts list of objects with the followin properties
name - used as a key when data is sent from UI
title - displayed in UI
defaultValue - optionanl
type - optional, can be 'NUMBER', 'STRING' or 'BOOLEAN'.
Defaults to 'STRING'
desc - optional, shown in tooltip in UI
isRequired - optional, True/False, when set, will not validate UI
input if it's empty
isList - optional, True/False, if set this will be treated as list
of values or objects by UI
params - optional, can contain a list of objects. If present, then
this (parent) is treated as object / list of objects by UI,
otherwise, it's treated as a single / list of single values
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L519-L542
|
[
"def send(self, message):\n \"\"\"\n Formatted Message to send to Polyglot. Connection messages are sent automatically from this module\n so this method is used to send commands to/from Polyglot and formats it for consumption\n \"\"\"\n if not isinstance(message, dict) and self.connected:\n warnings.warn('payload not a dictionary')\n return False\n try:\n message['node'] = self.profileNum\n self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)\n except TypeError as err:\n LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)\n"
] |
class Interface(object):
CUSTOM_CONFIG_DOCS_FILE_NAME = 'POLYGLOT_CONFIG.md'
"""
Polyglot Interface Class
:param envVar: The Name of the variable from ~/.polyglot/.env that has this NodeServer's profile number
"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=unused-argument
__exists = False
def __init__(self, envVar=None):
if self.__exists:
warnings.warn('Only one Interface is allowed.')
return
self.connected = False
self.profileNum = os.environ.get("PROFILE_NUM")
if self.profileNum is None:
if envVar is not None:
self.profileNum = os.environ.get(envVar)
if self.profileNum is None:
LOGGER.error('Profile Number not found in STDIN or .env file. Exiting.')
sys.exit(1)
self.profileNum = str(self.profileNum)
self.topicPolyglotConnection = 'udi/polyglot/connections/polyglot'
self.topicInput = 'udi/polyglot/ns/{}'.format(self.profileNum)
self.topicSelfConnection = 'udi/polyglot/connections/{}'.format(self.profileNum)
self._threads = {}
self._threads['socket'] = Thread(target = self._startMqtt, name = 'Interface')
self._mqttc = mqtt.Client(envVar, True)
# self._mqttc.will_set(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.on_connect = self._connect
self._mqttc.on_message = self._message
self._mqttc.on_subscribe = self._subscribe
self._mqttc.on_disconnect = self._disconnect
self._mqttc.on_publish = self._publish
self._mqttc.on_log = self._log
self.useSecure = True
if 'USE_HTTPS' in os.environ:
self.useSecure = os.environ['USE_HTTPS']
if self.useSecure is True:
if 'MQTT_CERTPATH' in os.environ:
self._mqttc.tls_set(
ca_certs=os.environ['MQTT_CERTPATH'] + '/polyglot.crt',
certfile=os.environ['MQTT_CERTPATH'] + '/client.crt',
keyfile=os.environ['MQTT_CERTPATH'] + '/client_private.key',
tls_version=ssl.PROTOCOL_TLSv1_2)
else:
self._mqttc.tls_set(
ca_certs=join(expanduser("~") + '/.polyglot/ssl/polyglot.crt'),
certfile=join(expanduser("~") + '/.polyglot/ssl/client.crt'),
keyfile=join(expanduser("~") + '/.polyglot/ssl/client_private.key'),
tls_version=ssl.PROTOCOL_TLSv1_2
)
# self._mqttc.tls_insecure_set(True)
# self._mqttc.enable_logger(logger=LOGGER)
self.config = None
# self.loop = asyncio.new_event_loop()
self.loop = None
self.inQueue = queue.Queue()
# self.thread = Thread(target=self.start_loop)
self.isyVersion = None
self._server = os.environ.get("MQTT_HOST") or 'localhost'
self._port = os.environ.get("MQTT_PORT") or '1883'
self.polyglotConnected = False
self.__configObservers = []
self.__stopObservers = []
Interface.__exists = True
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs = ''
def onConfig(self, callback):
"""
Gives the ability to bind any methods to be run when the config is received.
"""
self.__configObservers.append(callback)
def onStop(self, callback):
"""
Gives the ability to bind any methods to be run when the stop command is received.
"""
self.__stopObservers.append(callback)
def _connect(self, mqttc, userdata, flags, rc):
"""
The callback for when the client receives a CONNACK response from the server.
Subscribing in on_connect() means that if we lose the connection and
reconnect then subscriptions will be renewed.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param rc: Result code of connection, 0 = Success, anything else is a failure
"""
if rc == 0:
self.connected = True
results = []
LOGGER.info("MQTT Connected with result code " + str(rc) + " (Success)")
# result, mid = self._mqttc.subscribe(self.topicInput)
results.append((self.topicInput, tuple(self._mqttc.subscribe(self.topicInput))))
results.append((self.topicPolyglotConnection, tuple(self._mqttc.subscribe(self.topicPolyglotConnection))))
for (topic, (result, mid)) in results:
if result == 0:
LOGGER.info("MQTT Subscribing to topic: " + topic + " - " + " MID: " + str(mid) + " Result: " + str(result))
else:
LOGGER.info("MQTT Subscription to " + topic + " failed. This is unusual. MID: " + str(mid) + " Result: " + str(result))
# If subscription fails, try to reconnect.
self._mqttc.reconnect()
self._mqttc.publish(self.topicSelfConnection, json.dumps(
{
'connected': True,
'node': self.profileNum
}), retain=True)
LOGGER.info('Sent Connected message to Polyglot')
else:
LOGGER.error("MQTT Failed to connect. Result code: " + str(rc))
def _message(self, mqttc, userdata, msg):
"""
The callback for when a PUBLISH message is received from the server.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param flags: The flags set on the connection.
:param msg: Dictionary of MQTT received message. Uses: msg.topic, msg.qos, msg.payload
"""
try:
inputCmds = ['query', 'command', 'result', 'status', 'shortPoll', 'longPoll', 'delete']
parsed_msg = json.loads(msg.payload.decode('utf-8'))
if 'node' in parsed_msg:
if parsed_msg['node'] != 'polyglot':
return
del parsed_msg['node']
for key in parsed_msg:
# LOGGER.debug('MQTT Received Message: {}: {}'.format(msg.topic, parsed_msg))
if key == 'config':
self.inConfig(parsed_msg[key])
elif key == 'connected':
self.polyglotConnected = parsed_msg[key]
elif key == 'stop':
LOGGER.debug('Received stop from Polyglot... Shutting Down.')
self.stop()
elif key in inputCmds:
self.input(parsed_msg)
else:
LOGGER.error('Invalid command received in message from Polyglot: {}'.format(key))
except (ValueError) as err:
LOGGER.error('MQTT Received Payload Error: {}'.format(err), exc_info=True)
def _disconnect(self, mqttc, userdata, rc):
"""
The callback for when a DISCONNECT occurs.
:param mqttc: The client instance for this callback
:param userdata: The private userdata for the mqtt client. Not used in Polyglot
:param rc: Result code of connection, 0 = Graceful, anything else is unclean
"""
self.connected = False
if rc != 0:
LOGGER.info("MQTT Unexpected disconnection. Trying reconnect.")
try:
self._mqttc.reconnect()
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: " + message)
else:
LOGGER.info("MQTT Graceful disconnection.")
def _log(self, mqttc, userdata, level, string):
""" Use for debugging MQTT Packets, disable for normal use, NOISY. """
# LOGGER.info('MQTT Log - {}: {}'.format(str(level), str(string)))
pass
def _subscribe(self, mqttc, userdata, mid, granted_qos):
""" Callback for Subscribe message. Unused currently. """
# LOGGER.info("MQTT Subscribed Succesfully for Message ID: {} - QoS: {}".format(str(mid), str(granted_qos)))
pass
def _publish(self, mqttc, userdata, mid):
""" Callback for publish message. Unused currently. """
# LOGGER.info("MQTT Published message ID: {}".format(str(mid)))
pass
def start(self):
for _, thread in self._threads.items():
thread.start()
def _startMqtt(self):
"""
The client start method. Starts the thread for the MQTT Client
and publishes the connected message.
"""
LOGGER.info('Connecting to MQTT... {}:{}'.format(self._server, self._port))
try:
# self._mqttc.connect_async(str(self._server), int(self._port), 10)
self._mqttc.connect_async('{}'.format(self._server), int(self._port), 10)
self._mqttc.loop_forever()
except Exception as ex:
template = "An exception of type {0} occurred. Arguments:\n{1!r}"
message = template.format(type(ex).__name__, ex.args)
LOGGER.error("MQTT Connection error: {}".format(message), exc_info=True)
def stop(self):
"""
The client stop method. If the client is currently connected
stop the thread and disconnect. Publish the disconnected
message if clean shutdown.
"""
# self.loop.call_soon_threadsafe(self.loop.stop)
# self.loop.stop()
# self._longPoll.cancel()
# self._shortPoll.cancel()
if self.connected:
LOGGER.info('Disconnecting from MQTT... {}:{}'.format(self._server, self._port))
self._mqttc.publish(self.topicSelfConnection, json.dumps({'node': self.profileNum, 'connected': False}), retain=True)
self._mqttc.loop_stop()
self._mqttc.disconnect()
try:
for watcher in self.__stopObservers:
watcher()
except KeyError as e:
LOGGER.exception('KeyError in gotConfig: {}'.format(e), exc_info=True)
def send(self, message):
"""
Formatted Message to send to Polyglot. Connection messages are sent automatically from this module
so this method is used to send commands to/from Polyglot and formats it for consumption
"""
if not isinstance(message, dict) and self.connected:
warnings.warn('payload not a dictionary')
return False
try:
message['node'] = self.profileNum
self._mqttc.publish(self.topicInput, json.dumps(message), retain=False)
except TypeError as err:
LOGGER.error('MQTT Send Error: {}'.format(err), exc_info=True)
def addNode(self, node):
"""
Add a node to the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Adding node {}({})'.format(node.name, node.address))
message = {
'addnode': {
'nodes': [{
'address': node.address,
'name': node.name,
'node_def_id': node.id,
'primary': node.primary,
'drivers': node.drivers,
'hint': node.hint
}]
}
}
self.send(message)
def saveCustomData(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customData to Polyglot.')
message = { 'customdata': data }
self.send(message)
def saveCustomParams(self, data):
"""
Send custom dictionary to Polyglot to save and be retrieved on startup.
:param data: Dictionary of key value pairs to store in Polyglot database.
"""
LOGGER.info('Sending customParams to Polyglot.')
message = { 'customparams': data }
self.send(message)
def addNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: String of characters to add as a notification in the front-end.
"""
LOGGER.info('Sending addnotice to Polyglot: {}'.format(data))
message = { 'addnotice': data }
self.send(message)
def removeNotice(self, data):
"""
Add custom notice to front-end for this NodeServers
:param data: Index of notices list to remove.
"""
LOGGER.info('Sending removenotice to Polyglot for index {}'.format(data))
message = { 'removenotice': data }
self.send(message)
def restart(self):
"""
Send a command to Polyglot to restart this NodeServer
"""
LOGGER.info('Asking Polyglot to restart me.')
message = {
'restart': {}
}
self.send(message)
def installprofile(self):
LOGGER.info('Sending Install Profile command to Polyglot.')
message = { 'installprofile': { 'reboot': False } }
self.send(message)
def delNode(self, address):
"""
Delete a node from the NodeServer
:param node: Dictionary of node settings. Keys: address, name, node_def_id, primary, and drivers are required.
"""
LOGGER.info('Removing node {}'.format(address))
message = {
'removenode': {
'address': address
}
}
self.send(message)
def getNode(self, address):
"""
Get Node by Address of existing nodes.
"""
try:
for node in self.config['nodes']:
if node['address'] == address:
return node
return False
except KeyError:
LOGGER.error('Usually means we have not received the config yet.', exc_info=True)
return False
def inConfig(self, config):
"""
Save incoming config received from Polyglot to Interface.config and then do any functions
that are waiting on the config to be received.
"""
self.config = config
self.isyVersion = config['isyVersion']
try:
for watcher in self.__configObservers:
watcher(config)
self.send_custom_config_docs()
except KeyError as e:
LOGGER.error('KeyError in gotConfig: {}'.format(e), exc_info=True)
def input(self, command):
self.inQueue.put(command)
def supports_feature(self, feature):
return True
def get_md_file_data(self, fileName):
data = ''
if os.path.isfile(fileName):
data = markdown2.markdown_path(fileName)
return data
def send_custom_config_docs(self):
data = ''
if not self.custom_params_docs_file_sent:
data = self.get_md_file_data(Interface.CUSTOM_CONFIG_DOCS_FILE_NAME)
else:
data = self.config.get('customParamsDoc', '')
# send if we're sending new file or there are updates
if (not self.custom_params_docs_file_sent or
len(self.custom_params_pending_docs) > 0):
data += self.custom_params_pending_docs
self.custom_params_docs_file_sent = True
self.custom_params_pending_docs = ''
self.config['customParamsDoc'] = data
self.send({ 'customparamsdoc': data })
def add_custom_config_docs(self, data, clearCurrentData=False):
if clearCurrentData:
self.custom_params_docs_file_sent = False
self.custom_params_pending_docs += data
self.send_custom_config_docs()
|
UniversalDevicesInc/polyglot-v2-python-interface
|
polyinterface/polyinterface.py
|
Controller.delNode
|
python
|
def delNode(self, address):
    """Remove the node from the local cache (if present) and ask Polyglot
    to delete it.

    :param address: address string of the node to delete
    """
    if address in self.nodes:
        del self.nodes[address]
    # Forward unconditionally so Polyglot can delete nodes we don't track.
    self.poly.delNode(address)
|
Just send it along if requested, should be able to delete the node even if it isn't
in our config anywhere. Usually used for normalization.
|
train
|
https://github.com/UniversalDevicesInc/polyglot-v2-python-interface/blob/fe613135b762731a41a081222e43d2a8ae4fc53f/polyinterface/polyinterface.py#L844-L851
| null |
class Controller(Node):
"""
Controller Class for controller management. Superclass of Node
"""
__exists = False
def __init__(self, poly, name='Controller'):
if self.__exists:
warnings.warn('Only one Controller is allowed.')
return
try:
self.controller = self
self.parent = self.controller
self.poly = poly
self.poly.onConfig(self._gotConfig)
self.poly.onStop(self.stop)
self.name = name
self.address = 'controller'
self.primary = self.address
self._drivers = deepcopy(self.drivers)
self._nodes = {}
self.config = None
self.nodes = { self.address: self }
self._threads = {}
self._threads['input'] = Thread(target = self._parseInput, name = 'Controller')
self._threads['ns'] = Thread(target = self.start, name = 'NodeServer')
self.polyConfig = None
self.isPrimary = None
self.timeAdded = None
self.enabled = None
self.added = None
self.started = False
self.nodesAdding = []
# self._threads = []
self._startThreads()
except (KeyError) as err:
LOGGER.error('Error Creating node: {}'.format(err), exc_info=True)
def _gotConfig(self, config):
self.polyConfig = config
for node in config['nodes']:
self._nodes[node['address']] = node
if node['address'] in self.nodes:
n = self.nodes[node['address']]
n.updateDrivers(node['drivers'])
n.config = node
n.isPrimary = node['isprimary']
n.timeAdded = node['timeAdded']
n.enabled = node['enabled']
n.added = node['added']
if self.address not in self._nodes:
self.addNode(self)
LOGGER.info('Waiting on Controller node to be added.......')
if not self.started:
self.nodes[self.address] = self
self.started = True
# self.setDriver('ST', 1, True, True)
self._threads['ns'].start()
def _startThreads(self):
self._threads['input'].daemon = True
self._threads['ns'].daemon = True
self._threads['input'].start()
def _parseInput(self):
while True:
input = self.poly.inQueue.get()
for key in input:
if key == 'command':
if input[key]['address'] in self.nodes:
try:
self.nodes[input[key]['address']].runCmd(input[key])
except (Exception) as err:
LOGGER.error('_parseInput: failed {}.runCmd({}) {}'.format(input[key]['address'], input[key]['cmd'], err), exc_info=True)
else:
LOGGER.error('_parseInput: received command {} for a node that is not in memory: {}'.format(input[key]['cmd'], input[key]['address']))
elif key == 'result':
self._handleResult(input[key])
elif key == 'delete':
self._delete()
elif key == 'shortPoll':
self.shortPoll()
elif key == 'longPoll':
self.longPoll()
elif key == 'query':
if input[key]['address'] in self.nodes:
self.nodes[input[key]['address']].query()
elif input[key]['address'] == 'all':
self.query()
elif key == 'status':
if input[key]['address'] in self.nodes:
self.nodes[input[key]['address']].status()
elif input[key]['address'] == 'all':
self.status()
self.poly.inQueue.task_done()
def _handleResult(self, result):
# LOGGER.debug(self.nodesAdding)
try:
if 'addnode' in result:
if result['addnode']['success']:
if not result['addnode']['address'] == self.address:
self.nodes[result['addnode']['address']].start()
# self.nodes[result['addnode']['address']].reportDrivers()
if result['addnode']['address'] in self.nodesAdding:
self.nodesAdding.remove(result['addnode']['address'])
else:
del self.nodes[result['addnode']['address']]
except (KeyError, ValueError) as err:
LOGGER.error('handleResult: {}'.format(err), exc_info=True)
def _delete(self):
"""
Intermediate message that stops MQTT before sending to overrideable method for delete.
"""
self.poly.stop()
self.delete()
def _convertDrivers(self, drivers):
return deepcopy(drivers)
"""
if isinstance(drivers, list):
newFormat = {}
for driver in drivers:
newFormat[driver['driver']] = {}
newFormat[driver['driver']]['value'] = driver['value']
newFormat[driver['driver']]['uom'] = driver['uom']
return newFormat
else:
return deepcopy(drivers)
"""
def delete(self):
"""
Incoming delete message from Polyglot. This NodeServer is being deleted.
You have 5 seconds before the process is killed. Cleanup and disconnect.
"""
pass
"""
AddNode adds the class to self.nodes then sends the request to Polyglot
If update is True, overwrite the node in Polyglot
"""
def addNode(self, node, update=False):
if node.address in self._nodes:
node._drivers = self._nodes[node.address]['drivers']
for driver in node.drivers:
for existing in self._nodes[node.address]['drivers']:
if driver['driver'] == existing['driver']:
driver['value'] = existing['value']
# JIMBO SAYS NO
# driver['uom'] = existing['uom']
self.nodes[node.address] = node
# if node.address not in self._nodes or update:
self.nodesAdding.append(node.address)
self.poly.addNode(node)
# else:
# self.nodes[node.address].start()
return node
"""
Forces a full overwrite of the node
"""
def updateNode(self, node):
self.nodes[node.address] = node
self.nodesAdding.append(node.address)
self.poly.addNode(node)
def longPoll(self):
pass
def shortPoll(self):
pass
def query(self):
for node in self.nodes:
self.nodes[node].reportDrivers()
def status(self):
for node in self.nodes:
self.nodes[node].reportDrivers()
def runForever(self):
self._threads['input'].join()
def start(self):
pass
def saveCustomData(self, data):
if not isinstance(data, dict):
LOGGER.error('saveCustomData: data isn\'t a dictionary. Ignoring.')
else:
self.poly.saveCustomData(data)
def addCustomParam(self, data):
if not isinstance(data, dict):
LOGGER.error('addCustomParam: data isn\'t a dictionary. Ignoring.')
else:
newData = self.poly.config['customParams']
newData.update(data)
self.poly.saveCustomParams(newData)
def removeCustomParam(self, data):
try: # check whether python knows about 'basestring'
basestring
except NameError: # no, it doesn't (it's Python3); use 'str' instead
basestring = str
if not isinstance(data, basestring):
LOGGER.error('removeCustomParam: data isn\'t a string. Ignoring.')
else:
try:
newData = deepcopy(self.poly.config['customParams'])
newData.pop(data)
self.poly.saveCustomParams(newData)
except KeyError:
LOGGER.error('{} not found in customParams. Ignoring...'.format(data), exc_info=True)
def getCustomParam(self, data):
params = deepcopy(self.poly.config['customParams'])
return params.get(data)
def addNotice(self, data, key=None):
if not isinstance(data, dict):
self.poly.addNotice({ 'key': key, 'value': data})
else:
if 'value' in data:
self.poly.addNotice(data)
else:
for key, value in data.items():
self.poly.addNotice({ 'key': key, 'value': value })
def removeNotice(self, key):
data = { 'key': str(key) }
self.poly.removeNotice(data)
def getNotices(self):
return self.poly.config['notices']
def removeNoticesAll(self):
if type(self.poly.config['notices']) == dict:
for key in self.poly.config['notices'].keys():
self.removeNotice(key)
else:
if len(self.poly.config['notices']):
for i in range(len(self.poly.config['notices'])):
self.removeNotice(i)
def stop(self):
""" Called on nodeserver stop """
pass
id = 'controller'
commands = {}
drivers = [{'driver': 'ST', 'value': 0, 'uom': 2}]
|
daskos/mentor
|
mentor/utils.py
|
remote_exception
|
python
|
def remote_exception(exc, tb):
    """Wrap *exc* in a dynamically created RemoteException subclass that
    also inherits the original exception type, attaching traceback *tb*.

    Wrapper classes are memoized in the module-level ``exceptions`` dict.
    Types that cannot be subclassed are returned unchanged.
    """
    exc_type = type(exc)
    cached = exceptions.get(exc_type)
    if cached is not None:
        return cached(exc, tb)
    try:
        wrapper = type(exc_type.__name__,
                       (RemoteException, exc_type),
                       {'exception_type': exc_type})
        exceptions[exc_type] = wrapper
        return wrapper(exc, tb)
    except TypeError:
        # exc_type refuses subclassing/instantiation -- hand back the original.
        return exc
|
Metaclass that wraps exception type in RemoteException
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/utils.py#L61-L74
| null |
from __future__ import absolute_import, division, print_function
import signal
from contextlib import contextmanager
class TimeoutError(Exception):
    """Raised by timeout() when the SIGALRM deadline expires.

    NOTE(review): this shadows the builtin TimeoutError (Python 3.3+)
    within this module.
    """
    pass
@contextmanager
def timeout(seconds):
    """Limit the wrapped block to *seconds* seconds via SIGALRM.

    Raises TimeoutError if the block has not finished in time; any value
    <= 0 disables the timeout entirely. Relies on signal.alarm, so it is
    POSIX-only and must run in the main thread.
    """
    def signal_handler(signum, frame):
        raise TimeoutError("Timed out!")
    if seconds > 0:
        signal.signal(signal.SIGALRM, signal_handler)
        signal.alarm(seconds)
        try:
            yield
        finally:
            # Always cancel the pending alarm, even if the block raised.
            signal.alarm(0)
    else:  # infinite timeout
        yield
class RemoteException(Exception):
    """ Remote Exception

    Contains the exception and traceback from a remotely run task

    - Include the original error message
    - Respond to try-except blocks with original error type
    - Include remote traceback
    """
    def __init__(self, exception, traceback):
        # Original exception instance and its formatted remote traceback (str).
        self.exception = exception
        self.traceback = traceback
    def __str__(self):
        # Original message first, then the remote traceback for context.
        return (str(self.exception) + "\n\n"
                "Traceback\n"
                "---------\n" +
                self.traceback)
    def __dir__(self):
        # Merge this wrapper's attributes with the wrapped exception's so
        # introspection (tab-completion, dir()) sees both.
        return sorted(set(dir(type(self)) +
                          list(self.__dict__) +
                          dir(self.exception)))
    def __getattr__(self, key):
        # Fall back to the wrapped exception for unknown attributes, making
        # the wrapper a transparent proxy for the original error.
        try:
            return object.__getattribute__(self, key)
        except AttributeError:
            return getattr(self.exception, key)
exceptions = dict()
|
daskos/mentor
|
mentor/binpack.py
|
ff
|
python
|
def ff(items, targets):
    """First-Fit: place each item in the first bin with enough room left.

    :param items: item sizes to pack
    :param targets: bin capacities
    :returns: (bins, skip) -- bins as (capacity, contents) pairs, skip
        holding items that fit in no bin
    """
    bins = [(capacity, []) for capacity in targets]
    leftover = []
    for item in items:
        placed = False
        for capacity, packed in bins:
            if item <= capacity - sum(packed):
                packed.append(item)
                placed = True
                break
        if not placed:
            leftover.append(item)
    return bins, leftover
|
First-Fit
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
Complexity O(n^2)
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/binpack.py#L22-L40
| null |
from __future__ import absolute_import, division, print_function
import operator
def weight(items, **kwargs):
    """Compute a normalized, weighted score for each item.

    Each keyword argument names an item attribute and gives that
    attribute's weight. Per attribute, values are scaled by their sum so
    each attribute contributes proportionally; the weighted shares are
    then summed per item.

    :param items: sequence of objects exposing the named attributes
    :param kwargs: attribute-name -> weight mapping (at least one required)
    :returns: list with one score per item (the original returned a
        one-shot ``map`` iterator; a list is reusable and backward
        compatible)
    :raises ValueError: if no weighting attribute is given
    """
    if not kwargs:
        raise ValueError('Missing attribute for weighting items!')
    scaled = []
    # 'factor' instead of 'weight': the original loop variable shadowed
    # this function's own name.
    for attr, factor in kwargs.items():
        values = [float(getattr(item, attr)) for item in items]
        total = sum(values)
        if total:
            scaled.append([factor * (v / total) for v in values])
        else:
            # All values zero: this attribute cannot contribute.
            scaled.append([0] * len(items))
    return list(map(sum, zip(*scaled)))
def ffd(items, targets, **kwargs):
    """First-Fit Decreasing: sort items largest-first, then First-Fit pack.

    Differs from plain First-Fit only in the pre-sort
    (largest to smallest by weighted size).

    Complexity O(n^2)

    :param kwargs: attribute-name -> weight mapping forwarded to weight()
    :returns: (bins, skip) as produced by ff()
    """
    # Pair each item with its weighted size and sort heaviest first.
    sizes = zip(items, weight(items, **kwargs))
    sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
    items = map(operator.itemgetter(0), sizes)
    return ff(items, targets)
def mr(items, targets, **kwargs):
    """Max-Rest: always pack into the bin with the most remaining capacity.

    Complexity O(n^2)

    :param kwargs: attribute-name -> weight mapping forwarded to weight()
    :returns: (bins, skip) -- bins as (target, contents) pairs, skip
        holding items that fit in no bin
    """
    bins = [(target, []) for target in targets]
    skip = []
    for item in items:
        # Remaining capacity of every bin.
        capacities = [target - sum(content) for target, content in bins]
        # NOTE(review): weight() does getattr() on each capacity, so the
        # capacities are assumed to be objects exposing the kwargs
        # attributes (not plain numbers) -- confirm against callers.
        weighted = weight(capacities, **kwargs)
        # Choose the bin with the largest weighted remaining capacity.
        (target, content), capacity, _ = max(zip(bins, capacities, weighted),
                                             key=operator.itemgetter(2))
        if item <= capacity:
            content.append(item)
        else:
            skip.append(item)
    return bins, skip
def mrpq(items, targets):
    """Max-Rest backed by a priority queue -- not implemented yet.

    Complexity O(n*log(n))
    """
    raise NotImplementedError()
def bf(items, targets, **kwargs):
    """Best-Fit: pack each item into the feasible bin it fills most tightly.

    Complexity O(n^2)

    :param kwargs: attribute-name -> weight mapping forwarded to weight()
    :returns: (bins, skip) -- bins as (target, contents) pairs, skip
        holding items that fit in no bin
    """
    bins = [(target, []) for target in targets]
    skip = []
    for item in items:
        containers = []
        capacities = []
        for target, content in bins:
            capacity = target - sum(content)
            if item <= capacity:
                # Record the leftover space this bin would have after packing.
                containers.append(content)
                capacities.append(capacity - item)
        if len(capacities):
            # NOTE(review): weight() does getattr() on the leftover
            # capacities, so they are assumed to expose the kwargs
            # attributes -- confirm against callers.
            weighted = zip(containers, weight(capacities, **kwargs))
            # Smallest weighted leftover == tightest fit.
            content, _ = min(weighted, key=operator.itemgetter(1))
            content.append(item)
        else:
            skip.append(item)
    return bins, skip
def bfd(items, targets, **kwargs):
    """Best-Fit Decreasing: sort items largest-first, then Best-Fit pack.

    Complexity O(n^2)

    :returns: (bins, skip) as produced by bf()
    """
    # Pair each item with its weighted size and sort heaviest first.
    sizes = zip(items, weight(items, **kwargs))
    sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
    items = map(operator.itemgetter(0), sizes)
    return bf(items, targets, **kwargs)
def bfh(items, targets):
    """Best-Fit-Heap (heap-backed Best-Fit) -- not implemented yet.

    Slightly Improved Complexity
    """
    raise NotImplementedError()
|
daskos/mentor
|
mentor/binpack.py
|
ffd
|
python
|
def ffd(items, targets, **kwargs):
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return ff(items, targets)
|
First-Fit Decreasing
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
This algorithm differs only from Next-Fit Decreasing
in having a 'sort'; that is, the items are pre-sorted
(largest to smallest).
Complexity O(n^2)
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/binpack.py#L43-L58
|
[
"def weight(items, **kwargs):\n if not len(kwargs):\n raise ValueError('Missing attribute for weighting items!')\n scaled = []\n for attr, weight in kwargs.items():\n values = [float(getattr(item, attr)) for item in items]\n try:\n s = sum(values)\n scaled.append([weight * (v / s) for v in values])\n except ZeroDivisionError:\n # s equals to zero, attr wont contribute\n scaled.append([0] * len(items))\n\n return map(sum, zip(*scaled))\n",
"def ff(items, targets):\n \"\"\"First-Fit\n\n This is perhaps the simplest packing heuristic;\n it simply packs items in the next available bin.\n\n Complexity O(n^2)\n \"\"\"\n bins = [(target, []) for target in targets]\n skip = []\n\n for item in items:\n for target, content in bins:\n if item <= (target - sum(content)):\n content.append(item)\n break\n else:\n skip.append(item)\n return bins, skip\n"
] |
from __future__ import absolute_import, division, print_function
import operator
def weight(items, **kwargs):
if not len(kwargs):
raise ValueError('Missing attribute for weighting items!')
scaled = []
for attr, weight in kwargs.items():
values = [float(getattr(item, attr)) for item in items]
try:
s = sum(values)
scaled.append([weight * (v / s) for v in values])
except ZeroDivisionError:
# s equals to zero, attr wont contribute
scaled.append([0] * len(items))
return map(sum, zip(*scaled))
def ff(items, targets):
"""First-Fit
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
for target, content in bins:
if item <= (target - sum(content)):
content.append(item)
break
else:
skip.append(item)
return bins, skip
def mr(items, targets, **kwargs):
"""Max-Rest
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
capacities = [target - sum(content) for target, content in bins]
weighted = weight(capacities, **kwargs)
(target, content), capacity, _ = max(zip(bins, capacities, weighted),
key=operator.itemgetter(2))
if item <= capacity:
content.append(item)
else:
skip.append(item)
return bins, skip
def mrpq(items, targets):
"""Max-Rest Priority Queue
Complexity O(n*log(n))
"""
raise NotImplementedError()
def bf(items, targets, **kwargs):
"""Best-Fit
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
containers = []
capacities = []
for target, content in bins:
capacity = target - sum(content)
if item <= capacity:
containers.append(content)
capacities.append(capacity - item)
if len(capacities):
weighted = zip(containers, weight(capacities, **kwargs))
content, _ = min(weighted, key=operator.itemgetter(1))
content.append(item)
else:
skip.append(item)
return bins, skip
def bfd(items, targets, **kwargs):
"""Best-Fit Decreasing
Complexity O(n^2)
"""
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return bf(items, targets, **kwargs)
def bfh(items, targets):
"""Best-Fit-Heap
Slightly Improved Complexity
"""
raise NotImplementedError()
|
daskos/mentor
|
mentor/binpack.py
|
mr
|
python
|
def mr(items, targets, **kwargs):
bins = [(target, []) for target in targets]
skip = []
for item in items:
capacities = [target - sum(content) for target, content in bins]
weighted = weight(capacities, **kwargs)
(target, content), capacity, _ = max(zip(bins, capacities, weighted),
key=operator.itemgetter(2))
if item <= capacity:
content.append(item)
else:
skip.append(item)
return bins, skip
|
Max-Rest
Complexity O(n^2)
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/binpack.py#L61-L79
|
[
"def weight(items, **kwargs):\n if not len(kwargs):\n raise ValueError('Missing attribute for weighting items!')\n scaled = []\n for attr, weight in kwargs.items():\n values = [float(getattr(item, attr)) for item in items]\n try:\n s = sum(values)\n scaled.append([weight * (v / s) for v in values])\n except ZeroDivisionError:\n # s equals to zero, attr wont contribute\n scaled.append([0] * len(items))\n\n return map(sum, zip(*scaled))\n"
] |
from __future__ import absolute_import, division, print_function
import operator
def weight(items, **kwargs):
if not len(kwargs):
raise ValueError('Missing attribute for weighting items!')
scaled = []
for attr, weight in kwargs.items():
values = [float(getattr(item, attr)) for item in items]
try:
s = sum(values)
scaled.append([weight * (v / s) for v in values])
except ZeroDivisionError:
# s equals to zero, attr wont contribute
scaled.append([0] * len(items))
return map(sum, zip(*scaled))
def ff(items, targets):
"""First-Fit
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
for target, content in bins:
if item <= (target - sum(content)):
content.append(item)
break
else:
skip.append(item)
return bins, skip
def ffd(items, targets, **kwargs):
"""First-Fit Decreasing
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
This algorithm differs only from Next-Fit Decreasing
in having a 'sort'; that is, the items are pre-sorted
(largest to smallest).
Complexity O(n^2)
"""
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return ff(items, targets)
def mrpq(items, targets):
"""Max-Rest Priority Queue
Complexity O(n*log(n))
"""
raise NotImplementedError()
def bf(items, targets, **kwargs):
"""Best-Fit
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
containers = []
capacities = []
for target, content in bins:
capacity = target - sum(content)
if item <= capacity:
containers.append(content)
capacities.append(capacity - item)
if len(capacities):
weighted = zip(containers, weight(capacities, **kwargs))
content, _ = min(weighted, key=operator.itemgetter(1))
content.append(item)
else:
skip.append(item)
return bins, skip
def bfd(items, targets, **kwargs):
"""Best-Fit Decreasing
Complexity O(n^2)
"""
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return bf(items, targets, **kwargs)
def bfh(items, targets):
"""Best-Fit-Heap
Slightly Improved Complexity
"""
raise NotImplementedError()
|
daskos/mentor
|
mentor/binpack.py
|
bf
|
python
|
def bf(items, targets, **kwargs):
bins = [(target, []) for target in targets]
skip = []
for item in items:
containers = []
capacities = []
for target, content in bins:
capacity = target - sum(content)
if item <= capacity:
containers.append(content)
capacities.append(capacity - item)
if len(capacities):
weighted = zip(containers, weight(capacities, **kwargs))
content, _ = min(weighted, key=operator.itemgetter(1))
content.append(item)
else:
skip.append(item)
return bins, skip
|
Best-Fit
Complexity O(n^2)
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/binpack.py#L90-L113
|
[
"def weight(items, **kwargs):\n if not len(kwargs):\n raise ValueError('Missing attribute for weighting items!')\n scaled = []\n for attr, weight in kwargs.items():\n values = [float(getattr(item, attr)) for item in items]\n try:\n s = sum(values)\n scaled.append([weight * (v / s) for v in values])\n except ZeroDivisionError:\n # s equals to zero, attr wont contribute\n scaled.append([0] * len(items))\n\n return map(sum, zip(*scaled))\n"
] |
from __future__ import absolute_import, division, print_function
import operator
def weight(items, **kwargs):
if not len(kwargs):
raise ValueError('Missing attribute for weighting items!')
scaled = []
for attr, weight in kwargs.items():
values = [float(getattr(item, attr)) for item in items]
try:
s = sum(values)
scaled.append([weight * (v / s) for v in values])
except ZeroDivisionError:
# s equals to zero, attr wont contribute
scaled.append([0] * len(items))
return map(sum, zip(*scaled))
def ff(items, targets):
"""First-Fit
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
for target, content in bins:
if item <= (target - sum(content)):
content.append(item)
break
else:
skip.append(item)
return bins, skip
def ffd(items, targets, **kwargs):
"""First-Fit Decreasing
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
This algorithm differs only from Next-Fit Decreasing
in having a 'sort'; that is, the items are pre-sorted
(largest to smallest).
Complexity O(n^2)
"""
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return ff(items, targets)
def mr(items, targets, **kwargs):
"""Max-Rest
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
capacities = [target - sum(content) for target, content in bins]
weighted = weight(capacities, **kwargs)
(target, content), capacity, _ = max(zip(bins, capacities, weighted),
key=operator.itemgetter(2))
if item <= capacity:
content.append(item)
else:
skip.append(item)
return bins, skip
def mrpq(items, targets):
"""Max-Rest Priority Queue
Complexity O(n*log(n))
"""
raise NotImplementedError()
def bfd(items, targets, **kwargs):
"""Best-Fit Decreasing
Complexity O(n^2)
"""
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return bf(items, targets, **kwargs)
def bfh(items, targets):
"""Best-Fit-Heap
Slightly Improved Complexity
"""
raise NotImplementedError()
|
daskos/mentor
|
mentor/binpack.py
|
bfd
|
python
|
def bfd(items, targets, **kwargs):
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return bf(items, targets, **kwargs)
|
Best-Fit Decreasing
Complexity O(n^2)
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/binpack.py#L116-L124
|
[
"def weight(items, **kwargs):\n if not len(kwargs):\n raise ValueError('Missing attribute for weighting items!')\n scaled = []\n for attr, weight in kwargs.items():\n values = [float(getattr(item, attr)) for item in items]\n try:\n s = sum(values)\n scaled.append([weight * (v / s) for v in values])\n except ZeroDivisionError:\n # s equals to zero, attr wont contribute\n scaled.append([0] * len(items))\n\n return map(sum, zip(*scaled))\n",
"def bf(items, targets, **kwargs):\n \"\"\"Best-Fit\n\n Complexity O(n^2)\n \"\"\"\n bins = [(target, []) for target in targets]\n skip = []\n\n for item in items:\n containers = []\n capacities = []\n for target, content in bins:\n capacity = target - sum(content)\n if item <= capacity:\n containers.append(content)\n capacities.append(capacity - item)\n\n if len(capacities):\n weighted = zip(containers, weight(capacities, **kwargs))\n content, _ = min(weighted, key=operator.itemgetter(1))\n content.append(item)\n else:\n skip.append(item)\n return bins, skip\n"
] |
from __future__ import absolute_import, division, print_function
import operator
def weight(items, **kwargs):
if not len(kwargs):
raise ValueError('Missing attribute for weighting items!')
scaled = []
for attr, weight in kwargs.items():
values = [float(getattr(item, attr)) for item in items]
try:
s = sum(values)
scaled.append([weight * (v / s) for v in values])
except ZeroDivisionError:
# s equals to zero, attr wont contribute
scaled.append([0] * len(items))
return map(sum, zip(*scaled))
def ff(items, targets):
"""First-Fit
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
for target, content in bins:
if item <= (target - sum(content)):
content.append(item)
break
else:
skip.append(item)
return bins, skip
def ffd(items, targets, **kwargs):
"""First-Fit Decreasing
This is perhaps the simplest packing heuristic;
it simply packs items in the next available bin.
This algorithm differs only from Next-Fit Decreasing
in having a 'sort'; that is, the items are pre-sorted
(largest to smallest).
Complexity O(n^2)
"""
sizes = zip(items, weight(items, **kwargs))
sizes = sorted(sizes, key=operator.itemgetter(1), reverse=True)
items = map(operator.itemgetter(0), sizes)
return ff(items, targets)
def mr(items, targets, **kwargs):
"""Max-Rest
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
capacities = [target - sum(content) for target, content in bins]
weighted = weight(capacities, **kwargs)
(target, content), capacity, _ = max(zip(bins, capacities, weighted),
key=operator.itemgetter(2))
if item <= capacity:
content.append(item)
else:
skip.append(item)
return bins, skip
def mrpq(items, targets):
"""Max-Rest Priority Queue
Complexity O(n*log(n))
"""
raise NotImplementedError()
def bf(items, targets, **kwargs):
"""Best-Fit
Complexity O(n^2)
"""
bins = [(target, []) for target in targets]
skip = []
for item in items:
containers = []
capacities = []
for target, content in bins:
capacity = target - sum(content)
if item <= capacity:
containers.append(content)
capacities.append(capacity - item)
if len(capacities):
weighted = zip(containers, weight(capacities, **kwargs))
content, _ = min(weighted, key=operator.itemgetter(1))
content.append(item)
else:
skip.append(item)
return bins, skip
def bfh(items, targets):
"""Best-Fit-Heap
Slightly Improved Complexity
"""
raise NotImplementedError()
|
daskos/mentor
|
mentor/proxies/executor.py
|
ExecutorDriverProxy.update
|
python
|
def update(self, status):
logging.info('Executor sends status update {} for task {}'.format(
status.state, status.task_id))
return self.driver.sendStatusUpdate(encode(status))
|
Sends a status update to the framework scheduler.
Retrying as necessary until an acknowledgement has been received or the
executor is terminated (in which case, a TASK_LOST status update will be
sent).
See Scheduler.statusUpdate for more information about status update
acknowledgements.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/executor.py#L108-L119
| null |
class ExecutorDriverProxy(object):
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the executor driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Driver started')
return self.driver.start()
def stop(self):
"""Stops the executor driver."""
logging.info('Driver stopped')
return self.driver.stop()
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
executor.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
ExecutorDriver.join), and instantiate and start another driver if
desired (from within the same process, although this functionality is
currently not supported for executors).
"""
logging.info('Driver aborted')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joined to driver')
return self.driver.join()
def run(self):
"""Starts and immediately joins (i.e., blocks on) the driver."""
logging.info('Driver run')
return self.driver.run()
def message(self, data):
"""Sends a message to the framework scheduler.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Driver sends framework message {}'.format(data))
return self.driver.sendFrameworkMessage(data)
|
daskos/mentor
|
mentor/proxies/executor.py
|
ExecutorDriverProxy.message
|
python
|
def message(self, data):
logging.info('Driver sends framework message {}'.format(data))
return self.driver.sendFrameworkMessage(data)
|
Sends a message to the framework scheduler.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/executor.py#L121-L128
| null |
class ExecutorDriverProxy(object):
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the executor driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Driver started')
return self.driver.start()
def stop(self):
"""Stops the executor driver."""
logging.info('Driver stopped')
return self.driver.stop()
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
executor.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
ExecutorDriver.join), and instantiate and start another driver if
desired (from within the same process, although this functionality is
currently not supported for executors).
"""
logging.info('Driver aborted')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joined to driver')
return self.driver.join()
def run(self):
"""Starts and immediately joins (i.e., blocks on) the driver."""
logging.info('Driver run')
return self.driver.run()
def update(self, status):
"""Sends a status update to the framework scheduler.
Retrying as necessary until an acknowledgement has been received or the
executor is terminated (in which case, a TASK_LOST status update will be
sent).
See Scheduler.statusUpdate for more information about status update
acknowledgements.
"""
logging.info('Executor sends status update {} for task {}'.format(
status.state, status.task_id))
return self.driver.sendStatusUpdate(encode(status))
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.stop
|
python
|
def stop(self, failover=False):
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
|
Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L86-L97
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.request
|
python
|
def request(self, requests):
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
|
Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L121-L131
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.launch
|
python
|
def launch(self, offer_id, tasks, filters=Filters()):
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
|
Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L133-L150
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.kill
|
python
|
def kill(self, task_id):
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
|
Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L152-L162
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.reconcile
|
python
|
def reconcile(self, statuses):
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
|
Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L164-L173
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.decline
|
python
|
def decline(self, offer_id, filters=Filters()):
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters))
|
Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L175-L185
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
# TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.accept
|
python
|
def accept(self, offer_ids, operations, filters=Filters()):
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
|
Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L187-L201
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.acknowledge
|
python
|
def acknowledge(self, status):
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
|
Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L221-L232
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def message(self, executor_id, slave_id, message):
"""Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
"""
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
daskos/mentor
|
mentor/proxies/scheduler.py
|
SchedulerDriverProxy.message
|
python
|
def message(self, executor_id, slave_id, message):
logging.info('Sends message `{}` to executor `{}` on slave `{}`'.format(
message, executor_id, slave_id))
return self.driver.sendFrameworkMessage(encode(executor_id),
encode(slave_id),
message)
|
Sends a message from the framework to one of its executors.
These messages are best effort; do not expect a framework message to be
retransmitted in any reliable fashion.
|
train
|
https://github.com/daskos/mentor/blob/b5fd64e3a3192f5664fa5c03e8517cacb4e0590f/mentor/proxies/scheduler.py#L234-L244
| null |
class SchedulerDriverProxy(object):
"""Proxy Interface for Mesos scheduler drivers."""
def __init__(self, driver):
self.driver = driver
def start(self):
"""Starts the scheduler driver.
This needs to be called before any other driver calls are made.
"""
logging.info('Starts Scheduler Driver')
return self.driver.start()
def stop(self, failover=False):
"""Stops the scheduler driver.
If the 'failover' flag is set to False then it is expected that this
framework will never reconnect to Mesos and all of its executors and
tasks can be terminated. Otherwise, all executors and tasks will
remain running (for some framework specific failover timeout) allowing
the scheduler to reconnect (possibly in the same process, or from a
different process, for example, on a different machine.)
"""
logging.info('Stops Scheduler Driver')
return self.driver.stop(failover)
def abort(self):
"""Aborts the driver so that no more callbacks can be made to the
scheduler.
The semantics of abort and stop have deliberately been separated so that
code can detect an aborted driver (i.e., via the return status of
SchedulerDriver.join), and instantiate and start another driver if
desired (from within the same process.)
"""
logging.info('Aborts Scheduler Driver')
return self.driver.abort()
def join(self):
"""Waits for the driver to be stopped or aborted, possibly blocking the
current thread indefinitely.
The return status of this function can be used to determine if the
driver was aborted (see mesos.proto for a description of Status).
"""
logging.info('Joins Scheduler Driver')
return self.driver.join()
def request(self, requests):
"""Requests resources from Mesos.
(see mesos.proto for a description of Request and how, for example, to
request resources from specific slaves.)
Any resources available are offered to the framework via
Scheduler.resourceOffers callback, asynchronously.
"""
logging.info('Request resources from Mesos')
return self.driver.requestResources(map(encode, requests))
def launch(self, offer_id, tasks, filters=Filters()):
"""Launches the given set of tasks.
Any resources remaining (i.e., not used by the tasks or their executors)
will be considered declined.
The specified filters are applied on all unused resources (see
mesos.proto for a description of Filters). Available resources are
aggregated when multiple offers are provided. Note that all offers must
belong to the same slave. Invoking this function with an empty
collection of tasks declines the offers in entirety (see
Scheduler.decline).
Note that passing a single offer is also supported.
"""
logging.info('Launches tasks {}'.format(tasks))
return self.driver.launchTasks(encode(offer_id),
map(encode, tasks),
encode(filters))
def kill(self, task_id):
"""Kills the specified task.
Note that attempting to kill a task is currently not reliable.
If, for example, a scheduler fails over while it was attempting to kill
a task it will need to retry in the future.
Likewise, if unregistered / disconnected, the request will be dropped
(these semantics may be changed in the future).
"""
logging.info('Kills task {}'.format(task_id))
return self.driver.killTask(encode(task_id))
def reconcile(self, statuses):
"""Allows the framework to query the status for non-terminal tasks.
This causes the master to send back the latest task status for each task
in 'statuses', if possible. Tasks that are no longer known will result
in a TASK_LOST update. If statuses is empty, then the master will send
the latest status for each task currently known.
"""
logging.info('Reconciles task statuses {}'.format(statuses))
return self.driver.reconcileTasks(map(encode, statuses))
def decline(self, offer_id, filters=Filters()):
"""Declines an offer in its entirety and applies the specified
filters on the resources (see mesos.proto for a description of
Filters).
Note that this can be done at any time, it is not necessary to do this
within the Scheduler::resourceOffers callback.
"""
logging.info('Declines offer {}'.format(offer_id))
return self.driver.declineOffer(encode(offer_id),
encode(filters)) # TODO filters
def accept(self, offer_ids, operations, filters=Filters()):
"""Accepts the given offers and performs a sequence of operations
on those accepted offers.
See Offer.Operation in mesos.proto for the set of available operations.
Available resources are aggregated when multiple offers are provided.
Note that all offers must belong to the same slave. Any unused resources
will be considered declined. The specified filters are applied on all
unused resources (see mesos.proto for a description of Filters).
"""
logging.info('Accepts offers {}'.format(offer_ids))
return self.driver.acceptOffers(map(encode, offer_ids),
map(encode, operations),
encode(filters))
def revive(self):
"""Removes all filters previously set by the framework (via
launchTasks()).
This enables the framework to receive offers from those filtered slaves.
"""
logging.info(
'Revives; removes all filters previously set by framework')
return self.driver.reviveOffers()
def suppress(self):
"""Inform Mesos master to stop sending offers to the framework.
The scheduler should call reviveOffers() to resume getting offers.
"""
logging.info('Suppress offers for framework')
return self.driver.suppressOffers()
def acknowledge(self, status):
"""Acknowledges the status update.
This should only be called once the status update is processed durably
by the scheduler.
Not that explicit acknowledgements must be requested via the constructor
argument, otherwise a call to this method will cause the driver to
crash.
"""
logging.info('Acknowledges status update {}'.format(status))
return self.driver.acknowledgeStatusUpdate(encode(status))
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.query
|
python
|
async def query(self, path, method='get', **params):
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
|
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L60-L91
| null |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.handle_json_response
|
python
|
async def handle_json_response(responses):
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
|
get the json data response
:param responses: the json response
:return the json data without 'root' node
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L94-L115
| null |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.__get_attr
|
python
|
def __get_attr(what, type_attr, value_attr, **kwargs):
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
|
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L118-L130
| null |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.get_entries
|
python
|
async def get_entries(self, **kwargs):
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
|
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L133-L177
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.post_entries
|
python
|
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
|
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L179-L206
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.get_entry
|
python
|
async def get_entry(self, entry):
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
|
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L208-L220
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.patch_entries
|
python
|
async def patch_entries(self, entry, **kwargs):
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
|
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L236-L276
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n",
"def __get_attr(what, type_attr, value_attr, **kwargs):\n \"\"\"\n get the value of a parm\n :param what: string parm\n :param type_attr: type of parm\n :param value_attr:\n :param kwargs:\n :return: value of the parm\n \"\"\"\n if what in kwargs:\n value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]\n if value in value_attr:\n return value\n"
] |
class Wallabag(object):
    """
    Python Class 'Wallabag' to deal with Wallabag REST API
    This class is able to handle any data from your Wallabag account
    """
    EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
    host = ''
    token = ''
    client_id = ''
    client_secret = ''
    user_agent = ''
    format = ''
    username = ''
    password = ''
    aio_sess = None

    def __init__(self,
                 host='',
                 token='',
                 client_id='',
                 client_secret='',
                 extension='json',
                 user_agent="WallabagPython/1.2.2 "
                            " +https://github.com/push-things/wallabag-api",
                 aio_sess=None):
        """
        init variable
        :param host: string url to the official API Wallabag
        :param token: string of the key provided by Wallabag
        :param client_id: client id
        :param client_secret: client secret
        :param extension: xml|json|txt|csv|pdf|epub|mobi|html
        :param user_agent: user agent string sent to the service
        :param aio_sess: aiohttp session
        :raises ValueError: when *extension* is not one of EXTENTIONS
        """
        self.host = host
        self.client_id = client_id
        self.client_secret = client_secret
        self.token = token
        self.format = extension
        self.user_agent = user_agent
        self.aio_sess = aio_sess
        if self.format not in self.EXTENTIONS:
            raise ValueError("format invalid {0} should be one of {1}".format(
                self.format, self.EXTENTIONS))

    async def query(self, path, method='get', **params):
        """
        Do a query to the System API

        :param path: url to the API
        :param method: the kind of query to do
        :param params: a dict with all the
        necessary things to query the API
        :return json data
        """
        if method in ('get', 'post', 'patch', 'delete', 'put'):
            full_path = self.host + path
            if method == 'get':
                resp = await self.aio_sess.get(full_path, params=params)
            elif method == 'post':
                resp = await self.aio_sess.post(full_path, data=params)
            elif method == 'patch':
                resp = await self.aio_sess.patch(full_path, data=params)
            elif method == 'delete':
                # NOTE(review): params is sent both as query string and as
                # HTTP headers here (delete_entries relies on the headers
                # carrying 'Authorization') — confirm this is intentional.
                resp = await self.aio_sess.delete(full_path, params=params,
                                                 headers=params)
            elif method == 'put':
                resp = await self.aio_sess.put(full_path, data=params)

            async with resp:
                # return the content if its a binary one
                if resp.content_type.startswith('application/pdf') or \
                        resp.content_type.startswith('application/epub'):
                    return await resp.read()

                return await self.handle_json_response(resp)
        else:
            raise ValueError('method expected: get, post, patch, delete, put')

    @staticmethod
    async def handle_json_response(responses):
        """
        get the json data response

        :param responses: the aiohttp response object
        :return the decoded json data (an empty dict on error)
        """
        json_data = {}
        if responses.status != 200:
            err_msg = HttpProcessingError(code=responses.status,
                                          message=await responses.json())
            logging.error("Wallabag: aiohttp error {err_msg}".format(
                err_msg=err_msg))
        else:
            try:
                # BUG FIX: ClientResponse.json() is a coroutine and must be
                # awaited here.  The old code stored the bare coroutine and
                # ended with ``return await json_data``, which raised
                # ``TypeError`` on the error path where json_data was a dict.
                json_data = await responses.json()
            except ClientResponseError as e:
                # sometimes json_data does not return any json() without
                # any error. This is due to the grabbing URL which "rejects"
                # the URL
                logging.error("Wallabag: aiohttp error {code} {message}"
                              .format(code=e.code, message=e.message))
        return json_data

    @staticmethod
    def __get_attr(what, type_attr, value_attr, **kwargs):
        """
        validate and return one optional keyword parameter

        :param what: name of the parameter to look up
        :param type_attr: expected type of the parameter (e.g. ``int``)
        :param value_attr: tuple of accepted values
        :param kwargs: keyword arguments to search
        :return: the (possibly coerced) value, or None when absent or invalid
        """
        if what in kwargs:
            # BUG FIX: callers pass the *type object* ``int`` but the old
            # code compared against the string 'int', so numeric values were
            # never coerced and e.g. archive='1' was silently dropped.
            try:
                value = int(kwargs[what]) if type_attr is int \
                    else kwargs[what]
            except (TypeError, ValueError):
                return None
            if value in value_attr:
                return value
        return None

    # ENTRIES
    async def get_entries(self, **kwargs):
        """
        GET /api/entries.{_format}

        Retrieve all entries. It could be filtered by many options.

        :param kwargs: can contain one of the following filters
            archive: '0' or '1', default '0' filter by archived status.
            starred: '0' or '1', default '0' filter by starred status.
            sort: 'created' or 'updated', default 'created'
            order: 'asc' or 'desc', default 'desc'
            page: int default 1 what page you want
            perPage: int default 30 result per page
            tags: list of tags url encoded.
            since: int default 0 from what timestamp you want
            Returns entries that match ALL tags
        :return data related to the ext
        """
        # default values
        params = dict({'access_token': self.token,
                       'sort': 'created',
                       'order': 'desc',
                       'page': 1,
                       'perPage': 30,
                       'tags': '',
                       'since': 0})
        if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
            params['archive'] = int(kwargs['archive'])
        if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
            params['starred'] = int(kwargs['starred'])
        if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
            params['order'] = kwargs['order']
        if 'page' in kwargs and isinstance(kwargs['page'], int):
            params['page'] = kwargs['page']
        if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
            params['perPage'] = kwargs['perPage']
        if 'tags' in kwargs and isinstance(kwargs['tags'], list):
            params['tags'] = ', '.join(kwargs['tags'])
        if 'since' in kwargs and isinstance(kwargs['since'], int):
            params['since'] = kwargs['since']
        path = '/api/entries.{ext}'.format(ext=self.format)
        return await self.query(path, "get", **params)

    async def post_entries(self, url, title='', tags='', starred=0,
                           archive=0, content='', language='',
                           published_at='', authors='', public=1,
                           original_url=''):
        """
        POST /api/entries.{_format}

        Create an entry

        :param url: the url of the note to store
        :param title: Optional, we'll get the title from the page.
        :param tags: tag1,tag2,tag3 a comma-separated list of tags.
        :param starred: entry already starred
        :param archive: entry already archived
        :param content: additional html content
        :param language: language of the entry
        :param published_at: publication date
        :param authors: authors of the entry
        :param public: make the entry public
        :param original_url: original url of the entry
        :return result
        """
        params = {'access_token': self.token, 'url': url, 'title': title,
                  'tags': tags, 'starred': starred, 'archive': archive,
                  'content': content, 'language': language,
                  'published_at': published_at,
                  'authors': authors, 'public': public,
                  'original_url': original_url}
        if len(tags) > 0 and isinstance(tags, list):
            params['tags'] = ', '.join(tags)
        path = '/api/entries.{ext}'.format(ext=self.format)
        return await self.query(path, "post", **params)

    async def get_entry(self, entry):
        r"""
        GET /api/entries/{entry}.{_format}

        Retrieve a single entry

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}.{ext}'.format(entry=entry,
                                                  ext=self.format)
        return await self.query(url, "get", **params)

    async def reaload_entry(self, entry):
        r"""
        PATCH /api/entries/{entry}/reload.{_format}

        Reload a single entry

        NOTE(review): the method name is misspelled ("reaload") but kept
        as-is for backward compatibility with existing callers.

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
                                                         ext=self.format)
        return await self.query(url, "patch", **params)

    async def get_entry_export(self, entry):
        r"""
        GET /api/entries/{entry}/export.{_format}

        Retrieve a single entry as a predefined format.

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
                                                         ext=self.format)
        return await self.query(url, "get", **params)

    async def patch_entry_reload(self, entry):
        r"""
        PATCH /api/entries/{entry}/reload.{_format}

        Reload an entry. An empty response with HTTP Status 304 will be send
        if we weren't able to update the content (because it hasn't changed
        or we got an error).

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
                                                         ext=self.format)
        return await self.query(url, "patch", **params)

    async def delete_entries(self, entry):
        r"""
        DELETE /api/entries/{entry}.{_format}

        Delete permanently an entry

        :param entry: \w+ an integer The Entry ID
        :return result
        """
        # the Authorization header is forwarded by query() for 'delete'
        params = {'Authorization': 'Bearer {}'.format(self.token)}
        path = '/api/entries/{entry}.{ext}'.format(
            entry=entry, ext=self.format)
        return await self.query(path, "delete", **params)

    async def entries_exists(self, url, urls=''):
        """
        GET /api/entries/exists.{_format}

        Check if an entry exists by url.

        :param url: string true A URL to check if it exists
        :param urls: string false An array of urls
        (?urls[]=http...&urls[]=http...) Urls (as an array)
        to check if it exists
        :return result
        """
        params = {'access_token': self.token,
                  'url': url,
                  'urls': urls}
        path = '/api/entries/exists.{ext}'.format(ext=self.format)
        return await self.query(path, "get", **params)

    # TAGS
    async def get_entry_tags(self, entry):
        r"""
        GET /api/entries/{entry}/tags.{_format}

        Retrieve all tags for an entry

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/tags.{ext}'.format(
            entry=entry, ext=self.format)
        return await self.query(url, "get", **params)

    async def post_entry_tags(self, entry, tags):
        r"""
        POST /api/entries/{entry}/tags.{_format}

        Add one or more tags to an entry

        :param entry: \w+ an integer The Entry ID
        :param tags: list of tags (urlencoded)
        :return result
        """
        params = {'access_token': self.token, 'tags': []}
        if len(tags) > 0 and isinstance(tags, list):
            params['tags'] = ', '.join(tags)
        path = '/api/entries/{entry}/tags.{ext}'.format(
            entry=entry, ext=self.format)
        return await self.query(path, "post", **params)

    async def delete_entry_tag(self, entry, tag):
        r"""
        DELETE /api/entries/{entry}/tags/{tag}.{_format}

        Permanently remove one tag for an entry

        :param entry: \w+ an integer The Entry ID
        :param tag: string The Tag
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
            entry=entry, tag=tag, ext=self.format)
        return await self.query(url, "delete", **params)

    async def get_tags(self):
        """
        GET /api/tags.{_format}

        Retrieve all tags

        :return data related to the ext
        """
        params = {'access_token': self.token}
        path = '/api/tags.{ext}'.format(ext=self.format)
        return await self.query(path, "get", **params)

    async def delete_tag(self, tag):
        """
        DELETE /api/tags/{tag}.{_format}

        Permanently remove one tag from every entry

        :param tag: string The Tag
        :return data related to the ext
        """
        path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
        params = {'access_token': self.token}
        return await self.query(path, "delete", **params)

    async def delete_tag_label(self, tag):
        """
        DELETE /api/tag/label.{_format}

        Permanently remove one tag from every entry.

        :param tag: string The Tag
        :return data related to the ext
        """
        path = '/api/tag/label.{ext}'.format(ext=self.format)
        params = {'access_token': self.token,
                  'tag': tag}
        return await self.query(path, "delete", **params)

    async def delete_tags_label(self, tags):
        """
        DELETE /api/tags/label.{_format}

        Permanently remove some tags from every entry.

        :param tags: list of tags (urlencoded)
        :return data related to the ext
        """
        # NOTE(review): the docstring says /api/tags/label but the path
        # below hits the singular /api/tag/label — confirm which endpoint
        # the server expects before changing it.
        path = '/api/tag/label.{ext}'.format(ext=self.format)
        params = {'access_token': self.token, 'tags': []}
        if len(tags) > 0 and isinstance(tags, list):
            params['tags'] = ', '.join(tags)
        return await self.query(path, "delete", **params)

    # ANNOTATIONS
    async def delete_annotations(self, annotation):
        r"""
        DELETE /api/annotations/{annotation}.{_format}

        Removes an annotation.

        :param annotation: \w+ string The annotation ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/annotations/{annotation}.{ext}'.format(
            annotation=annotation, ext=self.format)
        return await self.query(url, "delete", **params)

    async def put_annotations(self, annotation):
        r"""
        PUT /api/annotations/{annotation}.{_format}

        Updates an annotation.

        :param annotation: \w+ string The annotation ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/annotations/{annotation}.{ext}'.format(
            annotation=annotation, ext=self.format)
        return await self.query(url, "put", **params)

    async def get_annotations(self, entry):
        r"""
        GET /api/annotations/{entry}.{_format}

        Retrieve annotations for an entry

        :param entry: \w+ integer The entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
                                                      ext=self.format)
        return await self.query(url, "get", **params)

    async def post_annotations(self, entry, **kwargs):
        r"""
        POST /api/annotations/{entry}.{_format}

        Creates a new annotation.

        :param entry: \w+ integer The entry ID
        :param kwargs: may contain 'ranges' (list), 'quote' and 'text'
        :return data related to the ext
        """
        params = dict({'access_token': self.token,
                       'ranges': [],
                       'quote': '',
                       'text': ''})
        if 'ranges' in kwargs:
            params['ranges'] = kwargs['ranges']
        if 'quote' in kwargs:
            params['quote'] = kwargs['quote']
        if 'text' in kwargs:
            params['text'] = kwargs['text']
        url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
                                                      ext=self.format)
        return await self.query(url, "post", **params)

    # VERSION
    @property
    async def version(self):
        """
        GET /api/version.{_format}

        Retrieve version number

        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/version.{ext}'.format(ext=self.format)
        return await self.query(url, "get", **params)

    @classmethod
    async def get_token(cls, host, **params):
        """
        POST /oauth/v2/token

        Get a new token

        :param host: host of the service
        :param params: will contain :
        params = {"grant_type": "password",
                  "client_id": "a string",
                  "client_secret": "a string",
                  "username": "a login",
                  "password": "a password"}
        :return: access token
        """
        params['grant_type'] = "password"
        path = "/oauth/v2/token"
        async with aiohttp.ClientSession() as sess:
            async with sess.post(host + path, data=params) as resp:
                data = await cls.handle_json_response(resp)
                return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.delete_entries
|
python
|
async def delete_entries(self, entry):
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
|
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L308-L321
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
    """
    Python Class 'Wallabag' to deal with Wallabag REST API
    This class is able to handle any data from your Wallabag account
    """
    # allowed export formats for the `extension` constructor argument
    EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
    host = ''
    token = ''
    client_id = ''
    client_secret = ''
    user_agent = ''
    format = ''
    username = ''
    password = ''
    aio_sess = None

    def __init__(self,
                 host='',
                 token='',
                 client_id='',
                 client_secret='',
                 extension='json',
                 user_agent="WallabagPython/1.2.2 "
                            " +https://github.com/push-things/wallabag-api",
                 aio_sess=None):
        """
        init variable

        :param host: string url to the official API Wallabag
        :param token: string of the key provided by Wallabag
        :param client_id: client id
        :param client_secret: client secret
        :param extension: xml|json|txt|csv|pdf|epub|mobi|html
        :param user_agent: user agent string sent to the service
        :param aio_sess: aiohttp session
        :raises ValueError: when *extension* is not one of EXTENTIONS
        """
        self.host = host
        self.client_id = client_id
        self.client_secret = client_secret
        self.token = token
        self.format = extension
        self.user_agent = user_agent
        self.aio_sess = aio_sess
        if self.format not in self.EXTENTIONS:
            raise ValueError("format invalid {0} should be one of {1}".format(
                self.format, self.EXTENTIONS))

    async def query(self, path, method='get', **params):
        """
        Do a query to the System API

        :param path: url to the API
        :param method: the kind of query to do
        :param params: a dict with all the
        necessary things to query the API
        :return json data
        """
        if method in ('get', 'post', 'patch', 'delete', 'put'):
            full_path = self.host + path
            if method == 'get':
                resp = await self.aio_sess.get(full_path, params=params)
            elif method == 'post':
                resp = await self.aio_sess.post(full_path, data=params)
            elif method == 'patch':
                resp = await self.aio_sess.patch(full_path, data=params)
            elif method == 'delete':
                # NOTE(review): params is sent both as query string and as
                # HTTP headers here — presumably so an 'Authorization' entry
                # reaches the server; confirm this is intentional.
                resp = await self.aio_sess.delete(full_path, params=params, headers=params)
            elif method == 'put':
                resp = await self.aio_sess.put(full_path, data=params)

            async with resp:
                # return the content if its a binary one
                if resp.content_type.startswith('application/pdf') or \
                        resp.content_type.startswith('application/epub'):
                    return await resp.read()

                return await self.handle_json_response(resp)
        else:
            raise ValueError('method expected: get, post, patch, delete, put')

    @staticmethod
    async def handle_json_response(responses):
        """
        get the json data response

        :param responses: the json response
        :return the json data without 'root' node
        """
        json_data = {}
        if responses.status != 200:
            err_msg = HttpProcessingError(code=responses.status,
                                          message=await responses.json())
            logging.error("Wallabag: aiohttp error {err_msg}".format(
                err_msg=err_msg))
        else:
            try:
                # NOTE(review): responses.json() is a coroutine and is not
                # awaited here; `return await json_data` below then awaits
                # the coroutine on the success path but raises TypeError on
                # the error path where json_data is a plain dict — verify.
                json_data = responses.json()
            except ClientResponseError as e:
                # sometimes json_data does not return any json() without
                # any error. This is due to the grabbing URL which "rejects"
                # the URL
                logging.error("Wallabag: aiohttp error {code} {message}"
                              .format(code=e.code, message=e.message))
        return await json_data

    @staticmethod
    def __get_attr(what, type_attr, value_attr, **kwargs):
        """
        get the value of a parm

        :param what: string parm
        :param type_attr: type of parm
        :param value_attr: tuple of accepted values
        :param kwargs: keyword arguments to search
        :return: value of the parm, or None when absent/invalid
        """
        if what in kwargs:
            # NOTE(review): callers pass the *type object* ``int`` while
            # this compares against the string 'int', so the int() coercion
            # never runs and e.g. archive='1' is silently rejected — verify.
            value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
            if value in value_attr:
                return value

    # ENTRIES
    async def get_entries(self, **kwargs):
        """
        GET /api/entries.{_format}

        Retrieve all entries. It could be filtered by many options.

        :param kwargs: can contain one of the following filters
            archive: '0' or '1', default '0' filter by archived status.
            starred: '0' or '1', default '0' filter by starred status.
            sort: 'created' or 'updated', default 'created'
            order: 'asc' or 'desc', default 'desc'
            page: int default 1 what page you want
            perPage: int default 30 result per page
            tags: list of tags url encoded.
            since: int default 0 from what timestamp you want
            Will returns entries that matches ALL tags
        :return data related to the ext
        """
        # default values
        params = dict({'access_token': self.token,
                       'sort': 'created',
                       'order': 'desc',
                       'page': 1,
                       'perPage': 30,
                       'tags': '',
                       'since': 0})
        if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
            params['archive'] = int(kwargs['archive'])
        if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
            params['starred'] = int(kwargs['starred'])
        if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
            params['order'] = kwargs['order']
        if 'page' in kwargs and isinstance(kwargs['page'], int):
            params['page'] = kwargs['page']
        if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
            params['perPage'] = kwargs['perPage']
        if 'tags' in kwargs and isinstance(kwargs['tags'], list):
            params['tags'] = ', '.join(kwargs['tags'])
        if 'since' in kwargs and isinstance(kwargs['since'], int):
            params['since'] = kwargs['since']
        path = '/api/entries.{ext}'.format(ext=self.format)
        return await self.query(path, "get", **params)

    async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
                           authors='', public=1, original_url=''):
        """
        POST /api/entries.{_format}

        Create an entry

        :param url: the url of the note to store
        :param title: Optional, we'll get the title from the page.
        :param tags: tag1,tag2,tag3 a comma-separated list of tags.
        :param starred: entry already starred
        :param archive: entry already archived
        :param content: additional html content
        :param language: language of the entry
        :param published_at: publication date
        :param authors: authors of the entry
        :param public: make the entry public
        :param original_url: original url of the entry
        :return result
        """
        params = {'access_token': self.token, 'url': url, 'title': title,
                  'tags': tags, 'starred': starred, 'archive': archive,
                  'content': content, 'language': language, 'published_at': published_at,
                  'authors': authors, 'public': public, 'original_url': original_url}
        if len(tags) > 0 and isinstance(tags, list):
            params['tags'] = ', '.join(tags)
        path = '/api/entries.{ext}'.format(ext=self.format)
        return await self.query(path, "post", **params)

    async def get_entry(self, entry):
        r"""
        GET /api/entries/{entry}.{_format}

        Retrieve a single entry

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}.{ext}'.format(entry=entry,
                                                  ext=self.format)
        return await self.query(url, "get", **params)

    async def reaload_entry(self, entry):
        r"""
        PATCH /api/entries/{entry}/reload.{_format}

        Reload a single entry

        NOTE(review): the method name is misspelled ("reaload") — kept
        as-is because renaming it would break existing callers.

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
                                                         ext=self.format)
        return await self.query(url, "patch", **params)

    async def patch_entries(self, entry, **kwargs):
        """
        PATCH /api/entries/{entry}.{_format}

        Change several properties of an entry

        :param entry: the entry to 'patch' / update
        :param kwargs: can contain one of the following
            title: string
            tags: a list of tags tag1,tag2,tag3
            archive: '0' or '1', default '0' archived the entry.
            starred: '0' or '1', default '0' starred the entry
            In case that you don't want to *really* remove it..
        :return data related to the ext
        """
        # default values
        params = {'access_token': self.token,
                  'title': '',
                  'tags': []}
        if 'title' in kwargs:
            params['title'] = kwargs['title']
        if 'tags' in kwargs and isinstance(kwargs['tags'], list):
            params['tags'] = ', '.join(kwargs['tags'])
        # NOTE(review): __get_attr is given type_attr=int/str (type objects);
        # see the note in __get_attr about the 'int' string comparison.
        params['archive'] = self.__get_attr(what='archive',
                                            type_attr=int,
                                            value_attr=(0, 1),
                                            **kwargs)
        params['starred'] = self.__get_attr(what='starred',
                                            type_attr=int,
                                            value_attr=(0, 1),
                                            **kwargs)
        params['order'] = self.__get_attr(what='order',
                                          type_attr=str,
                                          value_attr=('asc', 'desc'),
                                          **kwargs)
        path = '/api/entries/{entry}.{ext}'.format(
            entry=entry, ext=self.format)
        return await self.query(path, "patch", **params)

    async def get_entry_export(self, entry):
        r"""
        GET /api/entries/{entry}/export.{_format}

        Retrieve a single entry as a predefined format.

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
                                                         ext=self.format)
        return await self.query(url, "get", **params)

    async def patch_entry_reload(self, entry):
        r"""
        PATCH /api/entries/{entry}/reload.{_format}

        Reload an entry. An empty response with HTTP Status 304 will be send
        if we weren't able to update the content (because it hasn't changed
        or we got an error).

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
                                                         ext=self.format)
        return await self.query(url, "patch", **params)

    async def entries_exists(self, url, urls=''):
        """
        GET /api/entries/exists.{_format}

        Check if an entry exists by url.

        :param url: string true A URL to check if it exists
        :param urls: string false An array of urls
        (?urls[]=http...&urls[]=http...) Urls (as an array)
        to check if it exists
        :return result
        """
        params = {'access_token': self.token,
                  'url': url,
                  'urls': urls}
        path = '/api/entries/exists.{ext}'.format(ext=self.format)
        return await self.query(path, "get", **params)

    # TAGS
    async def get_entry_tags(self, entry):
        r"""
        GET /api/entries/{entry}/tags.{_format}

        Retrieve all tags for an entry

        :param entry: \w+ an integer The Entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/tags.{ext}'.format(
            entry=entry, ext=self.format)
        return await self.query(url, "get", **params)

    async def post_entry_tags(self, entry, tags):
        r"""
        POST /api/entries/{entry}/tags.{_format}

        Add one or more tags to an entry

        :param entry: \w+ an integer The Entry ID
        :param tags: list of tags (urlencoded)
        :return result
        """
        params = {'access_token': self.token, 'tags': []}
        if len(tags) > 0 and isinstance(tags, list):
            params['tags'] = ', '.join(tags)
        path = '/api/entries/{entry}/tags.{ext}'.format(
            entry=entry, ext=self.format)
        return await self.query(path, "post", **params)

    async def delete_entry_tag(self, entry, tag):
        r"""
        DELETE /api/entries/{entry}/tags/{tag}.{_format}

        Permanently remove one tag for an entry

        :param entry: \w+ an integer The Entry ID
        :param tag: string The Tag
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
            entry=entry, tag=tag, ext=self.format)
        return await self.query(url, "delete", **params)

    async def get_tags(self):
        """
        GET /api/tags.{_format}

        Retrieve all tags

        :return data related to the ext
        """
        params = {'access_token': self.token}
        path = '/api/tags.{ext}'.format(ext=self.format)
        return await self.query(path, "get", **params)

    async def delete_tag(self, tag):
        """
        DELETE /api/tags/{tag}.{_format}

        Permanently remove one tag from every entry

        :param tag: string The Tag
        :return data related to the ext
        """
        path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
        params = {'access_token': self.token}
        return await self.query(path, "delete", **params)

    async def delete_tag_label(self, tag):
        """
        DELETE /api/tag/label.{_format}

        Permanently remove one tag from every entry.

        :param tag: string The Tag
        :return data related to the ext
        """
        path = '/api/tag/label.{ext}'.format(ext=self.format)
        params = {'access_token': self.token,
                  'tag': tag}
        return await self.query(path, "delete", **params)

    async def delete_tags_label(self, tags):
        """
        DELETE /api/tags/label.{_format}

        Permanently remove some tags from every entry.

        :param tags: list of tags (urlencoded)
        :return data related to the ext
        """
        # NOTE(review): docstring says /api/tags/label but the path below
        # hits the singular /api/tag/label — confirm against the server API.
        path = '/api/tag/label.{ext}'.format(ext=self.format)
        params = {'access_token': self.token, 'tags': []}
        if len(tags) > 0 and isinstance(tags, list):
            params['tags'] = ', '.join(tags)
        return await self.query(path, "delete", **params)

    # ANNOTATIONS
    async def delete_annotations(self, annotation):
        r"""
        DELETE /api/annotations/{annotation}.{_format}

        Removes an annotation.

        :param annotation: \w+ string The annotation ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/annotations/{annotation}.{ext}'.format(
            annotation=annotation, ext=self.format)
        return await self.query(url, "delete", **params)

    async def put_annotations(self, annotation):
        r"""
        PUT /api/annotations/{annotation}.{_format}

        Updates an annotation.

        :param annotation: \w+ string The annotation ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/annotations/{annotation}.{ext}'.format(
            annotation=annotation, ext=self.format)
        return await self.query(url, "put", **params)

    async def get_annotations(self, entry):
        r"""
        GET /api/annotations/{entry}.{_format}

        Retrieve annotations for an entry

        :param entry: \w+ integer The entry ID
        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
                                                      ext=self.format)
        return await self.query(url, "get", **params)

    async def post_annotations(self, entry, **kwargs):
        r"""
        POST /api/annotations/{entry}.{_format}

        Creates a new annotation.

        :param entry: \w+ integer The entry ID
        :param kwargs: may contain 'ranges' (list), 'quote' and 'text'
        :return data related to the ext
        """
        params = dict({'access_token': self.token,
                       'ranges': [],
                       'quote': '',
                       'text': ''})
        if 'ranges' in kwargs:
            params['ranges'] = kwargs['ranges']
        if 'quote' in kwargs:
            params['quote'] = kwargs['quote']
        if 'text' in kwargs:
            params['text'] = kwargs['text']
        url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
                                                      ext=self.format)
        return await self.query(url, "post", **params)

    # VERSION
    @property
    async def version(self):
        """
        GET /api/version.{_format}

        Retrieve version number

        :return data related to the ext
        """
        params = {'access_token': self.token}
        url = '/api/version.{ext}'.format(ext=self.format)
        return await self.query(url, "get", **params)

    @classmethod
    async def get_token(cls, host, **params):
        """
        POST /oauth/v2/token

        Get a new token

        :param host: host of the service
        :param params: will contain :
        params = {"grant_type": "password",
                  "client_id": "a string",
                  "client_secret": "a string",
                  "username": "a login",
                  "password": "a password"}
        :return: access token
        """
        params['grant_type'] = "password"
        path = "/oauth/v2/token"
        async with aiohttp.ClientSession() as sess:
            async with sess.post(host + path, data=params) as resp:
                data = await cls.handle_json_response(resp)
                return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.entries_exists
|
python
|
async def entries_exists(self, url, urls=''):
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
|
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L323-L341
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
    """
    Do a query to the System API.

    Dispatches to the matching aiohttp session verb, then either
    returns the raw body (for binary exports) or the decoded JSON.

    :param path: url path to the API endpoint (appended to self.host)
    :param method: one of 'get', 'post', 'patch', 'delete', 'put'
    :param params: request payload; sent as query string for GET,
        as form data for POST/PATCH/PUT
    :return: raw bytes for pdf/epub responses, decoded json otherwise
    :raises ValueError: when *method* is not a supported verb
    """
    if method in ('get', 'post', 'patch', 'delete', 'put'):
        full_path = self.host + path
        if method == 'get':
            resp = await self.aio_sess.get(full_path, params=params)
        elif method == 'post':
            resp = await self.aio_sess.post(full_path, data=params)
        elif method == 'patch':
            resp = await self.aio_sess.patch(full_path, data=params)
        elif method == 'delete':
            # NOTE(review): params are sent both as the query string and
            # as HTTP headers here (headers=params) — this looks
            # unintentional; confirm against the Wallabag API before
            # changing.
            resp = await self.aio_sess.delete(full_path, params=params, headers=params)
        elif method == 'put':
            resp = await self.aio_sess.put(full_path, data=params)
        # the response is closed by this context manager once consumed
        async with resp:
            # return the content if its a binary one
            if resp.content_type.startswith('application/pdf') or \
                    resp.content_type.startswith('application/epub'):
                return await resp.read()
            return await self.handle_json_response(resp)
    else:
        raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
    """
    Decode the JSON body of an API response.

    :param responses: the aiohttp response object
    :return: the decoded JSON data, or an empty dict when the request
        failed or the body could not be decoded
    """
    json_data = {}
    if responses.status != 200:
        err_msg = HttpProcessingError(code=responses.status,
                                      message=await responses.json())
        logging.error("Wallabag: aiohttp error {err_msg}".format(
            err_msg=err_msg))
    else:
        try:
            # Bug fix: response.json() is a coroutine and must be awaited
            # here. The original stored the coroutine object and did
            # `return await json_data`, which raised TypeError on the
            # error path (awaiting the plain dict {}).
            json_data = await responses.json()
        except ClientResponseError as e:
            # sometimes json() yields no data without raising earlier;
            # this happens when the grabbed URL "rejects" the request
            logging.error("Wallabag: aiohttp error {code} {message}"
                          .format(code=e.code, message=e.message))
    return json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
    """
    Validate and coerce an optional keyword parameter.

    :param what: name of the parameter to look up in kwargs
    :param type_attr: expected type; pass ``int`` to coerce the raw
        value with int() before validating
    :param value_attr: tuple of allowed values
    :param kwargs: the caller's keyword arguments
    :return: the validated value, or None when absent or invalid
    """
    if what in kwargs:
        # Bug fix: the original compared type_attr against the string
        # 'int', but every caller passes the *type* int, so the int()
        # coercion never ran and e.g. archive='1' was silently dropped.
        value = int(kwargs[what]) if type_attr in (int, 'int') else kwargs[what]
        if value in value_attr:
            return value
# ENTRIES
async def get_entries(self, **kwargs):
    """
    GET /api/entries.{_format}

    Retrieve all entries. It could be filtered by many options.

    :param kwargs: can contain one of the following filters
        archive: '0' or '1', default '0' filter by archived status.
        starred: '0' or '1', default '0' filter by starred status.
        sort: 'created' or 'updated', default 'created'
        order: 'asc' or 'desc', default 'desc'
        page: int default 1 what page you want
        perPage: int default 30 result per page
        tags: list of tags url encoded.
        since: int default 0 from what timestamp you want
        Will returns entries that matches ALL tags
    :return data related to the ext
    """
    # default values
    params = dict({'access_token': self.token,
                   'sort': 'created',
                   'order': 'desc',
                   'page': 1,
                   'perPage': 30,
                   'tags': '',
                   'since': 0})
    if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
        params['archive'] = int(kwargs['archive'])
    if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
        params['starred'] = int(kwargs['starred'])
    # Bug fix: the 'sort' filter was documented but never read from
    # kwargs, so callers could not sort by 'updated'.
    if 'sort' in kwargs and kwargs['sort'] in ('created', 'updated'):
        params['sort'] = kwargs['sort']
    if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
        params['order'] = kwargs['order']
    if 'page' in kwargs and isinstance(kwargs['page'], int):
        params['page'] = kwargs['page']
    if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
        params['perPage'] = kwargs['perPage']
    if 'tags' in kwargs and isinstance(kwargs['tags'], list):
        params['tags'] = ', '.join(kwargs['tags'])
    if 'since' in kwargs and isinstance(kwargs['since'], int):
        params['since'] = kwargs['since']
    path = '/api/entries.{ext}'.format(ext=self.format)
    return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
    """
    PATCH /api/entries/{entry}.{_format}

    Change several properties of an entry.

    :param entry: the entry ID to update
    :param kwargs: can contain one of the following
        title: string
        tags: a list of tags tag1,tag2,tag3
        archive: '0' or '1', default '0' archived the entry.
        starred: '0' or '1', default '0' starred the entry
        In case that you don't want to *really* remove it..
    :return data related to the ext
    """
    # default values
    params = {'access_token': self.token,
              'title': '',
              'tags': []}
    if 'title' in kwargs:
        params['title'] = kwargs['title']
    if 'tags' in kwargs and isinstance(kwargs['tags'], list):
        params['tags'] = ', '.join(kwargs['tags'])
    # NOTE(review): __get_attr compares type_attr against the string
    # 'int', so passing the type int here never triggers the int()
    # coercion — string values like archive='1' are dropped. Confirm
    # whether that is intended before relying on these filters.
    params['archive'] = self.__get_attr(what='archive',
                                        type_attr=int,
                                        value_attr=(0, 1),
                                        **kwargs)
    params['starred'] = self.__get_attr(what='starred',
                                        type_attr=int,
                                        value_attr=(0, 1),
                                        **kwargs)
    # NOTE(review): 'order' is not documented for PATCH above — verify
    # the Wallabag API actually honours it on this endpoint.
    params['order'] = self.__get_attr(what='order',
                                      type_attr=str,
                                      value_attr=('asc', 'desc'),
                                      **kwargs)
    path = '/api/entries/{entry}.{ext}'.format(
        entry=entry, ext=self.format)
    return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
    """
    DELETE /api/entries/{entry}.{_format}

    Delete permanently an entry.

    :param entry: integer, the Entry ID
    :return: result of the deletion
    """
    # Consistency fix: every other delete endpoint (delete_tag,
    # delete_entry_tag, delete_annotations, ...) authenticates with the
    # access_token parameter; the original smuggled a raw Authorization
    # header through the query-string params instead.
    params = {'access_token': self.token}
    path = '/api/entries/{entry}.{ext}'.format(
        entry=entry, ext=self.format)
    return await self.query(path, "delete", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
    """
    DELETE /api/tags/label.{_format}

    Permanently remove some tags from every entry.

    :param tags: list of tags
    :return: data related to the ext
    """
    # Bug fix: the bulk endpoint is /api/tags/label (plural) as the
    # docstring says; the original copy-pasted the singular
    # /api/tag/label path from delete_tag_label.
    path = '/api/tags/label.{ext}'.format(ext=self.format)
    params = {'access_token': self.token, 'tags': []}
    if len(tags) > 0 and isinstance(tags, list):
        params['tags'] = ', '.join(tags)
    return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.post_entry_tags
|
python
|
async def post_entry_tags(self, entry, tags):
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
|
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L359-L374
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.delete_entry_tag
|
python
|
async def delete_entry_tag(self, entry, tag):
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
|
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L376-L389
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.get_tags
|
python
|
async def get_tags(self):
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
|
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L391-L401
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.delete_tag
|
python
|
async def delete_tag(self, tag):
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
|
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L403-L414
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.put_annotations
|
python
|
async def put_annotations(self, annotation):
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
|
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L462-L476
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.post_annotations
|
python
|
async def post_annotations(self, entry, **kwargs):
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
|
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry: integer — the entry ID
:return: the API response for the created annotation
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L494-L517
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Client for the Wallabag REST API.

Wraps the HTTP endpoints of a Wallabag instance (entries, tags,
annotations, server version, OAuth token) behind async helper methods.
All requests go through query(); responses are decoded by
handle_json_response() unless a binary export format is returned.
"""
# Supported response formats; spelling "EXTENTIONS" kept as-is because
# external callers may reference it.
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
# NOTE(review): the class-level defaults below are shadowed by instance
# attributes assigned in __init__; username/password appear unused in
# the visible methods -- confirm before removing.
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Send one HTTP request to the Wallabag API and decode the response.

:param path: URL path below self.host (e.g. '/api/entries.json')
:param method: one of 'get', 'post', 'patch', 'delete', 'put'
:param params: query-string data (get/delete) or form-body data
               (post/patch/put); always includes the access token
:return: decoded JSON payload, or raw bytes for pdf/epub exports
:raises ValueError: if *method* is not a supported verb
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
# NOTE(review): params are sent both as the query string AND as HTTP
# headers here (delete_entries relies on the header form for its
# 'Authorization' entry) -- confirm the duplication is intended.
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}

Re-fetch the content of a single entry.

NOTE(review): the method name is a typo ("reaload") kept for backward
compatibility with existing callers; it duplicates patch_entry_reload()
below -- prefer that one in new code.

:param entry: integer entry ID
:return: data in the format selected by self.format
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}

Update properties of an existing entry.

:param entry: integer entry ID
:param kwargs: optional fields:
    title: new title (string)
    tags: list of tags, joined to 'tag1, tag2' for the API
    archive: 0 or 1 -- archived status
    starred: 0 or 1 -- starred status
    order: 'asc' or 'desc'
:return: data in the format selected by self.format

NOTE(review): when 'title' is absent an empty string is still sent --
presumably the server ignores it, but confirm it does not clear the
title.  archive/starred/order fall back to None when missing or invalid
(see __get_attr's implicit return).
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
async def get_token(cls, host, **params):
    """
    POST /oauth/v2/token

    Request a fresh OAuth access token.

    :param host: base URL of the Wallabag service
    :param params: credentials -- client_id, client_secret, username,
                   password (grant_type is forced to "password")
    :return: the access token string, or None if the response carried none
    """
    params['grant_type'] = "password"
    token_path = "/oauth/v2/token"
    # A short-lived session is used here because no instance (and thus
    # no shared aio_sess) exists before the token is obtained.
    async with aiohttp.ClientSession() as sess:
        async with sess.post(host + token_path, data=params) as resp:
            payload = await cls.handle_json_response(resp)
    return payload.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.version
|
python
|
async def version(self):
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
|
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L521-L531
|
[
"async def query(self, path, method='get', **params):\n \"\"\"\n Do a query to the System API\n\n :param path: url to the API\n :param method: the kind of query to do\n :param params: a dict with all the\n necessary things to query the API\n :return json data\n \"\"\"\n if method in ('get', 'post', 'patch', 'delete', 'put'):\n full_path = self.host + path\n if method == 'get':\n resp = await self.aio_sess.get(full_path, params=params)\n elif method == 'post':\n resp = await self.aio_sess.post(full_path, data=params)\n elif method == 'patch':\n resp = await self.aio_sess.patch(full_path, data=params)\n elif method == 'delete':\n resp = await self.aio_sess.delete(full_path, params=params, headers=params)\n elif method == 'put':\n resp = await self.aio_sess.put(full_path, data=params)\n\n async with resp:\n # return the content if its a binary one\n if resp.content_type.startswith('application/pdf') or \\\n resp.content_type.startswith('application/epub'):\n return await resp.read()\n\n return await self.handle_json_response(resp)\n else:\n raise ValueError('method expected: get, post, patch, delete, put')\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
@classmethod
async def get_token(cls, host, **params):
"""
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
"""
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
push-things/wallabag_api
|
wallabag_api/wallabag.py
|
Wallabag.get_token
|
python
|
async def get_token(cls, host, **params):
params['grant_type'] = "password"
path = "/oauth/v2/token"
async with aiohttp.ClientSession() as sess:
async with sess.post(host + path, data=params) as resp:
data = await cls.handle_json_response(resp)
return data.get("access_token")
|
POST /oauth/v2/token
Get a new token
:param host: host of the service
:param params: will contain :
params = {"grant_type": "password",
"client_id": "a string",
"client_secret": "a string",
"username": "a login",
"password": "a password"}
:return: access token
|
train
|
https://github.com/push-things/wallabag_api/blob/8d1e10a6ebc03d1ac9af2b38b57eb69f29b4216e/wallabag_api/wallabag.py#L534-L556
|
[
"async def handle_json_response(responses):\n \"\"\"\n get the json data response\n :param responses: the json response\n :return the json data without 'root' node\n \"\"\"\n json_data = {}\n if responses.status != 200:\n err_msg = HttpProcessingError(code=responses.status,\n message=await responses.json())\n logging.error(\"Wallabag: aiohttp error {err_msg}\".format(\n err_msg=err_msg))\n else:\n try:\n json_data = responses.json()\n except ClientResponseError as e:\n # sometimes json_data does not return any json() without\n # any error. This is due to the grabbing URL which \"rejects\"\n # the URL\n logging.error(\"Wallabag: aiohttp error {code} {message}\"\n .format(code=e.code, message=e.message))\n return await json_data\n"
] |
class Wallabag(object):
"""
Python Class 'Wallabag' to deal with Wallabag REST API
This class is able to handle any data from your Wallabag account
"""
EXTENTIONS = ('xml', 'json', 'txt', 'csv', 'pdf', 'epub', 'mobi', 'html')
host = ''
token = ''
client_id = ''
client_secret = ''
user_agent = ''
format = ''
username = ''
password = ''
aio_sess = None
def __init__(self,
host='',
token='',
client_id='',
client_secret='',
extension='json',
user_agent="WallabagPython/1.2.2 "
" +https://github.com/push-things/wallabag-api",
aio_sess=None):
"""
init variable
:param host: string url to the official API Wallabag
:param token: string of the key provided by Wallabag
:param client_id client id
:param client_secret client secret
:param extension: xml|json|txt|csv|pdf|epub|mobi|html
:param user_agent
:param aio_sess aiohttp session
"""
self.host = host
self.client_id = client_id
self.client_secret = client_secret
self.token = token
self.format = extension
self.user_agent = user_agent
self.aio_sess = aio_sess
if self.format not in self.EXTENTIONS:
raise ValueError("format invalid {0} should be one of {1}".format(
self.format, self.EXTENTIONS))
async def query(self, path, method='get', **params):
"""
Do a query to the System API
:param path: url to the API
:param method: the kind of query to do
:param params: a dict with all the
necessary things to query the API
:return json data
"""
if method in ('get', 'post', 'patch', 'delete', 'put'):
full_path = self.host + path
if method == 'get':
resp = await self.aio_sess.get(full_path, params=params)
elif method == 'post':
resp = await self.aio_sess.post(full_path, data=params)
elif method == 'patch':
resp = await self.aio_sess.patch(full_path, data=params)
elif method == 'delete':
resp = await self.aio_sess.delete(full_path, params=params, headers=params)
elif method == 'put':
resp = await self.aio_sess.put(full_path, data=params)
async with resp:
# return the content if its a binary one
if resp.content_type.startswith('application/pdf') or \
resp.content_type.startswith('application/epub'):
return await resp.read()
return await self.handle_json_response(resp)
else:
raise ValueError('method expected: get, post, patch, delete, put')
@staticmethod
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
message=await responses.json())
logging.error("Wallabag: aiohttp error {err_msg}".format(
err_msg=err_msg))
else:
try:
json_data = responses.json()
except ClientResponseError as e:
# sometimes json_data does not return any json() without
# any error. This is due to the grabbing URL which "rejects"
# the URL
logging.error("Wallabag: aiohttp error {code} {message}"
.format(code=e.code, message=e.message))
return await json_data
@staticmethod
def __get_attr(what, type_attr, value_attr, **kwargs):
"""
get the value of a parm
:param what: string parm
:param type_attr: type of parm
:param value_attr:
:param kwargs:
:return: value of the parm
"""
if what in kwargs:
value = int(kwargs[what]) if type_attr == 'int' else kwargs[what]
if value in value_attr:
return value
# ENTRIES
async def get_entries(self, **kwargs):
"""
GET /api/entries.{_format}
Retrieve all entries. It could be filtered by many options.
:param kwargs: can contain one of the following filters
archive: '0' or '1', default '0' filter by archived status.
starred: '0' or '1', default '0' filter by starred status.
sort: 'created' or 'updated', default 'created'
order: 'asc' or 'desc', default 'desc'
page: int default 1 what page you want
perPage: int default 30 result per page
tags: list of tags url encoded.
since: int default 0 from what timestamp you want
Will returns entries that matches ALL tags
:return data related to the ext
"""
# default values
params = dict({'access_token': self.token,
'sort': 'created',
'order': 'desc',
'page': 1,
'perPage': 30,
'tags': '',
'since': 0})
if 'archive' in kwargs and int(kwargs['archive']) in (0, 1):
params['archive'] = int(kwargs['archive'])
if 'starred' in kwargs and int(kwargs['starred']) in (0, 1):
params['starred'] = int(kwargs['starred'])
if 'order' in kwargs and kwargs['order'] in ('asc', 'desc'):
params['order'] = kwargs['order']
if 'page' in kwargs and isinstance(kwargs['page'], int):
params['page'] = kwargs['page']
if 'perPage' in kwargs and isinstance(kwargs['perPage'], int):
params['perPage'] = kwargs['perPage']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
if 'since' in kwargs and isinstance(kwargs['since'], int):
params['since'] = kwargs['since']
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def post_entries(self, url, title='', tags='', starred=0, archive=0, content='', language='', published_at='',
authors='', public=1, original_url=''):
"""
POST /api/entries.{_format}
Create an entry
:param url: the url of the note to store
:param title: Optional, we'll get the title from the page.
:param tags: tag1,tag2,tag3 a comma-separated list of tags.
:param starred entry already starred
:param archive entry already archived
:param content additionnal html content
:param language
:param published_at
:param authors
:param public
:param original_url
:return result
"""
params = {'access_token': self.token, 'url': url, 'title': title,
'tags': tags, 'starred': starred, 'archive': archive,
'content': content, 'language': language, 'published_at': published_at,
'authors': authors, 'public': public, 'original_url': original_url}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries.{ext}'.format(ext=self.format)
return await self.query(path, "post", **params)
async def get_entry(self, entry):
"""
GET /api/entries/{entry}.{_format}
Retrieve a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def reaload_entry(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload a single entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def patch_entries(self, entry, **kwargs):
"""
PATCH /api/entries/{entry}.{_format}
Change several properties of an entry
:param entry: the entry to 'patch' / update
:param kwargs: can contain one of the following
title: string
tags: a list of tags tag1,tag2,tag3
archive: '0' or '1', default '0' archived the entry.
starred: '0' or '1', default '0' starred the entry
In case that you don't want to *really* remove it..
:return data related to the ext
"""
# default values
params = {'access_token': self.token,
'title': '',
'tags': []}
if 'title' in kwargs:
params['title'] = kwargs['title']
if 'tags' in kwargs and isinstance(kwargs['tags'], list):
params['tags'] = ', '.join(kwargs['tags'])
params['archive'] = self.__get_attr(what='archive',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['starred'] = self.__get_attr(what='starred',
type_attr=int,
value_attr=(0, 1),
**kwargs)
params['order'] = self.__get_attr(what='order',
type_attr=str,
value_attr=('asc', 'desc'),
**kwargs)
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "patch", **params)
async def get_entry_export(self, entry):
"""
GET /api/entries/{entry}/export.{_format}
Retrieve a single entry as a predefined format.
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/export.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def patch_entry_reload(self, entry):
"""
PATCH /api/entries/{entry}/reload.{_format}
Reload an entry. An empty response with HTTP Status 304 will be send
if we weren't able to update the content (because it hasn't changed
or we got an error).
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/reload.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "patch", **params)
async def delete_entries(self, entry):
"""
DELETE /api/entries/{entry}.{_format}
Delete permanently an entry
:param entry: \w+ an integer The Entry ID
:return result
"""
params = {'Authorization': 'Bearer {}'.format(self.token)}
path = '/api/entries/{entry}.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "delete", **params)
async def entries_exists(self, url, urls=''):
"""
GET /api/entries/exists.{_format}
Check if an entry exist by url.
:param url string true An url Url to check if it exists
:param urls string false An array of urls
(?urls[]=http...&urls[]=http...) Urls (as an array)
to check if it exists
:return result
"""
params = {'access_token': self.token,
'url': url,
'urls': urls}
path = '/api/entries/exists.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
# TAGS
async def get_entry_tags(self, entry):
"""
GET /api/entries/{entry}/tags.{_format}
Retrieve all tags for an entry
:param entry: \w+ an integer The Entry ID
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(url, "get", **params)
async def post_entry_tags(self, entry, tags):
"""
POST /api/entries/{entry}/tags.{_format}
Add one or more tags to an entry
:param entry: \w+ an integer The Entry ID
:param tags: list of tags (urlencoded)
:return result
"""
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
path = '/api/entries/{entry}/tags.{ext}'.format(
entry=entry, ext=self.format)
return await self.query(path, "post", **params)
async def delete_entry_tag(self, entry, tag):
"""
DELETE /api/entries/{entry}/tags/{tag}.{_format}
Permanently remove one tag for an entry
:param entry: \w+ an integer The Entry ID
:param tag: string The Tag
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/entries/{entry}/tags/{tag}.{ext}'.format(
entry=entry, tag=tag, ext=self.format)
return await self.query(url, "delete", **params)
async def get_tags(self):
"""
GET /api/tags.{_format}
Retrieve all tags
:return data related to the ext
"""
params = {'access_token': self.token}
path = '/api/tags.{ext}'.format(ext=self.format)
return await self.query(path, "get", **params)
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'access_token': self.token}
return await self.query(path, "delete", **params)
async def delete_tag_label(self, tag):
"""
DELETE /api/tag/label.{_format}
Permanently remove one tag from every entry.
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token,
'tag': tag}
return await self.query(path, "delete", **params)
async def delete_tags_label(self, tags):
"""
DELETE /api/tags/label.{_format}
Permanently remove some tags from every entry.
:param tags: list of tags (urlencoded)
:return data related to the ext
"""
path = '/api/tag/label.{ext}'.format(ext=self.format)
params = {'access_token': self.token, 'tags': []}
if len(tags) > 0 and isinstance(tags, list):
params['tags'] = ', '.join(tags)
return await self.query(path, "delete", **params)
# ANNOTATIONS
async def delete_annotations(self, annotation):
"""
DELETE /api/annotations/{annotation}.{_format}
Removes an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "delete", **params)
async def put_annotations(self, annotation):
"""
PUT /api/annotations/{annotation}.{_format}
Updates an annotation.
:param annotation \w+ string The annotation ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{annotation}.{ext}'.format(
annotation=annotation, ext=self.format)
return await self.query(url, "put", **params)
async def get_annotations(self, entry):
"""
GET /api/annotations/{entry}.{_format}
Retrieve annotations for an entry
:param entry \w+ integer The entry ID
Will returns annotation for this entry
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "get", **params)
async def post_annotations(self, entry, **kwargs):
"""
POST /api/annotations/{entry}.{_format}
Creates a new annotation.
:param entry \w+ integer The entry ID
:return
"""
params = dict({'access_token': self.token,
'ranges': [],
'quote': '',
'text': ''})
if 'ranges' in kwargs:
params['ranges'] = kwargs['ranges']
if 'quote' in kwargs:
params['quote'] = kwargs['quote']
if 'text' in kwargs:
params['text'] = kwargs['text']
url = '/api/annotations/{entry}.{ext}'.format(entry=entry,
ext=self.format)
return await self.query(url, "post", **params)
# VERSION
@property
async def version(self):
"""
GET /api/version.{_format}
Retrieve version number
:return data related to the ext
"""
params = {'access_token': self.token}
url = '/api/version.{ext}'.format(ext=self.format)
return await self.query(url, "get", **params)
@classmethod
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation.reset
|
python
|
def reset(self):
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
|
Resets the state of the expression
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L44-L49
| null |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation.clone
|
python
|
def clone(self):
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
|
Clone this expression
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L51-L57
| null |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation.resolve
|
python
|
def resolve(self, value=None):
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
|
Resolve the current expression against the supplied value
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L81-L100
|
[
"def reset(self):\n \"\"\" Resets the state of the expression \"\"\"\n self.expr = []\n self.matcher = None\n self.last_matcher = None\n self.description = None\n",
"def _assertion(self, matcher, value):\n \"\"\" Perform the actual assertion for the given matcher and value. Override\n this method to apply a special configuration when performing the assertion.\n If the assertion fails it should raise an AssertionError.\n \"\"\"\n # To support the syntax `any_of(subject) | should ...` we check if the\n # value to check is an Expectation object and if it is we use the descriptor\n # protocol to bind the value's assertion logic to this expectation.\n if isinstance(value, Expectation):\n assertion = value._assertion.__get__(self, Expectation)\n assertion(matcher, value.value)\n else:\n hc.assert_that(value, matcher)\n",
"def _transform(self, value):\n \"\"\" Applies any defined transformation to the given value\n \"\"\"\n if self.transform:\n try:\n value = self.transform(value)\n except:\n import sys\n exc_type, exc_obj, exc_tb = sys.exc_info()\n raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(\n self.transform.__name__, value, exc_type.__name__, exc_obj))\n\n return value\n",
"def evaluate(self):\n \"\"\" Converts the current expression into a single matcher, applying\n coordination operators to operands according to their binding rules\n \"\"\"\n\n # Apply Shunting Yard algorithm to convert the infix expression\n # into Reverse Polish Notation. Since we have a very limited\n # set of operators and binding rules, the implementation becomes\n # really simple. The expression is formed of hamcrest matcher instances\n # and operators identifiers (ints).\n ops = []\n rpn = []\n for token in self.expr:\n if isinstance(token, int):\n while len(ops) and token <= ops[-1]:\n rpn.append(ops.pop())\n ops.append(token)\n else:\n rpn.append(token)\n\n # Append the remaining operators\n while len(ops):\n rpn.append(ops.pop())\n\n # Walk the RPN expression to create AllOf/AnyOf matchers\n stack = []\n for token in rpn:\n if isinstance(token, int):\n # Handle the NOT case in a special way since it's unary\n if token == OPERATOR.NOT:\n stack[-1] = IsNot(stack[-1])\n continue\n\n # Our operators always need two operands\n if len(stack) < 2:\n raise RuntimeError('Unable to build a valid expression. Not enough operands available.')\n\n # Check what kind of matcher we need to create\n if token == OPERATOR.OR:\n matcher = hc.any_of(*stack[-2:])\n else: # AND, BUT\n matcher = hc.all_of(*stack[-2:])\n\n stack[-2:] = [matcher]\n else:\n stack.append(token)\n\n if len(stack) != 1:\n raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')\n\n matcher = stack.pop()\n\n # If a description has been given include it in the matcher\n if self.description:\n matcher = hc.described_as(self.description, matcher)\n\n return matcher\n",
"def _init_matcher(self, *args, **kwargs):\n \"\"\" Executes the current matcher appending it to the expression \"\"\"\n\n # If subject-less expectation are provided as arguments convert them\n # to plain Hamcrest matchers in order to allow complex compositions\n fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x\n args = [fn(x) for x in args]\n kwargs = dict((k, fn(v)) for k, v in kwargs.items())\n\n matcher = self.matcher(*args, **kwargs)\n self.expr.append(matcher)\n self.matcher = None\n return matcher\n"
] |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation._assertion
|
python
|
def _assertion(self, matcher, value):
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
|
Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L102-L114
| null |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation._transform
|
python
|
def _transform(self, value):
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
|
Applies any defined transformation to the given value
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L116-L128
| null |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation.evaluate
|
python
|
def evaluate(self):
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
|
Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L130-L186
| null |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation._find_matcher
|
python
|
def _find_matcher(self, alias):
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
|
Finds a matcher based on the given alias or raises an error if no
matcher could be found.
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L188-L206
|
[
"def lookup(alias):\n \"\"\" Tries to find a matcher callable associated to the given alias. If\n an exact match does not exists it will try normalizing it and even\n removing underscores to find one.\n \"\"\"\n\n if alias in matchers:\n return matchers[alias]\n else:\n norm = normalize(alias)\n if norm in normalized:\n alias = normalized[norm]\n return matchers[alias]\n\n # Check without snake case\n if -1 != alias.find('_'):\n norm = normalize(alias).replace('_', '')\n return lookup(norm)\n\n return None\n",
"def suggest(alias, max=3, cutoff=0.5):\n \"\"\" Suggest a list of aliases which are similar enough\n \"\"\"\n\n aliases = matchers.keys()\n similar = get_close_matches(alias, aliases, n=max, cutoff=cutoff)\n\n return similar\n"
] |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation._init_matcher
|
python
|
def _init_matcher(self, *args, **kwargs):
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
|
Executes the current matcher appending it to the expression
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L208-L220
| null |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def described_as(self, description, *args):
""" Specify a custom message for the matcher """
if len(args):
description = description.format(*args)
self.description = description
return self
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/expectation.py
|
Expectation.described_as
|
python
|
def described_as(self, description, *args):
if len(args):
description = description.format(*args)
self.description = description
return self
|
Specify a custom message for the matcher
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/expectation.py#L222-L227
| null |
class Expectation(object):
""" Represents an expectation allowing to configure it with matchers and
finally resolving it.
"""
_contexts = []
def __init__(self, value=None, deferred=False, description=None, factory=False,
def_op=OPERATOR.AND, def_matcher='equal'):
self.reset()
self.value = value
self.deferred = deferred
self.factory = factory
self.description = description
self.def_op = def_op
self.def_matcher = def_matcher
self.transform = None
def reset(self):
""" Resets the state of the expression """
self.expr = []
self.matcher = None
self.last_matcher = None
self.description = None
def clone(self):
""" Clone this expression """
from copy import copy
clone = copy(self)
clone.expr = copy(self.expr)
clone.factory = False
return clone
def __ror__(self, lvalue):
""" Evaluate against the left hand side of the OR (pipe) operator. Since in
Python this operator has a fairly low precedence this method will usually
be called once the whole right hand side of the expression has been evaluated.
Note: We clone and return that clone instead of the self object because
resolving resets the expectation, when using a REPL it's nice to see
the expectation explanation after a successful one.
"""
clone = self.clone()
self.resolve(lvalue)
return clone
def __or__(self, rvalue):
""" Allows the use case: it(value) | should.xxx
Specially useful to wrap mocks which override the __or__ operator
"""
if isinstance(rvalue, Expectation) and self.__class__ is Expectation:
return rvalue.__ror__(self.value)
return NotImplemented
def resolve(self, value=None):
""" Resolve the current expression against the supplied value """
# If we still have an uninitialized matcher init it now
if self.matcher:
self._init_matcher()
# Evaluate the current set of matchers forming the expression
matcher = self.evaluate()
try:
value = self._transform(value)
self._assertion(matcher, value)
except AssertionError as ex:
# By re-raising here the exception we reset the traceback
raise ex
finally:
# Reset the state of the object so we can use it again
if self.deferred:
self.reset()
def _assertion(self, matcher, value):
""" Perform the actual assertion for the given matcher and value. Override
this method to apply a special configuration when performing the assertion.
If the assertion fails it should raise an AssertionError.
"""
# To support the syntax `any_of(subject) | should ...` we check if the
# value to check is an Expectation object and if it is we use the descriptor
# protocol to bind the value's assertion logic to this expectation.
if isinstance(value, Expectation):
assertion = value._assertion.__get__(self, Expectation)
assertion(matcher, value.value)
else:
hc.assert_that(value, matcher)
def _transform(self, value):
""" Applies any defined transformation to the given value
"""
if self.transform:
try:
value = self.transform(value)
except:
import sys
exc_type, exc_obj, exc_tb = sys.exc_info()
raise AssertionError('Error applying transformation <{0}>: {2}: {3}'.format(
self.transform.__name__, value, exc_type.__name__, exc_obj))
return value
def evaluate(self):
""" Converts the current expression into a single matcher, applying
coordination operators to operands according to their binding rules
"""
# Apply Shunting Yard algorithm to convert the infix expression
# into Reverse Polish Notation. Since we have a very limited
# set of operators and binding rules, the implementation becomes
# really simple. The expression is formed of hamcrest matcher instances
# and operators identifiers (ints).
ops = []
rpn = []
for token in self.expr:
if isinstance(token, int):
while len(ops) and token <= ops[-1]:
rpn.append(ops.pop())
ops.append(token)
else:
rpn.append(token)
# Append the remaining operators
while len(ops):
rpn.append(ops.pop())
# Walk the RPN expression to create AllOf/AnyOf matchers
stack = []
for token in rpn:
if isinstance(token, int):
# Handle the NOT case in a special way since it's unary
if token == OPERATOR.NOT:
stack[-1] = IsNot(stack[-1])
continue
# Our operators always need two operands
if len(stack) < 2:
raise RuntimeError('Unable to build a valid expression. Not enough operands available.')
# Check what kind of matcher we need to create
if token == OPERATOR.OR:
matcher = hc.any_of(*stack[-2:])
else: # AND, BUT
matcher = hc.all_of(*stack[-2:])
stack[-2:] = [matcher]
else:
stack.append(token)
if len(stack) != 1:
raise RuntimeError('Unable to build a valid expression. The RPN stack should have just one item.')
matcher = stack.pop()
# If a description has been given include it in the matcher
if self.description:
matcher = hc.described_as(self.description, matcher)
return matcher
def _find_matcher(self, alias):
""" Finds a matcher based on the given alias or raises an error if no
matcher could be found.
"""
matcher = lookup(alias)
if not matcher:
msg = 'Matcher "%s" not found' % alias
# Try to find similarly named matchers to help the user
similar = suggest(alias, max=3, cutoff=0.5)
if len(similar) > 1:
last = similar.pop()
msg += '. Perhaps you meant to use %s or %s?' % (', '.join(similar), last)
elif len(similar) > 0:
msg += '. Perhaps you meant to use %s?' % similar.pop()
raise KeyError(msg)
return matcher
def _init_matcher(self, *args, **kwargs):
""" Executes the current matcher appending it to the expression """
# If subject-less expectation are provided as arguments convert them
# to plain Hamcrest matchers in order to allow complex compositions
fn = lambda x: x.evaluate() if isinstance(x, Expectation) else x
args = [fn(x) for x in args]
kwargs = dict((k, fn(v)) for k, v in kwargs.items())
matcher = self.matcher(*args, **kwargs)
self.expr.append(matcher)
self.matcher = None
return matcher
def desc(self, description, *args):
""" Just an alias to described_as """
return self.described_as(description, *args)
def __getattribute__(self, name):
""" Hijack property access to handle some special cases.
Since we might have patched the root object to include
the `should` properties, we have to capture their use
here.
"""
# Ignore .should. style properties
lowname = name.lower()
if lowname in ('should', 'to'):
return self
if lowname == 'should_not':
return ExpectationNot(
self.value,
self.deferred,
self.description,
self.factory,
self.def_op,
self.def_matcher
)
return object.__getattribute__(self, name)
def __getattr__(self, name):
""" Overload property access to interpret them as matchers. """
# Ignore private (protocol) methods
if name[0:2] == '__':
raise AttributeError
# In factory mode we always create a new instance. This avoids
# problems when defining multiple expectations using the `should`
# keyword without resolving every expectation in order.
obj = self.clone() if self.factory else self
# If we still have an uninitialized matcher then init it now
if obj.matcher:
obj._init_matcher()
# In deferred mode we will resolve in the __ror__ overload
if not obj.deferred:
obj.resolve(obj.value)
# Normalize the name
name = re.sub(r'([a-z])([A-Z])', r'\1_\2', name)
parts = name.lower().split('_')
# Check if we have a coordinator as first item
expr = []
if parts[0] == 'and':
expr.append(OPERATOR.AND)
parts.pop(0)
elif parts[0] == 'or':
expr.append(OPERATOR.OR)
parts.pop(0)
elif parts[0] == 'but':
expr.append(OPERATOR.BUT)
parts.pop(0)
# If no coordinator is given assume a default one
elif len(obj.expr):
expr.append(obj.def_op)
# Negation can come just after a combinator (ie: .and_not_be_equal)
if 'not' in parts:
expr.append(OPERATOR.NOT)
parts.pop(parts.index('not'))
if len(parts):
name = '_'.join(parts)
else:
name = obj.last_matcher or obj.def_matcher
# Find a matcher for the computed name
try:
obj.matcher = obj._find_matcher(name)
obj.last_matcher = name
obj.expr.extend(expr)
except KeyError as ex:
# Signal correctly for `hasattr`
raise AttributeError(str(ex))
return obj
def __call__(self, *args, **kwargs):
""" Execute the matcher just registered by __getattr__ passing any given
arguments. If we're in deferred mode we don't resolve the matcher yet,
it'll be done in the __ror__ overload.
"""
# When called directly (ie: should(foo).xxx) register the param as a transform
if (len(args) == 1 and hasattr(args[0], '__call__')
and not self.expr and not self.matcher):
# We have to clone the expectation so we play fair with the `should` shortcut
clone = self.clone()
clone.transform = args[0]
return clone
if not self.matcher:
raise TypeError('No matchers set. Usage: <value> | should.<matcher>(<expectation>)')
self._init_matcher(*args, **kwargs)
# In deferred mode we will resolve in the __ror__ overload
if not self.deferred:
self.resolve(self.value)
return self
def __enter__(self):
""" Implements the context manager protocol. Specially useful for asserting exceptions
"""
clone = self.clone()
self._contexts.append(clone)
self.reset()
return self
def __exit__(self, exc, value, trace):
# If an assertion failed inside the block just raise that one
if isinstance(value, AssertionError):
return False
expr = self._contexts.pop()
result = ContextManagerResult(exc, value, trace)
expr.resolve(result)
return True
def __eq__(self, other):
""" Overloads the equality operator to trigger a resolution of the matcher
against the other expression value. This allows to easily use expressions
in other libraries like Mock.
"""
clone = self.clone()
try:
clone.resolve(other)
return True
except AssertionError:
return False
# Any exception is silenced and we just return false
except:
return False
def __ne__(self, other):
""" Overload not equal since Python will default to identity instead of negating
the result from equality.
"""
return not self.__eq__(other)
def __repr__(self):
""" This is specially useful when using the library on an interactive interpreter
"""
exp = self.clone()
if exp.matcher:
exp._init_matcher()
if not exp.expr:
return 'Uninitialized expectation <{0}>'.format(self.__class__.__name__)
matcher = exp.evaluate()
return str(matcher)
|
drslump/pyshould
|
pyshould/matchers.py
|
register
|
python
|
def register(matcher, *aliases):
docstr = matcher.__doc__ if matcher.__doc__ is not None else ''
helpmatchers[matcher] = docstr.strip()
for alias in aliases:
matchers[alias] = matcher
# Map a normalized version of the alias
norm = normalize(alias)
normalized[norm] = alias
# Map a version without snake case
norm = norm.replace('_', '')
normalized[norm] = alias
|
Register a matcher associated to one or more aliases. Each alias
given is also normalized.
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L54-L68
|
[
"def normalize(alias):\n \"\"\" Normalizes an alias by removing adverbs defined in IGNORED_WORDS\n \"\"\"\n # Convert from CamelCase to snake_case\n alias = re.sub(r'([a-z])([A-Z])', r'\\1_\\2', alias)\n # Ignore words\n words = alias.lower().split('_')\n words = filter(lambda w: w not in IGNORED_WORDS, words)\n return '_'.join(words)\n"
] |
"""
Defines the registry of matchers and the standard set of matchers
"""
import re
from datetime import datetime, date
import hamcrest as hc
from difflib import get_close_matches
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.library.collection.isdict_containingentries import IsDictContainingEntries
from hamcrest.library.collection.issequence_containing import IsSequenceContainingEvery
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Words to ignore when looking up matchers
IGNORED_WORDS = ['should', 'to', 'be', 'a', 'an', 'is', 'the', 'as']
# Map of registered matchers as alias:callable
matchers = {}
# Map of normalized matcher aliases as normalized:alias
normalized = {}
# Help messages associated to matchers
helpmatchers = {}
# All textual representation types in Python 2/3
try:
text_types = (basestring, str, unicode) # python 2
except NameError:
text_types = (str,)
class ContextManagerResult(object):
    """Snapshot of the ``(exc_type, exc_value, traceback)`` triple received
    by an expectation's ``__exit__``.

    Wrapping the triple in an object lets it be handed to matchers as the
    value under test, which is primarily useful for the raise/throw ones.
    """

    def __init__(self, type_, value, trace):
        # Keep the three pieces under the attribute names matchers expect.
        self.exc_type, self.exc_value, self.trace = type_, value, trace

    def __str__(self):
        # AnyOf/AllOf matchers stringify the tested value when building a
        # description; the exception repr is the most informative rendering.
        return repr(self.exc_value)
def unregister(matcher):
    """Unregister a matcher (or one of its aliases) from the registry.

    :param matcher: a matcher callable, or an alias string which is first
                    resolved to its callable.
    :returns: True if at least one alias was removed, False otherwise.
    """
    # If it's a string handle it like an alias and resolve the callable
    if isinstance(matcher, text_types) and matcher in matchers:
        matcher = matchers[matcher]
    # Find all aliases associated to the matcher.
    # BUG FIX: dict.iteritems() does not exist on Python 3 (which this
    # module otherwise supports); use items() instead. The comprehensions
    # also materialize the key lists so deletion is safe afterwards.
    aliases = [k for k, v in matchers.items() if v == matcher]
    for alias in aliases:
        del matchers[alias]
        # Clean up the normalized versions pointing at this alias
        norms = [k for k, v in normalized.items() if v == alias]
        for norm in norms:
            del normalized[norm]
    # Remove the help docstring associated to the matcher
    if matcher in helpmatchers:
        del helpmatchers[matcher]
    return len(aliases) > 0
def normalize(alias):
    """Normalize an alias: split CamelCase/snake_case into lowercase words
    and drop the filler adverbs listed in IGNORED_WORDS.
    """
    # CamelCase -> snake_case before splitting into words
    snake = re.sub(r'([a-z])([A-Z])', r'\1_\2', alias)
    kept = [w for w in snake.lower().split('_') if w not in IGNORED_WORDS]
    return '_'.join(kept)
def lookup(alias):
    """Resolve an alias to its matcher callable.

    Tries an exact hit first, then the normalized form, and finally the
    normalized form with underscores stripped. Returns None when nothing
    matches.
    """
    # Exact alias registered as-is
    if alias in matchers:
        return matchers[alias]
    # Normalized form (adverbs removed, snake_cased)
    norm = normalize(alias)
    if norm in normalized:
        return matchers[normalized[norm]]
    # Last resort: retry the normalized form without snake case separators
    if '_' in alias:
        return lookup(norm.replace('_', ''))
    return None
def suggest(alias, max=3, cutoff=0.5):
    """Return up to `max` registered aliases that closely resemble `alias`.

    `cutoff` is the difflib similarity threshold in the [0, 1] range.
    """
    return get_close_matches(alias, matchers.keys(), n=max, cutoff=cutoff)
def aliases():
    """Return all registered matcher aliases as a list."""
    return [alias for alias in matchers]
def alias_help(alias):
    """Return the help docstring for the matcher behind `alias`, or None
    when the alias does not resolve to a registered matcher.
    """
    matcher = lookup(alias)
    return helpmatchers.get(matcher) if matcher else None
# Matchers should be defined with verbose aliases to allow the use of
# natural english where possible. When looking up a matcher common adverbs
# like 'to', 'be' or 'is' are ignored in the comparison.
register(hc.equal_to,
'be_equal_to', 'be_equals_to', 'be_eql_to', 'be_eq_to')
register(hc.instance_of,
'be_an_instance_of', 'be_a', 'be_an')
register(hc.same_instance,
'be_the_same_instance_as', 'be_the_same_as', 'be')
register(hc.has_entry,
'have_the_entry', 'contain_the_entry')
register(hc.has_entries,
'have_the_entries', 'contain_the_entries')
register(hc.has_key,
'have_the_key', 'contain_the_key')
register(hc.has_value,
'have_the_value', 'contain_the_value')
register(hc.is_in,
'be_in', 'be_into', 'be_contained_in')
register(hc.has_item,
'have_the_item', 'contain_the_item')
register(hc.has_items,
'have_the_items', 'contain_the_items')
register(hc.contains_inanyorder,
'have_in_any_order', 'contain_in_any_order')
register(hc.contains,
'have', 'contain')
register(hc.only_contains,
'have_only', 'contain_only')
register(hc.close_to,
'be_close_to')
register(hc.greater_than,
'be_greater_than', 'be_greater', 'be_gt',
'be_above',
'be_more_than', 'be_more')
register(hc.greater_than_or_equal_to,
'be_greater_than_or_equal_to', 'be_greater_or_equal', 'be_ge',
'be_more_than_or_equal', 'be_more_or_equal',
'be_at_least')
register(hc.less_than,
'be_less_than', 'be_less', 'be_lt', 'be_below')
register(hc.less_than_or_equal_to,
'be_less_than_or_equal_to', 'be_less_or_equal', 'be_le',
'be_at_most')
register(hc.has_length,
'have_length', 'have_len')
register(hc.has_property,
'have_the_property', 'contain_the_property',
'have_the_prop', 'contain_the_prop')
register(hc.has_string,
'have_the_string', 'contain_the_string')
register(hc.equal_to_ignoring_case,
'be_equal_to_ignoring_case')
register(hc.equal_to_ignoring_whitespace,
'be_equal_to_ignoring_whitespace')
register(hc.contains_string,
'substr', 'have_the_substr', 'contain_the_substr',
'substring', 'have_the_substring', 'contain_the_substring')
register(hc.ends_with,
'end_with')
register(hc.starts_with,
'start_with', 'begin_with')
register(hc.anything,
'be_anything', 'be_any')
class TypeMatcher(BaseMatcher):
    # Base class for isinstance-style matchers. Subclasses define two class
    # attributes:
    #   types    -- a type (or tuple of types) passed to isinstance()
    #   expected -- a human readable description (e.g. 'an integer')
    def _matches(self, item):
        # Delegate the check to isinstance against the subclass' types
        return isinstance(item, self.__class__.types)
    def describe_to(self, description):
        description.append_text(self.__class__.expected)
    def describe_mismatch(self, item, description):
        # Report the actual type and value that failed the check
        description.append_text('was a %s ' % item.__class__.__name__)
        description.append_description_of(item)
    @classmethod
    def __call__(cls, *args, **kwargs):
        # Allow the class itself to act as a factory so it can be registered
        # like a matcher function; any arguments are intentionally ignored.
        return cls()
class IsInteger(TypeMatcher):
""" Check if the value is an integer """
try:
types = (int, long)
except:
types = (int) # Python 3
expected = 'an integer'
class IsFloat(TypeMatcher):
""" Check if the value is a float """
types = float
expected = 'a float'
class IsComplex(TypeMatcher):
""" Check if the value is a complex number """
types = complex
expected = 'a complex number'
class IsNumeric(TypeMatcher):
""" Check if the value is a numeric type """
try:
types = (int, long, float, complex) # python 2
except NameError:
types = (int, float, complex)
expected = 'a numeric type'
class IsString(TypeMatcher):
""" Check if the value is a string """
types = text_types
expected = 'a string'
class IsStr(TypeMatcher):
""" Check if the value is a str """
try:
types = (basestring, str) # python 2
except NameError:
types = str
expected = 'a str'
class IsUnicode(TypeMatcher):
""" Check if the value is an unicode string """
try:
types = unicode # python 2
except NameError:
types = str
expected = 'a unicode string'
class IsBinary(TypeMatcher):
""" Check if value is a binary string """
try:
types = bytes # python 3
except NameError:
types = str
expected = 'a binary string'
class IsByteArray(TypeMatcher):
""" Check if the value is a bytearray """
types = bytearray
expected = 'a bytearray'
class IsDict(TypeMatcher):
""" Check if the value is a dict """
types = dict
expected = 'a dict'
class IsList(TypeMatcher):
""" Check if the value is a list """
types = list
expected = 'a list'
class IsTuple(TypeMatcher):
""" Check if the value is a tuple """
types = tuple
expected = 'a tuple'
class IsSet(TypeMatcher):
""" Check if the value is a set """
types = set
expected = 'a set'
class IsFrozenSet(TypeMatcher):
""" Check if the value is a frozenset """
types = frozenset
expected = 'a frozenset'
class IsBool(TypeMatcher):
""" Check if the value is a bool """
types = bool
expected = 'a bool'
class IsFunction(TypeMatcher):
""" Check if the value is a function """
import types
types = types.FunctionType
expected = 'a function'
class IsGenerator(BaseMatcher):
""" Checks if the value is a generator function """
def _matches(self, item):
import inspect
return inspect.isgeneratorfunction(item)
def describe_to(self, desc):
desc.append_text('a generator function')
class IsClass(BaseMatcher):
""" Check if the value is a class """
def _matches(self, item):
import inspect
return inspect.isclass(item)
def describe_to(self, desc):
desc.append_text('a class')
class IsDate(TypeMatcher):
""" Check if the value is a date """
types = (datetime, date)
expected = 'a date'
register(IsInteger, 'be_an_integer', 'be_an_int')
register(IsFloat, 'be_a_float')
register(IsComplex, 'be_a_complex_number', 'be_a_complex')
register(IsNumeric, 'be_numeric')
register(IsString, 'be_a_string')
register(IsStr, 'be_a_str')
register(IsUnicode, 'be_an_unicode_string', 'be_an_unicode')
register(IsBinary, 'be_a_binary_string', 'be_a_binary')
register(IsByteArray, 'be_a_bytearray', 'be_a_byte_array')
register(IsDict, 'be_a_dictionary', 'be_a_dict')
register(IsList, 'be_a_list', 'be_an_array')
register(IsTuple, 'be_a_tuple')
register(IsSet, 'be_a_set')
register(IsFrozenSet, 'be_a_frozenset', 'be_a_frozen_set')
register(IsFunction, 'be_a_function', 'be_a_func')
register(IsBool, 'be_a_boolean', 'be_a_bool')
register(IsGenerator, 'be_a_generator')
register(IsClass, 'be_a_class')
register(IsDate, 'be_a_date')
class IsIterable(BaseMatcher):
""" Checks if a value is iterable """
def _matches(self, item):
try:
iter(item)
return True
except TypeError:
return False
def describe_to(self, description):
description.append_text('an iterable value')
register(IsIterable, 'be_an_iterable')
class IsCallable(BaseMatcher):
""" Check if a value is callable """
def _matches(self, item):
return hasattr(item, '__call__')
def describe_to(self, desc):
desc.append_text('a callable value')
register(IsCallable, 'be_callable', 'be_a_callable_value', 'can_be_called')
class IsNone(BaseMatcher):
""" Check if a value is None """
def _matches(self, item):
return True if item is None else False
def describe_to(self, desc):
desc.append_text('a None')
register(IsNone, 'be_none', 'be_a_none_value')
class IsTrue(BaseMatcher):
""" Check if a value is True """
def _matches(self, item):
return item is True
def describe_to(self, desc):
desc.append_text('a True')
class IsFalse(BaseMatcher):
""" Check if a value is False """
def _matches(self, item):
return item is False
def describe_to(self, desc):
desc.append_text('a False')
class IsTruthy(BaseMatcher):
""" Check if a value is truthy """
def _matches(self, item):
return True if item else False
def describe_to(self, desc):
desc.append_text('a truthy value')
class IsFalsy(BaseMatcher):
""" Check if a value is falsy """
def _matches(self, item):
return True if not item else False
def describe_to(self, desc):
desc.append_text('a falsy value')
register(IsTrue, 'be_true')
register(IsFalse, 'be_false')
register(IsTruthy, 'be_a_truthy_value', 'be_truthy', 'be_ok')
register(IsFalsy, 'be_a_falsy_value', 'be_falsy', 'be_ko')
class IsEmpty(BaseMatcher):
    """ Check if a value is empty (i.e. it has a length of zero) """
    def _matches(self, item):
        try:
            return len(item) == 0
        # BUG FIX: the original bare `except:` swallowed every exception,
        # including KeyboardInterrupt. len() raises TypeError for unsized
        # values, which is the only case we want to treat as "not empty".
        except TypeError:
            return False
    def describe_to(self, desc):
        desc.append_text('an empty value')
    def describe_mismatch(self, item, desc):
        try:
            desc.append_text('has {0} elements'.format(len(item)))
        except TypeError:
            # The value has no length at all; show it verbatim instead
            desc.append_value(item)
            desc.append_text(' does not have a length')
register(IsEmpty, 'be_empty')
class RaisesError(BaseMatcher):
    """ Checks if calling the value raises an error """
    def __init__(self, expected=None, message=None, regex=None):
        # expected -- exception type (or tuple of types) that must be raised
        # message  -- exact string the exception message must equal
        # regex    -- pattern the exception message must match (re.match)
        self.expected = expected
        self.message = message
        self.regex = regex
        # The exception actually captured during matching (for reporting)
        self.thrown = None
    def _matches(self, item):
        # support passing a context manager result
        if isinstance(item, ContextManagerResult):
            # Python <2.7 may provide a non exception value
            if isinstance(item.exc_value, Exception):
                self.thrown = item.exc_value
            elif item.exc_type is not None:
                # Rebuild an exception instance from the captured type/args
                try:
                    self.thrown = item.exc_type(*item.exc_value)
                except TypeError:
                    self.thrown = item.exc_type(item.exc_value)
            else:
                # The with-block completed without raising anything
                return False
        else:
            try:
                # support passing arguments by feeding a tuple instead of a callable
                if not callable(item) and getattr(item, '__getitem__', False):
                    item[0](*item[1:])
                else:
                    item()
                return False
            except:
                # This should capture any kind of raised value
                import sys
                self.thrown = sys.exc_info()[1]
        # Fail if we have defined an expected error type
        if self.expected and not isinstance(self.thrown, self.expected):
            return False
        # Apply message filters
        if self.message:
            return self.message == str(self.thrown)
        elif self.regex:
            # NOTE: returns a Match object (truthy) or None (falsy), which
            # hamcrest interprets as the boolean match result
            return re.match(self.regex, str(self.thrown))
        return True
    def describe_to(self, desc):
        if self.thrown and self.message:
            desc.append_text('to raise an exception with message "%s"'
                             % self.message)
        elif self.thrown and self.regex:
            desc.append_text('to raise an exception matching /%s/'
                             % self.regex)
        else:
            desc.append_text('to raise an exception')
        if self.expected:
            # expected may be a single type or an iterable of types
            try:
                exps = map(lambda x: x.__name__, self.expected)
            except:
                exps = [self.expected.__name__]
            desc.append_text(' of type <%s>' % '>, <'.join(exps))
    def describe_mismatch(self, item, desc):
        if self.thrown:
            desc.append_text('was ')
            desc.append_text('<%s>' % self.thrown.__class__.__name__)
            if self.message or self.regex:
                desc.append_text(' "%s"' % str(self.thrown))
        else:
            desc.append_text('no exception was raised')
register(RaisesError,
'raise_an_error', 'raise_an_exception',
'raises_an_error', 'raises_an_exception', 'raises', 'raise',
'throw_an_error', 'throw_an_exception',
'throws_an_error', 'throws_an_exception', 'throws', 'throw')
class Changes(BaseMatcher):
    """ Checks if calling a value changes something.

    The watched target may be a callable (invoked to sample the value) or a
    plain value, which is compared by deep-copied snapshot before and after
    the tested callable runs.
    """
    def __init__(self, watch):
        self.watch = watch
        self.before = None
        self.after = None
        self.changed = False

    def _sample(self):
        # Obtain the current value of the watched target: callables are
        # invoked, anything else (TypeError on call) is used directly.
        try:
            return self.watch()
        except TypeError:
            return self.watch

    def _matches(self, item):
        # support passing arguments by feeding a tuple instead of a callable
        if not callable(item) and getattr(item, '__getitem__', False):
            func = item[0]
            params = item[1:]
        else:
            func = item
            params = []
        # BUG FIX: the original read `self.watcher`, an attribute that was
        # never assigned (__init__ stores `self.watch`), so every match
        # raised AttributeError.
        before = self._sample()
        # keep a snapshot of the value in case it's mutable
        from copy import deepcopy
        self.before = deepcopy(before)
        func(*params)
        self.after = self._sample()
        try:
            hc.assert_that(self.after, hc.equal_to(self.before))
            self.changed = False
        except AssertionError:
            self.changed = True
        return self.changed

    def describe_to(self, desc):
        desc.append_text('change something')

    def describe_mismatch(self, item, desc):
        # To support its proper use when negated we need to check if
        # the values actually changed or not
        if self.changed:
            desc.append_text('did change from ') \
                .append_value(self.before) \
                .append_text(' to ') \
                .append_value(self.after)
        else:
            desc.append_text('it didn\'t change from ') \
                .append_value(self.before)
register(Changes,
'change', 'changes', 'modify', 'modifies')
class Callback(BaseMatcher):
""" Checks against an user supplied callback. The callback
can should return True to indicate a successful match or
False to indicate an unsuccessful one.
"""
def __init__(self, callback):
self.callback = callback
def _matches(self, item):
self.error = None
try:
result = self.callback(item)
# Returning an expectation assumes it's correct (no failure raised)
from .expectation import Expectation
return isinstance(result, Expectation) or bool(result)
except AssertionError:
# Just forward assertion failures
raise
except Exception as ex:
self.error = str(ex)
return False
def describe_to(self, desc):
desc.append_text('passses callback ')
if (isinstance(self.callback, type(lambda: None))
and self.callback.__name__ == '<lambda>'):
desc.append_text(self.callback.__name__)
else:
desc.append_text('{0}'.format(self.callback))
def describe_mismatch(self, item, desc):
if self.error:
desc.append_text('gave an exception "%s"' % self.error)
else:
desc.append_text('returned False')
register(Callback,
'callback', 'pass', 'pass_callback')
class MockCalled(BaseMatcher):
""" Support for checking if mocks where called from the Mock library
"""
def _matches(self, item):
if not hasattr(item, 'called'):
raise Exception('Mock object does not have a <called> attribute')
return item.called
def describe_to(self, desc):
desc.append_text('called')
def describe_mismatch(self, item, desc):
if item.called:
desc.append_text('was called')
else:
desc.append_text('was not called')
register(MockCalled, 'called', 'invoked')
class RegexMatcher(BaseMatcher):
""" Checks against a regular expression """
def __init__(self, regex, flags=0):
self.regex = regex
if isinstance(flags, text_types):
self.flags = 0
for ch in flags.upper():
self.flags |= getattr(re, ch)
else:
self.flags = flags
def _matches(self, item):
# Make sure we are matching against a string
hc.assert_that(item, IsString())
match = re.search(self.regex, item, self.flags)
return match is not None
def describe_to(self, desc):
desc.append_text('matching ')
desc.append_text('/{0}/'.format(self.regex))
register(RegexMatcher, 'match', 'match_regex', 'match_regexp', 'be_matched_by')
class IsObjectContainingEntries(IsDictContainingEntries):
"""Matches if object has the properties from a given dict whose values and
keys satisfy a given matcher.
Examples::
:param inst: The instance or class.
:param mismatch_description: The description in case of failure.
have_properties({
'prop1': should.eq('value1'),
'prop2': should.eq('value2')
})
"""
hidden = set(['should_not', 'should_all', 'should_any', 'should', 'should_none'])
def __init__(self, value_matchers=None, **kwargs):
base_dict = {}
if value_matchers is None:
value_matchers = kwargs
for key, value in value_matchers.items():
base_dict[key] = wrap_matcher(value)
super(IsObjectContainingEntries, self).__init__(base_dict)
def matches(self, inst, mismatch_description=None):
# Make sure we are matching against a dict
try:
keys = dir(inst)
attributes = dict(
(key, getattr(inst, key))
for key in dir(inst)
if not key.startswith('__')
and key not in IsObjectContainingEntries.hidden
)
except Exception as ex:
if mismatch_description:
mismatch_description.append_text(
'unable to extract attributes from value: {0}'.format(ex))
return False
return super(IsObjectContainingEntries, self).matches(
attributes, mismatch_description)
def describe_to(self, desc):
desc.append_text('a class as ')
super(IsObjectContainingEntries, self).describe_to(desc)
register(IsObjectContainingEntries,
'have_the_properties', 'contain_the_properties', 'have_the_attributes', 'contain_the_attributes',
'have_props', 'contain_props', 'have_attrs', 'contain_attrs')
class IsSequenceContainingEveryInOrderSparse(IsSequenceContainingEvery):
"""
Matches if a list contains every given element in the same order but with
optional interleaved items.
No optional elements matching the required ones are allowed.
Mismatch description prioritizes missing items over wrong order.
e.g. [1, 3, 4] IsSequenceContainingEveryInOrder [1, 4]
but NOT IsSequenceContainingEveryInOrder [4, 1]
and NOT IsSequenceContainingEveryInOrder [1, 4, 4]
"""
def __init__(self, *element_matchers):
delegates = [hc.has_item(e) for e in element_matchers]
self.matcher_all = hc.all_of(*delegates)
self.matcher_any = hc.any_of(*delegates)
self.matcher_order = hc.contains(*element_matchers)
self.order_seq = None
def _matches(self, sequence):
self.order_seq = None
try:
seq = list(sequence)
if self.matcher_all.matches(seq):
self.order_seq = [i for i in seq if self.matcher_any.matches([i])]
return self.matcher_order.matches(self.order_seq)
else:
return False
except TypeError:
return False
def describe_mismatch(self, item, mismatch_description):
if self.order_seq is None:
mismatch_description.append_text(' instead of a ')
self.matcher_all.describe_mismatch(item, mismatch_description)
else:
self.matcher_order.describe_mismatch(self.order_seq, mismatch_description)
mismatch_description.append_text(
' from candidate list elements: '
).append_description_of(self.order_seq).append_text(
' that satisfied those conditions from '
).append_description_of(item)
def describe_to(self, description):
self.matcher_all.describe_to(description)
description.append_text(' in this specific order')
register(IsSequenceContainingEveryInOrderSparse,
'contain_sparse', 'have_sparse', 'contain_sparse_in_order',
'contain_in_order_sparse', 'have_every_in_order_sparse',
'have_in_order_sparse', 'contain_every_in_order_sparse')
|
drslump/pyshould
|
pyshould/matchers.py
|
unregister
|
python
|
def unregister(matcher):
# If it's a string handle it like an alias
if isinstance(matcher, text_types) and matcher in matchers:
matcher = matchers[matcher]
# Find all aliases associated to the matcher
aliases = [k for k, v in matchers.iteritems() if v == matcher]
for alias in aliases:
del matchers[alias]
# Clean up the normalized versions
norms = [k for k, v in normalized.iteritems() if v == alias]
for norm in norms:
del normalized[norm]
# Remove help docstring
if matcher in helpmatchers:
del helpmatchers[matcher]
return len(aliases) > 0
|
Unregister a matcher (or alias) from the registry
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L71-L91
| null |
"""
Defines the registry of matchers and the standard set of matchers
"""
import re
from datetime import datetime, date
import hamcrest as hc
from difflib import get_close_matches
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.library.collection.isdict_containingentries import IsDictContainingEntries
from hamcrest.library.collection.issequence_containing import IsSequenceContainingEvery
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Words to ignore when looking up matchers
IGNORED_WORDS = ['should', 'to', 'be', 'a', 'an', 'is', 'the', 'as']
# Map of registered matchers as alias:callable
matchers = {}
# Map of normalized matcher aliases as normalized:alias
normalized = {}
# Help messages associated to matchers
helpmatchers = {}
# All textual representation types in Python 2/3
try:
text_types = (basestring, str, unicode) # python 2
except NameError:
text_types = (str,)
class ContextManagerResult(object):
    """Captured result of a ``with`` block used as an expectation.

    When an expression is used in a ``with`` statement, the exception
    details received by ``__exit__`` are wrapped in this class so they
    can be handed to matchers (mainly the raise/throw one) as the value
    under test.
    """

    def __init__(self, type_, value, trace):
        # Exception class, exception instance and traceback exactly as
        # received by a context manager's __exit__ method.
        self.exc_type = type_
        self.exc_value = value
        self.trace = trace

    def __str__(self):
        """ Provide a suitable description of the exception for AnyOf/AllOf matchers """
        return repr(self.exc_value)
def register(matcher, *aliases):
    """Register a matcher under one or more aliases.

    Each alias is additionally indexed under its normalized form and
    under a variant of that form with the underscores removed, so
    lookups tolerate different spellings.
    """
    doc = matcher.__doc__ if matcher.__doc__ is not None else ''
    helpmatchers[matcher] = doc.strip()
    for name in aliases:
        matchers[name] = matcher
        # Index the normalized alias...
        norm = normalize(name)
        normalized[norm] = name
        # ...and a snake-case-free variant of it
        normalized[norm.replace('_', '')] = name
def normalize(alias):
    """Normalize an alias by dropping the adverbs listed in IGNORED_WORDS."""
    # CamelCase -> snake_case
    snake = re.sub(r'([a-z])([A-Z])', r'\1_\2', alias)
    # Drop the ignored filler words
    kept = filter(lambda word: word not in IGNORED_WORDS,
                  snake.lower().split('_'))
    return '_'.join(kept)
def lookup(alias):
    """Find the matcher callable registered for *alias*.

    Tries an exact match first, then the normalized form of the alias
    and, as a last resort, a variant with all underscores removed.
    Returns None when nothing matches.
    """
    if alias in matchers:
        return matchers[alias]
    norm = normalize(alias)
    if norm in normalized:
        return matchers[normalized[norm]]
    # Retry once without snake case
    if '_' in alias:
        return lookup(normalize(alias).replace('_', ''))
    return None
def suggest(alias, max=3, cutoff=0.5):
    """Return up to *max* registered aliases that are similar enough to *alias*."""
    return get_close_matches(alias, matchers.keys(), n=max, cutoff=cutoff)
def aliases():
    """Return the list of registered aliases."""
    return list(matchers)
def alias_help(alias):
    """Return the help docstring registered for *alias*, or None if unknown."""
    matcher = lookup(alias)
    return helpmatchers.get(matcher) if matcher else None
# Matchers should be defined with verbose aliases to allow the use of
# natural english where possible. When looking up a matcher common adverbs
# like 'to', 'be' or 'is' are ignored in the comparison.
register(hc.equal_to,
'be_equal_to', 'be_equals_to', 'be_eql_to', 'be_eq_to')
register(hc.instance_of,
'be_an_instance_of', 'be_a', 'be_an')
register(hc.same_instance,
'be_the_same_instance_as', 'be_the_same_as', 'be')
register(hc.has_entry,
'have_the_entry', 'contain_the_entry')
register(hc.has_entries,
'have_the_entries', 'contain_the_entries')
register(hc.has_key,
'have_the_key', 'contain_the_key')
register(hc.has_value,
'have_the_value', 'contain_the_value')
register(hc.is_in,
'be_in', 'be_into', 'be_contained_in')
register(hc.has_item,
'have_the_item', 'contain_the_item')
register(hc.has_items,
'have_the_items', 'contain_the_items')
register(hc.contains_inanyorder,
'have_in_any_order', 'contain_in_any_order')
register(hc.contains,
'have', 'contain')
register(hc.only_contains,
'have_only', 'contain_only')
register(hc.close_to,
'be_close_to')
register(hc.greater_than,
'be_greater_than', 'be_greater', 'be_gt',
'be_above',
'be_more_than', 'be_more')
register(hc.greater_than_or_equal_to,
'be_greater_than_or_equal_to', 'be_greater_or_equal', 'be_ge',
'be_more_than_or_equal', 'be_more_or_equal',
'be_at_least')
register(hc.less_than,
'be_less_than', 'be_less', 'be_lt', 'be_below')
register(hc.less_than_or_equal_to,
'be_less_than_or_equal_to', 'be_less_or_equal', 'be_le',
'be_at_most')
register(hc.has_length,
'have_length', 'have_len')
register(hc.has_property,
'have_the_property', 'contain_the_property',
'have_the_prop', 'contain_the_prop')
register(hc.has_string,
'have_the_string', 'contain_the_string')
register(hc.equal_to_ignoring_case,
'be_equal_to_ignoring_case')
register(hc.equal_to_ignoring_whitespace,
'be_equal_to_ignoring_whitespace')
register(hc.contains_string,
'substr', 'have_the_substr', 'contain_the_substr',
'substring', 'have_the_substring', 'contain_the_substring')
register(hc.ends_with,
'end_with')
register(hc.starts_with,
'start_with', 'begin_with')
register(hc.anything,
'be_anything', 'be_any')
class TypeMatcher(BaseMatcher):
def _matches(self, item):
return isinstance(item, self.__class__.types)
def describe_to(self, description):
description.append_text(self.__class__.expected)
def describe_mismatch(self, item, description):
description.append_text('was a %s ' % item.__class__.__name__)
description.append_description_of(item)
@classmethod
def __call__(cls, *args, **kwargs):
return cls()
class IsInteger(TypeMatcher):
""" Check if the value is an integer """
try:
types = (int, long)
except:
types = (int) # Python 3
expected = 'an integer'
class IsFloat(TypeMatcher):
""" Check if the value is a float """
types = float
expected = 'a float'
class IsComplex(TypeMatcher):
""" Check if the value is a complex number """
types = complex
expected = 'a complex number'
class IsNumeric(TypeMatcher):
""" Check if the value is a numeric type """
try:
types = (int, long, float, complex) # python 2
except NameError:
types = (int, float, complex)
expected = 'a numeric type'
class IsString(TypeMatcher):
""" Check if the value is a string """
types = text_types
expected = 'a string'
class IsStr(TypeMatcher):
""" Check if the value is a str """
try:
types = (basestring, str) # python 2
except NameError:
types = str
expected = 'a str'
class IsUnicode(TypeMatcher):
""" Check if the value is an unicode string """
try:
types = unicode # python 2
except NameError:
types = str
expected = 'a unicode string'
class IsBinary(TypeMatcher):
""" Check if value is a binary string """
try:
types = bytes # python 3
except NameError:
types = str
expected = 'a binary string'
class IsByteArray(TypeMatcher):
""" Check if the value is a bytearray """
types = bytearray
expected = 'a bytearray'
class IsDict(TypeMatcher):
""" Check if the value is a dict """
types = dict
expected = 'a dict'
class IsList(TypeMatcher):
""" Check if the value is a list """
types = list
expected = 'a list'
class IsTuple(TypeMatcher):
""" Check if the value is a tuple """
types = tuple
expected = 'a tuple'
class IsSet(TypeMatcher):
""" Check if the value is a set """
types = set
expected = 'a set'
class IsFrozenSet(TypeMatcher):
""" Check if the value is a frozenset """
types = frozenset
expected = 'a frozenset'
class IsBool(TypeMatcher):
""" Check if the value is a bool """
types = bool
expected = 'a bool'
class IsFunction(TypeMatcher):
""" Check if the value is a function """
import types
types = types.FunctionType
expected = 'a function'
class IsGenerator(BaseMatcher):
""" Checks if the value is a generator function """
def _matches(self, item):
import inspect
return inspect.isgeneratorfunction(item)
def describe_to(self, desc):
desc.append_text('a generator function')
class IsClass(BaseMatcher):
""" Check if the value is a class """
def _matches(self, item):
import inspect
return inspect.isclass(item)
def describe_to(self, desc):
desc.append_text('a class')
class IsDate(TypeMatcher):
""" Check if the value is a date """
types = (datetime, date)
expected = 'a date'
register(IsInteger, 'be_an_integer', 'be_an_int')
register(IsFloat, 'be_a_float')
register(IsComplex, 'be_a_complex_number', 'be_a_complex')
register(IsNumeric, 'be_numeric')
register(IsString, 'be_a_string')
register(IsStr, 'be_a_str')
register(IsUnicode, 'be_an_unicode_string', 'be_an_unicode')
register(IsBinary, 'be_a_binary_string', 'be_a_binary')
register(IsByteArray, 'be_a_bytearray', 'be_a_byte_array')
register(IsDict, 'be_a_dictionary', 'be_a_dict')
register(IsList, 'be_a_list', 'be_an_array')
register(IsTuple, 'be_a_tuple')
register(IsSet, 'be_a_set')
register(IsFrozenSet, 'be_a_frozenset', 'be_a_frozen_set')
register(IsFunction, 'be_a_function', 'be_a_func')
register(IsBool, 'be_a_boolean', 'be_a_bool')
register(IsGenerator, 'be_a_generator')
register(IsClass, 'be_a_class')
register(IsDate, 'be_a_date')
class IsIterable(BaseMatcher):
""" Checks if a value is iterable """
def _matches(self, item):
try:
iter(item)
return True
except TypeError:
return False
def describe_to(self, description):
description.append_text('an iterable value')
register(IsIterable, 'be_an_iterable')
class IsCallable(BaseMatcher):
""" Check if a value is callable """
def _matches(self, item):
return hasattr(item, '__call__')
def describe_to(self, desc):
desc.append_text('a callable value')
register(IsCallable, 'be_callable', 'be_a_callable_value', 'can_be_called')
class IsNone(BaseMatcher):
""" Check if a value is None """
def _matches(self, item):
return True if item is None else False
def describe_to(self, desc):
desc.append_text('a None')
register(IsNone, 'be_none', 'be_a_none_value')
class IsTrue(BaseMatcher):
""" Check if a value is True """
def _matches(self, item):
return item is True
def describe_to(self, desc):
desc.append_text('a True')
class IsFalse(BaseMatcher):
""" Check if a value is False """
def _matches(self, item):
return item is False
def describe_to(self, desc):
desc.append_text('a False')
class IsTruthy(BaseMatcher):
""" Check if a value is truthy """
def _matches(self, item):
return True if item else False
def describe_to(self, desc):
desc.append_text('a truthy value')
class IsFalsy(BaseMatcher):
""" Check if a value is falsy """
def _matches(self, item):
return True if not item else False
def describe_to(self, desc):
desc.append_text('a falsy value')
register(IsTrue, 'be_true')
register(IsFalse, 'be_false')
register(IsTruthy, 'be_a_truthy_value', 'be_truthy', 'be_ok')
register(IsFalsy, 'be_a_falsy_value', 'be_falsy', 'be_ko')
class IsEmpty(BaseMatcher):
    """Check if a value is empty (i.e. has length zero)."""

    def _matches(self, item):
        # Values without a __len__ are never considered "empty".
        # Catch only TypeError (what len() raises for them) instead of a
        # bare except, which would also swallow KeyboardInterrupt etc.
        try:
            return not bool(len(item))
        except TypeError:
            return False

    def describe_to(self, desc):
        desc.append_text('an empty value')

    def describe_mismatch(self, item, desc):
        try:
            desc.append_text('has {0} elements'.format(len(item)))
        except TypeError:
            desc.append_value(item)
            desc.append_text(' does not have a length')


register(IsEmpty, 'be_empty')
class RaisesError(BaseMatcher):
""" Checks if calling the value raises an error """
def __init__(self, expected=None, message=None, regex=None):
self.expected = expected
self.message = message
self.regex = regex
self.thrown = None
def _matches(self, item):
# support passing a context manager result
if isinstance(item, ContextManagerResult):
# Python <2.7 may provide a non exception value
if isinstance(item.exc_value, Exception):
self.thrown = item.exc_value
elif item.exc_type is not None:
try:
self.thrown = item.exc_type(*item.exc_value)
except TypeError:
self.thrown = item.exc_type(item.exc_value)
else:
return False
else:
try:
# support passing arguments by feeding a tuple instead of a callable
if not callable(item) and getattr(item, '__getitem__', False):
item[0](*item[1:])
else:
item()
return False
except:
# This should capture any kind of raised value
import sys
self.thrown = sys.exc_info()[1]
# Fail if we have defined an expected error type
if self.expected and not isinstance(self.thrown, self.expected):
return False
# Apply message filters
if self.message:
return self.message == str(self.thrown)
elif self.regex:
return re.match(self.regex, str(self.thrown))
return True
def describe_to(self, desc):
if self.thrown and self.message:
desc.append_text('to raise an exception with message "%s"'
% self.message)
elif self.thrown and self.regex:
desc.append_text('to raise an exception matching /%s/'
% self.regex)
else:
desc.append_text('to raise an exception')
if self.expected:
try:
exps = map(lambda x: x.__name__, self.expected)
except:
exps = [self.expected.__name__]
desc.append_text(' of type <%s>' % '>, <'.join(exps))
def describe_mismatch(self, item, desc):
if self.thrown:
desc.append_text('was ')
desc.append_text('<%s>' % self.thrown.__class__.__name__)
if self.message or self.regex:
desc.append_text(' "%s"' % str(self.thrown))
else:
desc.append_text('no exception was raised')
register(RaisesError,
'raise_an_error', 'raise_an_exception',
'raises_an_error', 'raises_an_exception', 'raises', 'raise',
'throw_an_error', 'throw_an_exception',
'throws_an_error', 'throws_an_exception', 'throws', 'throw')
class Changes(BaseMatcher):
    """Checks if calling a value changes something.

    The watched value may be a callable (it will be invoked to sample
    the value before and after) or a plain value sampled directly.
    """

    def __init__(self, watch):
        # BUG FIX: the constructor previously stored the argument as
        # ``self.watch`` while _matches read ``self.watcher``, which made
        # every match raise AttributeError. Keep both names so any code
        # relying on ``.watch`` still works.
        self.watch = self.watcher = watch
        self.before = None
        self.after = None
        self.changed = False

    def _matches(self, item):
        # support passing arguments by feeding a tuple instead of a callable
        if not callable(item) and getattr(item, '__getitem__', False):
            func = item[0]
            params = item[1:]
        else:
            func = item
            params = []
        # Sample the watched value; non-callables are used as-is
        try:
            before = self.watcher()
        except TypeError:
            before = self.watcher
        # keep a snapshot of the value in case it's mutable
        from copy import deepcopy
        self.before = deepcopy(before)
        func(*params)
        try:
            self.after = self.watcher()
        except TypeError:
            self.after = self.watcher
        try:
            hc.assert_that(self.after, hc.equal_to(self.before))
            self.changed = False
        except AssertionError:
            self.changed = True
        return self.changed

    def describe_to(self, desc):
        desc.append_text('change something')

    def describe_mismatch(self, item, desc):
        # To support its proper use when negated we need to check if
        # the values actually changed or not
        if self.changed:
            desc.append_text('did change from ') \
                .append_value(self.before) \
                .append_text(' to ') \
                .append_value(self.after)
        else:
            desc.append_text('it didn\'t change from ') \
                .append_value(self.before)


register(Changes,
         'change', 'changes', 'modify', 'modifies')
class Callback(BaseMatcher):
    """Checks against a user supplied callback.

    The callback should return True to indicate a successful match or
    False to indicate an unsuccessful one. Returning an Expectation is
    also treated as success (no failure was raised while evaluating it).
    """

    def __init__(self, callback):
        self.callback = callback

    def _matches(self, item):
        self.error = None
        try:
            result = self.callback(item)
            # Returning an expectation assumes it's correct (no failure raised)
            from .expectation import Expectation
            return isinstance(result, Expectation) or bool(result)
        except AssertionError:
            # Just forward assertion failures
            raise
        except Exception as ex:
            self.error = str(ex)
            return False

    def describe_to(self, desc):
        # BUG FIX: corrected typo in user-facing text ("passses" -> "passes")
        desc.append_text('passes callback ')
        if (isinstance(self.callback, type(lambda: None))
                and self.callback.__name__ == '<lambda>'):
            desc.append_text(self.callback.__name__)
        else:
            desc.append_text('{0}'.format(self.callback))

    def describe_mismatch(self, item, desc):
        if self.error:
            desc.append_text('gave an exception "%s"' % self.error)
        else:
            desc.append_text('returned False')


register(Callback,
         'callback', 'pass', 'pass_callback')
class MockCalled(BaseMatcher):
""" Support for checking if mocks where called from the Mock library
"""
def _matches(self, item):
if not hasattr(item, 'called'):
raise Exception('Mock object does not have a <called> attribute')
return item.called
def describe_to(self, desc):
desc.append_text('called')
def describe_mismatch(self, item, desc):
if item.called:
desc.append_text('was called')
else:
desc.append_text('was not called')
register(MockCalled, 'called', 'invoked')
class RegexMatcher(BaseMatcher):
""" Checks against a regular expression """
def __init__(self, regex, flags=0):
self.regex = regex
if isinstance(flags, text_types):
self.flags = 0
for ch in flags.upper():
self.flags |= getattr(re, ch)
else:
self.flags = flags
def _matches(self, item):
# Make sure we are matching against a string
hc.assert_that(item, IsString())
match = re.search(self.regex, item, self.flags)
return match is not None
def describe_to(self, desc):
desc.append_text('matching ')
desc.append_text('/{0}/'.format(self.regex))
register(RegexMatcher, 'match', 'match_regex', 'match_regexp', 'be_matched_by')
class IsObjectContainingEntries(IsDictContainingEntries):
"""Matches if object has the properties from a given dict whose values and
keys satisfy a given matcher.
Examples::
:param inst: The instance or class.
:param mismatch_description: The description in case of failure.
have_properties({
'prop1': should.eq('value1'),
'prop2': should.eq('value2')
})
"""
hidden = set(['should_not', 'should_all', 'should_any', 'should', 'should_none'])
def __init__(self, value_matchers=None, **kwargs):
base_dict = {}
if value_matchers is None:
value_matchers = kwargs
for key, value in value_matchers.items():
base_dict[key] = wrap_matcher(value)
super(IsObjectContainingEntries, self).__init__(base_dict)
def matches(self, inst, mismatch_description=None):
# Make sure we are matching against a dict
try:
keys = dir(inst)
attributes = dict(
(key, getattr(inst, key))
for key in dir(inst)
if not key.startswith('__')
and key not in IsObjectContainingEntries.hidden
)
except Exception as ex:
if mismatch_description:
mismatch_description.append_text(
'unable to extract attributes from value: {0}'.format(ex))
return False
return super(IsObjectContainingEntries, self).matches(
attributes, mismatch_description)
def describe_to(self, desc):
desc.append_text('a class as ')
super(IsObjectContainingEntries, self).describe_to(desc)
register(IsObjectContainingEntries,
'have_the_properties', 'contain_the_properties', 'have_the_attributes', 'contain_the_attributes',
'have_props', 'contain_props', 'have_attrs', 'contain_attrs')
class IsSequenceContainingEveryInOrderSparse(IsSequenceContainingEvery):
"""
Matches if a list contains every given element in the same order but with
optional interleaved items.
No optional elements matching the required ones are allowed.
Mismatch description prioritizes missing items over wrong order.
e.g. [1, 3, 4] IsSequenceContainingEveryInOrder [1, 4]
but NOT IsSequenceContainingEveryInOrder [4, 1]
and NOT IsSequenceContainingEveryInOrder [1, 4, 4]
"""
def __init__(self, *element_matchers):
delegates = [hc.has_item(e) for e in element_matchers]
self.matcher_all = hc.all_of(*delegates)
self.matcher_any = hc.any_of(*delegates)
self.matcher_order = hc.contains(*element_matchers)
self.order_seq = None
def _matches(self, sequence):
self.order_seq = None
try:
seq = list(sequence)
if self.matcher_all.matches(seq):
self.order_seq = [i for i in seq if self.matcher_any.matches([i])]
return self.matcher_order.matches(self.order_seq)
else:
return False
except TypeError:
return False
def describe_mismatch(self, item, mismatch_description):
if self.order_seq is None:
mismatch_description.append_text(' instead of a ')
self.matcher_all.describe_mismatch(item, mismatch_description)
else:
self.matcher_order.describe_mismatch(self.order_seq, mismatch_description)
mismatch_description.append_text(
' from candidate list elements: '
).append_description_of(self.order_seq).append_text(
' that satisfied those conditions from '
).append_description_of(item)
def describe_to(self, description):
self.matcher_all.describe_to(description)
description.append_text(' in this specific order')
register(IsSequenceContainingEveryInOrderSparse,
'contain_sparse', 'have_sparse', 'contain_sparse_in_order',
'contain_in_order_sparse', 'have_every_in_order_sparse',
'have_in_order_sparse', 'contain_every_in_order_sparse')
|
drslump/pyshould
|
pyshould/matchers.py
|
normalize
|
python
|
def normalize(alias):
# Convert from CamelCase to snake_case
alias = re.sub(r'([a-z])([A-Z])', r'\1_\2', alias)
# Ignore words
words = alias.lower().split('_')
words = filter(lambda w: w not in IGNORED_WORDS, words)
return '_'.join(words)
|
Normalizes an alias by removing adverbs defined in IGNORED_WORDS
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L94-L102
| null |
"""
Defines the registry of matchers and the standard set of matchers
"""
import re
from datetime import datetime, date
import hamcrest as hc
from difflib import get_close_matches
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.library.collection.isdict_containingentries import IsDictContainingEntries
from hamcrest.library.collection.issequence_containing import IsSequenceContainingEvery
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Words to ignore when looking up matchers
IGNORED_WORDS = ['should', 'to', 'be', 'a', 'an', 'is', 'the', 'as']
# Map of registered matchers as alias:callable
matchers = {}
# Map of normalized matcher aliases as normalized:alias
normalized = {}
# Help messages associated to matchers
helpmatchers = {}
# All textual representation types in Python 2/3
try:
text_types = (basestring, str, unicode) # python 2
except NameError:
text_types = (str,)
class ContextManagerResult(object):
""" When an expression is used in a `with` statement we capture the params
in the __exit__ method of the expression context manager with this class,
this allows to pass it to the matchers as the value to test, which is mostly
useful for the raise/throw one.
"""
def __init__(self, type_, value, trace):
self.exc_type = type_
self.exc_value = value
self.trace = trace
def __str__(self):
""" Provide a suitable description of the exception for AnyOf/AllOf matchers """
return repr(self.exc_value)
def register(matcher, *aliases):
""" Register a matcher associated to one or more aliases. Each alias
given is also normalized.
"""
docstr = matcher.__doc__ if matcher.__doc__ is not None else ''
helpmatchers[matcher] = docstr.strip()
for alias in aliases:
matchers[alias] = matcher
# Map a normalized version of the alias
norm = normalize(alias)
normalized[norm] = alias
# Map a version without snake case
norm = norm.replace('_', '')
normalized[norm] = alias
def unregister(matcher):
    """Unregister a matcher (or alias) from the registry.

    Returns True when at least one alias was removed.
    """
    # If it's a string handle it like an alias
    if isinstance(matcher, text_types) and matcher in matchers:
        matcher = matchers[matcher]
    # Find all aliases associated to the matcher.
    # BUG FIX: use .items() — dict.iteritems() does not exist on
    # Python 3, which this module otherwise supports (see text_types).
    # Materialize the lists first because we mutate the dicts below.
    aliases = [k for k, v in matchers.items() if v == matcher]
    for alias in aliases:
        del matchers[alias]
        # Clean up the normalized versions pointing at this alias
        norms = [k for k, v in normalized.items() if v == alias]
        for norm in norms:
            del normalized[norm]
    # Remove help docstring
    if matcher in helpmatchers:
        del helpmatchers[matcher]
    return len(aliases) > 0
def lookup(alias):
""" Tries to find a matcher callable associated to the given alias. If
an exact match does not exists it will try normalizing it and even
removing underscores to find one.
"""
if alias in matchers:
return matchers[alias]
else:
norm = normalize(alias)
if norm in normalized:
alias = normalized[norm]
return matchers[alias]
# Check without snake case
if -1 != alias.find('_'):
norm = normalize(alias).replace('_', '')
return lookup(norm)
return None
def suggest(alias, max=3, cutoff=0.5):
""" Suggest a list of aliases which are similar enough
"""
aliases = matchers.keys()
similar = get_close_matches(alias, aliases, n=max, cutoff=cutoff)
return similar
def aliases():
""" Obtain the list of aliases """
return list(matchers.keys())
def alias_help(alias):
""" Get help for the given alias """
matcher = lookup(alias)
if not matcher:
return None
return helpmatchers.get(matcher)
# Matchers should be defined with verbose aliases to allow the use of
# natural english where possible. When looking up a matcher common adverbs
# like 'to', 'be' or 'is' are ignored in the comparison.
register(hc.equal_to,
'be_equal_to', 'be_equals_to', 'be_eql_to', 'be_eq_to')
register(hc.instance_of,
'be_an_instance_of', 'be_a', 'be_an')
register(hc.same_instance,
'be_the_same_instance_as', 'be_the_same_as', 'be')
register(hc.has_entry,
'have_the_entry', 'contain_the_entry')
register(hc.has_entries,
'have_the_entries', 'contain_the_entries')
register(hc.has_key,
'have_the_key', 'contain_the_key')
register(hc.has_value,
'have_the_value', 'contain_the_value')
register(hc.is_in,
'be_in', 'be_into', 'be_contained_in')
register(hc.has_item,
'have_the_item', 'contain_the_item')
register(hc.has_items,
'have_the_items', 'contain_the_items')
register(hc.contains_inanyorder,
'have_in_any_order', 'contain_in_any_order')
register(hc.contains,
'have', 'contain')
register(hc.only_contains,
'have_only', 'contain_only')
register(hc.close_to,
'be_close_to')
register(hc.greater_than,
'be_greater_than', 'be_greater', 'be_gt',
'be_above',
'be_more_than', 'be_more')
register(hc.greater_than_or_equal_to,
'be_greater_than_or_equal_to', 'be_greater_or_equal', 'be_ge',
'be_more_than_or_equal', 'be_more_or_equal',
'be_at_least')
register(hc.less_than,
'be_less_than', 'be_less', 'be_lt', 'be_below')
register(hc.less_than_or_equal_to,
'be_less_than_or_equal_to', 'be_less_or_equal', 'be_le',
'be_at_most')
register(hc.has_length,
'have_length', 'have_len')
register(hc.has_property,
'have_the_property', 'contain_the_property',
'have_the_prop', 'contain_the_prop')
register(hc.has_string,
'have_the_string', 'contain_the_string')
register(hc.equal_to_ignoring_case,
'be_equal_to_ignoring_case')
register(hc.equal_to_ignoring_whitespace,
'be_equal_to_ignoring_whitespace')
register(hc.contains_string,
'substr', 'have_the_substr', 'contain_the_substr',
'substring', 'have_the_substring', 'contain_the_substring')
register(hc.ends_with,
'end_with')
register(hc.starts_with,
'start_with', 'begin_with')
register(hc.anything,
'be_anything', 'be_any')
class TypeMatcher(BaseMatcher):
def _matches(self, item):
return isinstance(item, self.__class__.types)
def describe_to(self, description):
description.append_text(self.__class__.expected)
def describe_mismatch(self, item, description):
description.append_text('was a %s ' % item.__class__.__name__)
description.append_description_of(item)
@classmethod
def __call__(cls, *args, **kwargs):
return cls()
class IsInteger(TypeMatcher):
""" Check if the value is an integer """
try:
types = (int, long)
except:
types = (int) # Python 3
expected = 'an integer'
class IsFloat(TypeMatcher):
""" Check if the value is a float """
types = float
expected = 'a float'
class IsComplex(TypeMatcher):
""" Check if the value is a complex number """
types = complex
expected = 'a complex number'
class IsNumeric(TypeMatcher):
""" Check if the value is a numeric type """
try:
types = (int, long, float, complex) # python 2
except NameError:
types = (int, float, complex)
expected = 'a numeric type'
class IsString(TypeMatcher):
""" Check if the value is a string """
types = text_types
expected = 'a string'
class IsStr(TypeMatcher):
""" Check if the value is a str """
try:
types = (basestring, str) # python 2
except NameError:
types = str
expected = 'a str'
class IsUnicode(TypeMatcher):
""" Check if the value is an unicode string """
try:
types = unicode # python 2
except NameError:
types = str
expected = 'a unicode string'
class IsBinary(TypeMatcher):
""" Check if value is a binary string """
try:
types = bytes # python 3
except NameError:
types = str
expected = 'a binary string'
class IsByteArray(TypeMatcher):
""" Check if the value is a bytearray """
types = bytearray
expected = 'a bytearray'
class IsDict(TypeMatcher):
""" Check if the value is a dict """
types = dict
expected = 'a dict'
class IsList(TypeMatcher):
""" Check if the value is a list """
types = list
expected = 'a list'
class IsTuple(TypeMatcher):
""" Check if the value is a tuple """
types = tuple
expected = 'a tuple'
class IsSet(TypeMatcher):
""" Check if the value is a set """
types = set
expected = 'a set'
class IsFrozenSet(TypeMatcher):
""" Check if the value is a frozenset """
types = frozenset
expected = 'a frozenset'
class IsBool(TypeMatcher):
""" Check if the value is a bool """
types = bool
expected = 'a bool'
class IsFunction(TypeMatcher):
""" Check if the value is a function """
import types
types = types.FunctionType
expected = 'a function'
class IsGenerator(BaseMatcher):
""" Checks if the value is a generator function """
def _matches(self, item):
import inspect
return inspect.isgeneratorfunction(item)
def describe_to(self, desc):
desc.append_text('a generator function')
class IsClass(BaseMatcher):
""" Check if the value is a class """
def _matches(self, item):
import inspect
return inspect.isclass(item)
def describe_to(self, desc):
desc.append_text('a class')
class IsDate(TypeMatcher):
""" Check if the value is a date """
types = (datetime, date)
expected = 'a date'
register(IsInteger, 'be_an_integer', 'be_an_int')
register(IsFloat, 'be_a_float')
register(IsComplex, 'be_a_complex_number', 'be_a_complex')
register(IsNumeric, 'be_numeric')
register(IsString, 'be_a_string')
register(IsStr, 'be_a_str')
register(IsUnicode, 'be_an_unicode_string', 'be_an_unicode')
register(IsBinary, 'be_a_binary_string', 'be_a_binary')
register(IsByteArray, 'be_a_bytearray', 'be_a_byte_array')
register(IsDict, 'be_a_dictionary', 'be_a_dict')
register(IsList, 'be_a_list', 'be_an_array')
register(IsTuple, 'be_a_tuple')
register(IsSet, 'be_a_set')
register(IsFrozenSet, 'be_a_frozenset', 'be_a_frozen_set')
register(IsFunction, 'be_a_function', 'be_a_func')
register(IsBool, 'be_a_boolean', 'be_a_bool')
register(IsGenerator, 'be_a_generator')
register(IsClass, 'be_a_class')
register(IsDate, 'be_a_date')
class IsIterable(BaseMatcher):
""" Checks if a value is iterable """
def _matches(self, item):
try:
iter(item)
return True
except TypeError:
return False
def describe_to(self, description):
description.append_text('an iterable value')
register(IsIterable, 'be_an_iterable')
class IsCallable(BaseMatcher):
""" Check if a value is callable """
def _matches(self, item):
return hasattr(item, '__call__')
def describe_to(self, desc):
desc.append_text('a callable value')
register(IsCallable, 'be_callable', 'be_a_callable_value', 'can_be_called')
class IsNone(BaseMatcher):
""" Check if a value is None """
def _matches(self, item):
return True if item is None else False
def describe_to(self, desc):
desc.append_text('a None')
register(IsNone, 'be_none', 'be_a_none_value')
class IsTrue(BaseMatcher):
""" Check if a value is True """
def _matches(self, item):
return item is True
def describe_to(self, desc):
desc.append_text('a True')
class IsFalse(BaseMatcher):
""" Check if a value is False """
def _matches(self, item):
return item is False
def describe_to(self, desc):
desc.append_text('a False')
class IsTruthy(BaseMatcher):
""" Check if a value is truthy """
def _matches(self, item):
return True if item else False
def describe_to(self, desc):
desc.append_text('a truthy value')
class IsFalsy(BaseMatcher):
""" Check if a value is falsy """
def _matches(self, item):
return True if not item else False
def describe_to(self, desc):
desc.append_text('a falsy value')
register(IsTrue, 'be_true')
register(IsFalse, 'be_false')
register(IsTruthy, 'be_a_truthy_value', 'be_truthy', 'be_ok')
register(IsFalsy, 'be_a_falsy_value', 'be_falsy', 'be_ko')
class IsEmpty(BaseMatcher):
    """ Check if a value is empty (its length is zero) """

    def _matches(self, item):
        try:
            return not bool(len(item))
        except Exception:
            # Values without a usable length are never considered empty.
            # (narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are not swallowed)
            return False

    def describe_to(self, desc):
        desc.append_text('an empty value')

    def describe_mismatch(self, item, desc):
        try:
            count = len(item)
            desc.append_text('has {0} elements'.format(count))
        except Exception:
            desc.append_value(item)
            desc.append_text(' does not have a length')
register(IsEmpty, 'be_empty')
class RaisesError(BaseMatcher):
    """ Checks if calling the value raises an error.

    The raised error can optionally be filtered by type (``expected`` may
    be a single exception class or a tuple of them), by exact ``message``
    or by a ``regex`` applied to the message.
    """

    def __init__(self, expected=None, message=None, regex=None):
        self.expected = expected    # exception class (or tuple of classes)
        self.message = message      # exact message to compare against
        self.regex = regex          # regex the message must match
        self.thrown = None          # the exception actually raised

    def _matches(self, item):
        # support passing a context manager result
        if isinstance(item, ContextManagerResult):
            # Python <2.7 may provide a non exception value
            if isinstance(item.exc_value, Exception):
                self.thrown = item.exc_value
            elif item.exc_type is not None:
                try:
                    self.thrown = item.exc_type(*item.exc_value)
                except TypeError:
                    self.thrown = item.exc_type(item.exc_value)
            else:
                return False
        else:
            try:
                # support passing arguments by feeding a tuple instead of a callable
                if not callable(item) and getattr(item, '__getitem__', False):
                    item[0](*item[1:])
                else:
                    item()
                return False
            except:
                # Deliberately bare: this should capture any kind of raised value
                import sys
                self.thrown = sys.exc_info()[1]

        # Fail if we have defined an expected error type
        if self.expected and not isinstance(self.thrown, self.expected):
            return False

        # Apply message filters
        if self.message:
            return self.message == str(self.thrown)
        elif self.regex:
            return re.match(self.regex, str(self.thrown)) is not None

        return True

    def describe_to(self, desc):
        if self.thrown and self.message:
            desc.append_text('to raise an exception with message "%s"'
                             % self.message)
        elif self.thrown and self.regex:
            desc.append_text('to raise an exception matching /%s/'
                             % self.regex)
        else:
            desc.append_text('to raise an exception')

        if self.expected:
            # ``expected`` may be one class or a tuple/list of classes.
            # NOTE: the old ``try: map(...) except:`` probe relied on map()
            # raising eagerly; on Python 3 map is lazy, so a single class
            # would blow up inside ``join`` instead of the except branch.
            if isinstance(self.expected, (tuple, list)):
                exps = [x.__name__ for x in self.expected]
            else:
                exps = [self.expected.__name__]
            desc.append_text(' of type <%s>' % '>, <'.join(exps))

    def describe_mismatch(self, item, desc):
        if self.thrown:
            desc.append_text('was ')
            desc.append_text('<%s>' % self.thrown.__class__.__name__)
            if self.message or self.regex:
                desc.append_text(' "%s"' % str(self.thrown))
        else:
            desc.append_text('no exception was raised')
register(RaisesError,
'raise_an_error', 'raise_an_exception',
'raises_an_error', 'raises_an_exception', 'raises', 'raise',
'throw_an_error', 'throw_an_exception',
'throws_an_error', 'throws_an_exception', 'throws', 'throw')
class Changes(BaseMatcher):
    """ Checks if calling a value changes something.

    The watched value may be a callable (re-evaluated before and after the
    call) or a plain value (used as-is).  The item under test may itself be
    a callable or a ``(callable, arg1, ...)`` tuple.
    """

    def __init__(self, watch):
        self.watch = watch
        self.before = None
        self.after = None
        self.changed = False

    def _matches(self, item):
        # support passing arguments by feeding a tuple instead of a callable
        if not callable(item) and getattr(item, '__getitem__', False):
            func = item[0]
            params = item[1:]
        else:
            func = item
            params = []

        # NOTE: fixes AttributeError -- the constructor stores the watched
        # value as ``self.watch``, not ``self.watcher``.
        try:
            before = self.watch()
        except TypeError:
            before = self.watch

        # keep a snapshot of the value in case it's mutable
        from copy import deepcopy
        self.before = deepcopy(before)

        func(*params)

        try:
            self.after = self.watch()
        except TypeError:
            self.after = self.watch

        # Use hamcrest equality so matcher-aware comparisons apply
        try:
            hc.assert_that(self.after, hc.equal_to(self.before))
            self.changed = False
        except AssertionError:
            self.changed = True

        return self.changed

    def describe_to(self, desc):
        desc.append_text('change something')

    def describe_mismatch(self, item, desc):
        # To support its proper use when negated we need to check if
        # the values actually changed or not
        if self.changed:
            desc.append_text('did change from ') \
                .append_value(self.before) \
                .append_text(' to ') \
                .append_value(self.after)
        else:
            desc.append_text('it didn\'t change from ') \
                .append_value(self.before)
register(Changes,
'change', 'changes', 'modify', 'modifies')
class Callback(BaseMatcher):
    """ Checks against a user supplied callback.  The callback should
    return True to indicate a successful match or False to indicate an
    unsuccessful one.
    """

    def __init__(self, callback):
        self.callback = callback
        self.error = None  # message of the last exception raised by the callback

    def _matches(self, item):
        self.error = None
        try:
            result = self.callback(item)
            # Returning an expectation assumes it's correct (no failure raised)
            from .expectation import Expectation
            return isinstance(result, Expectation) or bool(result)
        except AssertionError:
            # Just forward assertion failures
            raise
        except Exception as ex:
            self.error = str(ex)
            return False

    def describe_to(self, desc):
        # NOTE: fixes the misspelled "passses" in the failure description
        desc.append_text('passes callback ')
        if (isinstance(self.callback, type(lambda: None))
                and self.callback.__name__ == '<lambda>'):
            desc.append_text(self.callback.__name__)
        else:
            desc.append_text('{0}'.format(self.callback))

    def describe_mismatch(self, item, desc):
        if self.error:
            desc.append_text('gave an exception "%s"' % self.error)
        else:
            desc.append_text('returned False')
register(Callback,
'callback', 'pass', 'pass_callback')
class MockCalled(BaseMatcher):
""" Support for checking if mocks where called from the Mock library
"""
def _matches(self, item):
if not hasattr(item, 'called'):
raise Exception('Mock object does not have a <called> attribute')
return item.called
def describe_to(self, desc):
desc.append_text('called')
def describe_mismatch(self, item, desc):
if item.called:
desc.append_text('was called')
else:
desc.append_text('was not called')
register(MockCalled, 'called', 'invoked')
class RegexMatcher(BaseMatcher):
    """ Matches a string value against a regular expression.

    ``flags`` may be the numeric ``re`` flags or a string of flag letters
    (e.g. ``"im"`` for IGNORECASE | MULTILINE).
    """

    def __init__(self, regex, flags=0):
        self.regex = regex
        if not isinstance(flags, text_types):
            self.flags = flags
        else:
            # Translate each letter into the corresponding re.* constant
            combined = 0
            for letter in flags.upper():
                combined |= getattr(re, letter)
            self.flags = combined

    def _matches(self, item):
        # Make sure we are matching against a string
        hc.assert_that(item, IsString())
        return re.search(self.regex, item, self.flags) is not None

    def describe_to(self, desc):
        desc.append_text('matching ')
        desc.append_text('/{0}/'.format(self.regex))
register(RegexMatcher, 'match', 'match_regex', 'match_regexp', 'be_matched_by')
class IsObjectContainingEntries(IsDictContainingEntries):
    """Matches if an object has the properties from a given dict whose
    values and keys satisfy a given matcher.

    Examples::

        have_properties({
            'prop1': should.eq('value1'),
            'prop2': should.eq('value2')
        })
    """

    # Expectation plumbing attributes that must not be treated as
    # regular object properties.
    hidden = set(['should_not', 'should_all', 'should_any', 'should', 'should_none'])

    def __init__(self, value_matchers=None, **kwargs):
        # Accept either an explicit dict of matchers or keyword arguments.
        base_dict = {}
        if value_matchers is None:
            value_matchers = kwargs
        for key, value in value_matchers.items():
            base_dict[key] = wrap_matcher(value)
        super(IsObjectContainingEntries, self).__init__(base_dict)

    def matches(self, inst, mismatch_description=None):
        """Extract the attributes of ``inst`` and match them as a dict.

        :param inst: The instance or class to inspect.
        :param mismatch_description: The description in case of failure.
        """
        try:
            # NOTE: removed an unused ``keys = dir(inst)`` local that
            # duplicated the ``dir`` call below.
            attributes = dict(
                (key, getattr(inst, key))
                for key in dir(inst)
                if not key.startswith('__')
                and key not in IsObjectContainingEntries.hidden
            )
        except Exception as ex:
            if mismatch_description:
                mismatch_description.append_text(
                    'unable to extract attributes from value: {0}'.format(ex))
            return False

        return super(IsObjectContainingEntries, self).matches(
            attributes, mismatch_description)

    def describe_to(self, desc):
        desc.append_text('a class as ')
        super(IsObjectContainingEntries, self).describe_to(desc)
register(IsObjectContainingEntries,
'have_the_properties', 'contain_the_properties', 'have_the_attributes', 'contain_the_attributes',
'have_props', 'contain_props', 'have_attrs', 'contain_attrs')
class IsSequenceContainingEveryInOrderSparse(IsSequenceContainingEvery):
    """
    Matches if a list contains every given element in the same order but with
    optional interleaved items.
    No optional elements matching the required ones are allowed.
    Mismatch description prioritizes missing items over wrong order.
    e.g. [1, 3, 4] IsSequenceContainingEveryInOrder [1, 4]
    but NOT IsSequenceContainingEveryInOrder [4, 1]
    and NOT IsSequenceContainingEveryInOrder [1, 4, 4]
    """
    def __init__(self, *element_matchers):
        # Three delegate matchers cooperate:
        #   matcher_all   - every required element is present somewhere
        #   matcher_any   - does an item satisfy at least one requirement?
        #   matcher_order - the satisfying items appear in the given order
        delegates = [hc.has_item(e) for e in element_matchers]
        self.matcher_all = hc.all_of(*delegates)
        self.matcher_any = hc.any_of(*delegates)
        self.matcher_order = hc.contains(*element_matchers)
        # Subsequence of candidate items; None means presence check failed
        self.order_seq = None
    def _matches(self, sequence):
        self.order_seq = None
        try:
            seq = list(sequence)
            # First require that every element is present at all ...
            if self.matcher_all.matches(seq):
                # ... then keep only the items that satisfy some matcher and
                # check they appear exactly in the requested order.
                # Each item is wrapped in a singleton list because the
                # delegates are has_item() sequence matchers.
                self.order_seq = [i for i in seq if self.matcher_any.matches([i])]
                return self.matcher_order.matches(self.order_seq)
            else:
                return False
        except TypeError:
            # Non-iterable values never match
            return False
    def describe_mismatch(self, item, mismatch_description):
        # order_seq is None when the presence check already failed, so
        # report missing items before complaining about ordering.
        if self.order_seq is None:
            mismatch_description.append_text(' instead of a ')
            self.matcher_all.describe_mismatch(item, mismatch_description)
        else:
            self.matcher_order.describe_mismatch(self.order_seq, mismatch_description)
            mismatch_description.append_text(
                ' from candidate list elements: '
            ).append_description_of(self.order_seq).append_text(
                ' that satisfied those conditions from '
            ).append_description_of(item)
    def describe_to(self, description):
        self.matcher_all.describe_to(description)
        description.append_text(' in this specific order')
register(IsSequenceContainingEveryInOrderSparse,
'contain_sparse', 'have_sparse', 'contain_sparse_in_order',
'contain_in_order_sparse', 'have_every_in_order_sparse',
'have_in_order_sparse', 'contain_every_in_order_sparse')
|
drslump/pyshould
|
pyshould/matchers.py
|
lookup
|
python
|
def lookup(alias):
if alias in matchers:
return matchers[alias]
else:
norm = normalize(alias)
if norm in normalized:
alias = normalized[norm]
return matchers[alias]
# Check without snake case
if -1 != alias.find('_'):
norm = normalize(alias).replace('_', '')
return lookup(norm)
return None
|
Tries to find a matcher callable associated with the given alias. If
an exact match does not exist, it will try normalizing the alias and
even removing underscores to find one.
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L105-L124
|
[
"def normalize(alias):\n \"\"\" Normalizes an alias by removing adverbs defined in IGNORED_WORDS\n \"\"\"\n # Convert from CamelCase to snake_case\n alias = re.sub(r'([a-z])([A-Z])', r'\\1_\\2', alias)\n # Ignore words\n words = alias.lower().split('_')\n words = filter(lambda w: w not in IGNORED_WORDS, words)\n return '_'.join(words)\n",
"def lookup(alias):\n \"\"\" Tries to find a matcher callable associated to the given alias. If\n an exact match does not exists it will try normalizing it and even\n removing underscores to find one.\n \"\"\"\n\n if alias in matchers:\n return matchers[alias]\n else:\n norm = normalize(alias)\n if norm in normalized:\n alias = normalized[norm]\n return matchers[alias]\n\n # Check without snake case\n if -1 != alias.find('_'):\n norm = normalize(alias).replace('_', '')\n return lookup(norm)\n\n return None\n"
] |
"""
Defines the registry of matchers and the standard set of matchers
"""
import re
from datetime import datetime, date
import hamcrest as hc
from difflib import get_close_matches
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.library.collection.isdict_containingentries import IsDictContainingEntries
from hamcrest.library.collection.issequence_containing import IsSequenceContainingEvery
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Words to ignore when looking up matchers
IGNORED_WORDS = ['should', 'to', 'be', 'a', 'an', 'is', 'the', 'as']
# Map of registered matchers as alias:callable
matchers = {}
# Map of normalized matcher aliases as normalized:alias
normalized = {}
# Help messages associated to matchers
helpmatchers = {}
# All textual representation types in Python 2/3
try:
text_types = (basestring, str, unicode) # python 2
except NameError:
text_types = (str,)
class ContextManagerResult(object):
    """ Captures the arguments received by ``__exit__`` when an expression
    is used in a ``with`` statement.  The instance is then handed to the
    matchers as the value under test, which is mostly useful for the
    raise/throw matcher.
    """

    def __init__(self, type_, value, trace):
        # Mirror the (type, value, traceback) triple passed to __exit__
        self.exc_type, self.exc_value, self.trace = type_, value, trace

    def __str__(self):
        """ Describe the captured exception for AnyOf/AllOf matchers """
        return repr(self.exc_value)
def register(matcher, *aliases):
    """ Register a matcher under one or more aliases.

    Each alias is additionally indexed by its normalized form and by the
    normalized form with underscores stripped, to ease later lookups.
    """
    doc = matcher.__doc__ if matcher.__doc__ is not None else ''
    helpmatchers[matcher] = doc.strip()

    for alias in aliases:
        matchers[alias] = matcher
        norm = normalize(alias)
        # Map both the normalized alias and its no-underscore variant
        for key in (norm, norm.replace('_', '')):
            normalized[key] = alias
def unregister(matcher):
    """ Unregister a matcher (or one of its aliases) from the registry.

    :param matcher: a matcher callable or an alias string
    :returns: True when at least one alias was removed
    """
    # If it's a string handle it like an alias
    if isinstance(matcher, text_types) and matcher in matchers:
        matcher = matchers[matcher]

    # Find all aliases associated to the matcher.
    # NOTE: ``dict.iteritems`` does not exist on Python 3; ``items()`` works
    # on both, and the snapshot lists let us mutate the dicts safely below.
    aliases = [k for k, v in list(matchers.items()) if v == matcher]
    for alias in aliases:
        del matchers[alias]
        # Clean up the normalized versions of this alias
        norms = [k for k, v in list(normalized.items()) if v == alias]
        for norm in norms:
            del normalized[norm]

    # Remove help docstring
    if matcher in helpmatchers:
        del helpmatchers[matcher]

    return len(aliases) > 0
def normalize(alias):
    """ Normalize an alias by converting CamelCase to snake_case and
    dropping the adverbs listed in IGNORED_WORDS.
    """
    # CamelCase -> snake_case
    snake = re.sub(r'([a-z])([A-Z])', r'\1_\2', alias)
    # Drop ignored filler words
    kept = [w for w in snake.lower().split('_') if w not in IGNORED_WORDS]
    return '_'.join(kept)
def suggest(alias, max=3, cutoff=0.5):
    """ Return up to ``max`` registered aliases that are similar enough to
    the given one (see ``difflib.get_close_matches``).
    """
    return get_close_matches(alias, matchers.keys(), n=max, cutoff=cutoff)
def aliases():
    """ Return the list of registered matcher aliases """
    return list(matchers)
def alias_help(alias):
    """ Return the help docstring for the given alias, or None when the
    alias does not resolve to a registered matcher.
    """
    matcher = lookup(alias)
    return helpmatchers.get(matcher) if matcher else None
# Matchers should be defined with verbose aliases to allow the use of
# natural english where possible. When looking up a matcher common adverbs
# like 'to', 'be' or 'is' are ignored in the comparison.
register(hc.equal_to,
'be_equal_to', 'be_equals_to', 'be_eql_to', 'be_eq_to')
register(hc.instance_of,
'be_an_instance_of', 'be_a', 'be_an')
register(hc.same_instance,
'be_the_same_instance_as', 'be_the_same_as', 'be')
register(hc.has_entry,
'have_the_entry', 'contain_the_entry')
register(hc.has_entries,
'have_the_entries', 'contain_the_entries')
register(hc.has_key,
'have_the_key', 'contain_the_key')
register(hc.has_value,
'have_the_value', 'contain_the_value')
register(hc.is_in,
'be_in', 'be_into', 'be_contained_in')
register(hc.has_item,
'have_the_item', 'contain_the_item')
register(hc.has_items,
'have_the_items', 'contain_the_items')
register(hc.contains_inanyorder,
'have_in_any_order', 'contain_in_any_order')
register(hc.contains,
'have', 'contain')
register(hc.only_contains,
'have_only', 'contain_only')
register(hc.close_to,
'be_close_to')
register(hc.greater_than,
'be_greater_than', 'be_greater', 'be_gt',
'be_above',
'be_more_than', 'be_more')
register(hc.greater_than_or_equal_to,
'be_greater_than_or_equal_to', 'be_greater_or_equal', 'be_ge',
'be_more_than_or_equal', 'be_more_or_equal',
'be_at_least')
register(hc.less_than,
'be_less_than', 'be_less', 'be_lt', 'be_below')
register(hc.less_than_or_equal_to,
'be_less_than_or_equal_to', 'be_less_or_equal', 'be_le',
'be_at_most')
register(hc.has_length,
'have_length', 'have_len')
register(hc.has_property,
'have_the_property', 'contain_the_property',
'have_the_prop', 'contain_the_prop')
register(hc.has_string,
'have_the_string', 'contain_the_string')
register(hc.equal_to_ignoring_case,
'be_equal_to_ignoring_case')
register(hc.equal_to_ignoring_whitespace,
'be_equal_to_ignoring_whitespace')
register(hc.contains_string,
'substr', 'have_the_substr', 'contain_the_substr',
'substring', 'have_the_substring', 'contain_the_substring')
register(hc.ends_with,
'end_with')
register(hc.starts_with,
'start_with', 'begin_with')
register(hc.anything,
'be_anything', 'be_any')
class TypeMatcher(BaseMatcher):
    """ Base class for type-checking matchers.

    Subclasses define two class attributes: ``types`` (a type or tuple of
    types fed to isinstance) and ``expected`` (a human readable name of
    the expected type used in descriptions).
    """
    def _matches(self, item):
        return isinstance(item, self.__class__.types)
    def describe_to(self, description):
        description.append_text(self.__class__.expected)
    def describe_mismatch(self, item, description):
        description.append_text('was a %s ' % item.__class__.__name__)
        description.append_description_of(item)
    @classmethod
    def __call__(cls, *args, **kwargs):
        # Calling the class (or an instance -- the lookup goes through the
        # class) yields a fresh instance, so registered aliases can be used
        # with or without parentheses; any arguments are ignored.
        return cls()
class IsInteger(TypeMatcher):
""" Check if the value is an integer """
try:
types = (int, long)
except:
types = (int) # Python 3
expected = 'an integer'
class IsFloat(TypeMatcher):
""" Check if the value is a float """
types = float
expected = 'a float'
class IsComplex(TypeMatcher):
""" Check if the value is a complex number """
types = complex
expected = 'a complex number'
class IsNumeric(TypeMatcher):
""" Check if the value is a numeric type """
try:
types = (int, long, float, complex) # python 2
except NameError:
types = (int, float, complex)
expected = 'a numeric type'
class IsString(TypeMatcher):
""" Check if the value is a string """
types = text_types
expected = 'a string'
class IsStr(TypeMatcher):
""" Check if the value is a str """
try:
types = (basestring, str) # python 2
except NameError:
types = str
expected = 'a str'
class IsUnicode(TypeMatcher):
""" Check if the value is an unicode string """
try:
types = unicode # python 2
except NameError:
types = str
expected = 'a unicode string'
class IsBinary(TypeMatcher):
""" Check if value is a binary string """
try:
types = bytes # python 3
except NameError:
types = str
expected = 'a binary string'
class IsByteArray(TypeMatcher):
""" Check if the value is a bytearray """
types = bytearray
expected = 'a bytearray'
class IsDict(TypeMatcher):
""" Check if the value is a dict """
types = dict
expected = 'a dict'
class IsList(TypeMatcher):
""" Check if the value is a list """
types = list
expected = 'a list'
class IsTuple(TypeMatcher):
""" Check if the value is a tuple """
types = tuple
expected = 'a tuple'
class IsSet(TypeMatcher):
""" Check if the value is a set """
types = set
expected = 'a set'
class IsFrozenSet(TypeMatcher):
""" Check if the value is a frozenset """
types = frozenset
expected = 'a frozenset'
class IsBool(TypeMatcher):
""" Check if the value is a bool """
types = bool
expected = 'a bool'
class IsFunction(TypeMatcher):
""" Check if the value is a function """
import types
types = types.FunctionType
expected = 'a function'
class IsGenerator(BaseMatcher):
""" Checks if the value is a generator function """
def _matches(self, item):
import inspect
return inspect.isgeneratorfunction(item)
def describe_to(self, desc):
desc.append_text('a generator function')
class IsClass(BaseMatcher):
""" Check if the value is a class """
def _matches(self, item):
import inspect
return inspect.isclass(item)
def describe_to(self, desc):
desc.append_text('a class')
class IsDate(TypeMatcher):
""" Check if the value is a date """
types = (datetime, date)
expected = 'a date'
register(IsInteger, 'be_an_integer', 'be_an_int')
register(IsFloat, 'be_a_float')
register(IsComplex, 'be_a_complex_number', 'be_a_complex')
register(IsNumeric, 'be_numeric')
register(IsString, 'be_a_string')
register(IsStr, 'be_a_str')
register(IsUnicode, 'be_an_unicode_string', 'be_an_unicode')
register(IsBinary, 'be_a_binary_string', 'be_a_binary')
register(IsByteArray, 'be_a_bytearray', 'be_a_byte_array')
register(IsDict, 'be_a_dictionary', 'be_a_dict')
register(IsList, 'be_a_list', 'be_an_array')
register(IsTuple, 'be_a_tuple')
register(IsSet, 'be_a_set')
register(IsFrozenSet, 'be_a_frozenset', 'be_a_frozen_set')
register(IsFunction, 'be_a_function', 'be_a_func')
register(IsBool, 'be_a_boolean', 'be_a_bool')
register(IsGenerator, 'be_a_generator')
register(IsClass, 'be_a_class')
register(IsDate, 'be_a_date')
class IsIterable(BaseMatcher):
""" Checks if a value is iterable """
def _matches(self, item):
try:
iter(item)
return True
except TypeError:
return False
def describe_to(self, description):
description.append_text('an iterable value')
register(IsIterable, 'be_an_iterable')
class IsCallable(BaseMatcher):
""" Check if a value is callable """
def _matches(self, item):
return hasattr(item, '__call__')
def describe_to(self, desc):
desc.append_text('a callable value')
register(IsCallable, 'be_callable', 'be_a_callable_value', 'can_be_called')
class IsNone(BaseMatcher):
""" Check if a value is None """
def _matches(self, item):
return True if item is None else False
def describe_to(self, desc):
desc.append_text('a None')
register(IsNone, 'be_none', 'be_a_none_value')
class IsTrue(BaseMatcher):
""" Check if a value is True """
def _matches(self, item):
return item is True
def describe_to(self, desc):
desc.append_text('a True')
class IsFalse(BaseMatcher):
""" Check if a value is False """
def _matches(self, item):
return item is False
def describe_to(self, desc):
desc.append_text('a False')
class IsTruthy(BaseMatcher):
""" Check if a value is truthy """
def _matches(self, item):
return True if item else False
def describe_to(self, desc):
desc.append_text('a truthy value')
class IsFalsy(BaseMatcher):
""" Check if a value is falsy """
def _matches(self, item):
return True if not item else False
def describe_to(self, desc):
desc.append_text('a falsy value')
register(IsTrue, 'be_true')
register(IsFalse, 'be_false')
register(IsTruthy, 'be_a_truthy_value', 'be_truthy', 'be_ok')
register(IsFalsy, 'be_a_falsy_value', 'be_falsy', 'be_ko')
class IsEmpty(BaseMatcher):
    """ Check if a value is empty (its length is zero) """

    def _matches(self, item):
        try:
            return not bool(len(item))
        except Exception:
            # Values without a usable length are never considered empty.
            # (narrowed from a bare ``except:`` so KeyboardInterrupt and
            # SystemExit are not swallowed)
            return False

    def describe_to(self, desc):
        desc.append_text('an empty value')

    def describe_mismatch(self, item, desc):
        try:
            count = len(item)
            desc.append_text('has {0} elements'.format(count))
        except Exception:
            desc.append_value(item)
            desc.append_text(' does not have a length')
register(IsEmpty, 'be_empty')
class RaisesError(BaseMatcher):
    """ Checks if calling the value raises an error.

    The raised error can optionally be filtered by type (``expected`` may
    be a single exception class or a tuple of them), by exact ``message``
    or by a ``regex`` applied to the message.
    """

    def __init__(self, expected=None, message=None, regex=None):
        self.expected = expected    # exception class (or tuple of classes)
        self.message = message      # exact message to compare against
        self.regex = regex          # regex the message must match
        self.thrown = None          # the exception actually raised

    def _matches(self, item):
        # support passing a context manager result
        if isinstance(item, ContextManagerResult):
            # Python <2.7 may provide a non exception value
            if isinstance(item.exc_value, Exception):
                self.thrown = item.exc_value
            elif item.exc_type is not None:
                try:
                    self.thrown = item.exc_type(*item.exc_value)
                except TypeError:
                    self.thrown = item.exc_type(item.exc_value)
            else:
                return False
        else:
            try:
                # support passing arguments by feeding a tuple instead of a callable
                if not callable(item) and getattr(item, '__getitem__', False):
                    item[0](*item[1:])
                else:
                    item()
                return False
            except:
                # Deliberately bare: this should capture any kind of raised value
                import sys
                self.thrown = sys.exc_info()[1]

        # Fail if we have defined an expected error type
        if self.expected and not isinstance(self.thrown, self.expected):
            return False

        # Apply message filters
        if self.message:
            return self.message == str(self.thrown)
        elif self.regex:
            return re.match(self.regex, str(self.thrown)) is not None

        return True

    def describe_to(self, desc):
        if self.thrown and self.message:
            desc.append_text('to raise an exception with message "%s"'
                             % self.message)
        elif self.thrown and self.regex:
            desc.append_text('to raise an exception matching /%s/'
                             % self.regex)
        else:
            desc.append_text('to raise an exception')

        if self.expected:
            # ``expected`` may be one class or a tuple/list of classes.
            # NOTE: the old ``try: map(...) except:`` probe relied on map()
            # raising eagerly; on Python 3 map is lazy, so a single class
            # would blow up inside ``join`` instead of the except branch.
            if isinstance(self.expected, (tuple, list)):
                exps = [x.__name__ for x in self.expected]
            else:
                exps = [self.expected.__name__]
            desc.append_text(' of type <%s>' % '>, <'.join(exps))

    def describe_mismatch(self, item, desc):
        if self.thrown:
            desc.append_text('was ')
            desc.append_text('<%s>' % self.thrown.__class__.__name__)
            if self.message or self.regex:
                desc.append_text(' "%s"' % str(self.thrown))
        else:
            desc.append_text('no exception was raised')
register(RaisesError,
'raise_an_error', 'raise_an_exception',
'raises_an_error', 'raises_an_exception', 'raises', 'raise',
'throw_an_error', 'throw_an_exception',
'throws_an_error', 'throws_an_exception', 'throws', 'throw')
class Changes(BaseMatcher):
    """ Checks if calling a value changes something.

    The watched value may be a callable (re-evaluated before and after the
    call) or a plain value (used as-is).  The item under test may itself be
    a callable or a ``(callable, arg1, ...)`` tuple.
    """

    def __init__(self, watch):
        self.watch = watch
        self.before = None
        self.after = None
        self.changed = False

    def _matches(self, item):
        # support passing arguments by feeding a tuple instead of a callable
        if not callable(item) and getattr(item, '__getitem__', False):
            func = item[0]
            params = item[1:]
        else:
            func = item
            params = []

        # NOTE: fixes AttributeError -- the constructor stores the watched
        # value as ``self.watch``, not ``self.watcher``.
        try:
            before = self.watch()
        except TypeError:
            before = self.watch

        # keep a snapshot of the value in case it's mutable
        from copy import deepcopy
        self.before = deepcopy(before)

        func(*params)

        try:
            self.after = self.watch()
        except TypeError:
            self.after = self.watch

        # Use hamcrest equality so matcher-aware comparisons apply
        try:
            hc.assert_that(self.after, hc.equal_to(self.before))
            self.changed = False
        except AssertionError:
            self.changed = True

        return self.changed

    def describe_to(self, desc):
        desc.append_text('change something')

    def describe_mismatch(self, item, desc):
        # To support its proper use when negated we need to check if
        # the values actually changed or not
        if self.changed:
            desc.append_text('did change from ') \
                .append_value(self.before) \
                .append_text(' to ') \
                .append_value(self.after)
        else:
            desc.append_text('it didn\'t change from ') \
                .append_value(self.before)
register(Changes,
'change', 'changes', 'modify', 'modifies')
class Callback(BaseMatcher):
    """ Checks against a user supplied callback.  The callback should
    return True to indicate a successful match or False to indicate an
    unsuccessful one.
    """

    def __init__(self, callback):
        self.callback = callback
        self.error = None  # message of the last exception raised by the callback

    def _matches(self, item):
        self.error = None
        try:
            result = self.callback(item)
            # Returning an expectation assumes it's correct (no failure raised)
            from .expectation import Expectation
            return isinstance(result, Expectation) or bool(result)
        except AssertionError:
            # Just forward assertion failures
            raise
        except Exception as ex:
            self.error = str(ex)
            return False

    def describe_to(self, desc):
        # NOTE: fixes the misspelled "passses" in the failure description
        desc.append_text('passes callback ')
        if (isinstance(self.callback, type(lambda: None))
                and self.callback.__name__ == '<lambda>'):
            desc.append_text(self.callback.__name__)
        else:
            desc.append_text('{0}'.format(self.callback))

    def describe_mismatch(self, item, desc):
        if self.error:
            desc.append_text('gave an exception "%s"' % self.error)
        else:
            desc.append_text('returned False')
register(Callback,
'callback', 'pass', 'pass_callback')
class MockCalled(BaseMatcher):
""" Support for checking if mocks where called from the Mock library
"""
def _matches(self, item):
if not hasattr(item, 'called'):
raise Exception('Mock object does not have a <called> attribute')
return item.called
def describe_to(self, desc):
desc.append_text('called')
def describe_mismatch(self, item, desc):
if item.called:
desc.append_text('was called')
else:
desc.append_text('was not called')
register(MockCalled, 'called', 'invoked')
class RegexMatcher(BaseMatcher):
""" Checks against a regular expression """
def __init__(self, regex, flags=0):
self.regex = regex
if isinstance(flags, text_types):
self.flags = 0
for ch in flags.upper():
self.flags |= getattr(re, ch)
else:
self.flags = flags
def _matches(self, item):
# Make sure we are matching against a string
hc.assert_that(item, IsString())
match = re.search(self.regex, item, self.flags)
return match is not None
def describe_to(self, desc):
desc.append_text('matching ')
desc.append_text('/{0}/'.format(self.regex))
register(RegexMatcher, 'match', 'match_regex', 'match_regexp', 'be_matched_by')
class IsObjectContainingEntries(IsDictContainingEntries):
"""Matches if object has the properties from a given dict whose values and
keys satisfy a given matcher.
Examples::
:param inst: The instance or class.
:param mismatch_description: The description in case of failure.
have_properties({
'prop1': should.eq('value1'),
'prop2': should.eq('value2')
})
"""
hidden = set(['should_not', 'should_all', 'should_any', 'should', 'should_none'])
def __init__(self, value_matchers=None, **kwargs):
base_dict = {}
if value_matchers is None:
value_matchers = kwargs
for key, value in value_matchers.items():
base_dict[key] = wrap_matcher(value)
super(IsObjectContainingEntries, self).__init__(base_dict)
def matches(self, inst, mismatch_description=None):
# Make sure we are matching against a dict
try:
keys = dir(inst)
attributes = dict(
(key, getattr(inst, key))
for key in dir(inst)
if not key.startswith('__')
and key not in IsObjectContainingEntries.hidden
)
except Exception as ex:
if mismatch_description:
mismatch_description.append_text(
'unable to extract attributes from value: {0}'.format(ex))
return False
return super(IsObjectContainingEntries, self).matches(
attributes, mismatch_description)
def describe_to(self, desc):
desc.append_text('a class as ')
super(IsObjectContainingEntries, self).describe_to(desc)
register(IsObjectContainingEntries,
'have_the_properties', 'contain_the_properties', 'have_the_attributes', 'contain_the_attributes',
'have_props', 'contain_props', 'have_attrs', 'contain_attrs')
class IsSequenceContainingEveryInOrderSparse(IsSequenceContainingEvery):
"""
Matches if a list contains every given element in the same order but with
optional interleaved items.
No optional elements matching the required ones are allowed.
Mismatch description prioritizes missing items over wrong order.
e.g. [1, 3, 4] IsSequenceContainingEveryInOrder [1, 4]
but NOT IsSequenceContainingEveryInOrder [4, 1]
and NOT IsSequenceContainingEveryInOrder [1, 4, 4]
"""
def __init__(self, *element_matchers):
delegates = [hc.has_item(e) for e in element_matchers]
self.matcher_all = hc.all_of(*delegates)
self.matcher_any = hc.any_of(*delegates)
self.matcher_order = hc.contains(*element_matchers)
self.order_seq = None
def _matches(self, sequence):
self.order_seq = None
try:
seq = list(sequence)
if self.matcher_all.matches(seq):
self.order_seq = [i for i in seq if self.matcher_any.matches([i])]
return self.matcher_order.matches(self.order_seq)
else:
return False
except TypeError:
return False
def describe_mismatch(self, item, mismatch_description):
if self.order_seq is None:
mismatch_description.append_text(' instead of a ')
self.matcher_all.describe_mismatch(item, mismatch_description)
else:
self.matcher_order.describe_mismatch(self.order_seq, mismatch_description)
mismatch_description.append_text(
' from candidate list elements: '
).append_description_of(self.order_seq).append_text(
' that satisfied those conditions from '
).append_description_of(item)
def describe_to(self, description):
self.matcher_all.describe_to(description)
description.append_text(' in this specific order')
register(IsSequenceContainingEveryInOrderSparse,
'contain_sparse', 'have_sparse', 'contain_sparse_in_order',
'contain_in_order_sparse', 'have_every_in_order_sparse',
'have_in_order_sparse', 'contain_every_in_order_sparse')
|
drslump/pyshould
|
pyshould/matchers.py
|
suggest
|
python
|
def suggest(alias, max=3, cutoff=0.5):
aliases = matchers.keys()
similar = get_close_matches(alias, aliases, n=max, cutoff=cutoff)
return similar
|
Suggest a list of aliases which are similar enough
|
train
|
https://github.com/drslump/pyshould/blob/7210859d4c84cfbaa64f91b30c2a541aea788ddf/pyshould/matchers.py#L127-L134
| null |
"""
Defines the registry of matchers and the standard set of matchers
"""
import re
from datetime import datetime, date
import hamcrest as hc
from difflib import get_close_matches
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.library.collection.isdict_containingentries import IsDictContainingEntries
from hamcrest.library.collection.issequence_containing import IsSequenceContainingEvery
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
__author__ = "Ivan -DrSlump- Montes"
__email__ = "drslump@pollinimini.net"
__license__ = "MIT"
# Words to ignore when looking up matchers
IGNORED_WORDS = ['should', 'to', 'be', 'a', 'an', 'is', 'the', 'as']
# Map of registered matchers as alias:callable
matchers = {}
# Map of normalized matcher aliases as normalized:alias
normalized = {}
# Help messages associated to matchers
helpmatchers = {}
# All textual representation types in Python 2/3
try:
text_types = (basestring, str, unicode) # python 2
except NameError:
text_types = (str,)
class ContextManagerResult(object):
    """Captures the arguments of an expression's ``__exit__`` when the
    expression is used in a ``with`` statement, so they can be handed to
    matchers as the value under test (mainly useful for raise/throw).
    """

    def __init__(self, type_, value, trace):
        # Mirror the (type, value, traceback) triple of sys.exc_info()
        self.exc_type = type_
        self.exc_value = value
        self.trace = trace

    def __str__(self):
        """Describe the captured exception for AnyOf/AllOf matchers."""
        return repr(self.exc_value)
def register(matcher, *aliases):
    """ Register a matcher associated to one or more aliases. Each alias
    given is also normalized (and additionally mapped without underscores)
    so that lookup() can resolve loose spellings.
    """
    # Record the matcher's docstring so alias_help() can surface it later.
    docstr = matcher.__doc__ if matcher.__doc__ is not None else ''
    helpmatchers[matcher] = docstr.strip()
    for alias in aliases:
        matchers[alias] = matcher
        # Map a normalized version of the alias
        norm = normalize(alias)
        normalized[norm] = alias
        # Map a version without snake case
        norm = norm.replace('_', '')
        normalized[norm] = alias
def unregister(matcher):
    """ Unregister a matcher (or alias) from the registry.

    Accepts either the matcher callable itself or one of its aliases.
    Removes every alias (and its normalized forms) mapped to the matcher
    plus its help docstring. Returns True when at least one alias was
    removed, False otherwise.
    """
    # If it's a string handle it like an alias
    if isinstance(matcher, text_types) and matcher in matchers:
        matcher = matchers[matcher]
    # Find all aliases associated to the matcher.
    # BUG FIX: dict.iteritems() does not exist on Python 3; use items()
    # inside a list comprehension (which materializes the pairs, so the
    # deletions below are safe).
    aliases = [k for k, v in matchers.items() if v == matcher]
    for alias in aliases:
        del matchers[alias]
        # Clean up the normalized versions
        norms = [k for k, v in normalized.items() if v == alias]
        for norm in norms:
            del normalized[norm]
    # Remove help docstring
    if matcher in helpmatchers:
        del helpmatchers[matcher]
    return len(aliases) > 0
def normalize(alias):
    """Normalize an alias: CamelCase becomes snake_case and the filler
    words listed in IGNORED_WORDS are dropped.
    """
    # CamelCase -> snake_case
    snake = re.sub(r'([a-z])([A-Z])', r'\1_\2', alias)
    # Drop the ignored filler words
    kept = [word for word in snake.lower().split('_')
            if word not in IGNORED_WORDS]
    return '_'.join(kept)
def lookup(alias):
    """ Tries to find a matcher callable associated to the given alias. If
    an exact match does not exist it will try normalizing it and even
    removing underscores to find one. Returns None when nothing matches.
    """
    # 1) exact alias match
    if alias in matchers:
        return matchers[alias]
    else:
        # 2) normalized match (filler words stripped, snake_cased)
        norm = normalize(alias)
        if norm in normalized:
            alias = normalized[norm]
            return matchers[alias]
    # Check without snake case
    # 3) last resort: retry once with all underscores removed.
    # The recursion terminates because the recursed alias contains no '_'.
    if -1 != alias.find('_'):
        norm = normalize(alias).replace('_', '')
        return lookup(norm)
    return None
def aliases():
    """Return the list of registered matcher aliases."""
    return [name for name in matchers]
def alias_help(alias):
    """Return the help docstring registered for *alias*, or None when the
    alias is unknown.
    """
    found = lookup(alias)
    if not found:
        return None
    return helpmatchers.get(found)
# Matchers should be defined with verbose aliases to allow the use of
# natural english where possible. When looking up a matcher common adverbs
# like 'to', 'be' or 'is' are ignored in the comparison.
register(hc.equal_to,
'be_equal_to', 'be_equals_to', 'be_eql_to', 'be_eq_to')
register(hc.instance_of,
'be_an_instance_of', 'be_a', 'be_an')
register(hc.same_instance,
'be_the_same_instance_as', 'be_the_same_as', 'be')
register(hc.has_entry,
'have_the_entry', 'contain_the_entry')
register(hc.has_entries,
'have_the_entries', 'contain_the_entries')
register(hc.has_key,
'have_the_key', 'contain_the_key')
register(hc.has_value,
'have_the_value', 'contain_the_value')
register(hc.is_in,
'be_in', 'be_into', 'be_contained_in')
register(hc.has_item,
'have_the_item', 'contain_the_item')
register(hc.has_items,
'have_the_items', 'contain_the_items')
register(hc.contains_inanyorder,
'have_in_any_order', 'contain_in_any_order')
register(hc.contains,
'have', 'contain')
register(hc.only_contains,
'have_only', 'contain_only')
register(hc.close_to,
'be_close_to')
register(hc.greater_than,
'be_greater_than', 'be_greater', 'be_gt',
'be_above',
'be_more_than', 'be_more')
register(hc.greater_than_or_equal_to,
'be_greater_than_or_equal_to', 'be_greater_or_equal', 'be_ge',
'be_more_than_or_equal', 'be_more_or_equal',
'be_at_least')
register(hc.less_than,
'be_less_than', 'be_less', 'be_lt', 'be_below')
register(hc.less_than_or_equal_to,
'be_less_than_or_equal_to', 'be_less_or_equal', 'be_le',
'be_at_most')
register(hc.has_length,
'have_length', 'have_len')
register(hc.has_property,
'have_the_property', 'contain_the_property',
'have_the_prop', 'contain_the_prop')
register(hc.has_string,
'have_the_string', 'contain_the_string')
register(hc.equal_to_ignoring_case,
'be_equal_to_ignoring_case')
register(hc.equal_to_ignoring_whitespace,
'be_equal_to_ignoring_whitespace')
register(hc.contains_string,
'substr', 'have_the_substr', 'contain_the_substr',
'substring', 'have_the_substring', 'contain_the_substring')
register(hc.ends_with,
'end_with')
register(hc.starts_with,
'start_with', 'begin_with')
register(hc.anything,
'be_anything', 'be_any')
class TypeMatcher(BaseMatcher):
    # Base class for the isinstance-based matchers below. Subclasses set
    # `types` (a type or tuple of types) and `expected` (the description
    # text).

    def _matches(self, item):
        return isinstance(item, self.__class__.types)

    def describe_to(self, description):
        description.append_text(self.__class__.expected)

    def describe_mismatch(self, item, description):
        description.append_text('was a %s ' % item.__class__.__name__)
        description.append_description_of(item)

    @classmethod
    def __call__(cls, *args, **kwargs):
        # NOTE(review): defined as a classmethod so the class itself can be
        # registered and "called" to produce an instance; any arguments are
        # deliberately discarded.
        return cls()
class IsInteger(TypeMatcher):
    """ Check if the value is an integer """
    # BUG FIX: catch only NameError (raised on Python 3 where `long` is
    # gone) instead of a bare except, and use a proper one-element tuple.
    try:
        types = (int, long)  # noqa: F821 -- Python 2 only
    except NameError:
        types = (int,)  # Python 3
    expected = 'an integer'
class IsFloat(TypeMatcher):
""" Check if the value is a float """
types = float
expected = 'a float'
class IsComplex(TypeMatcher):
""" Check if the value is a complex number """
types = complex
expected = 'a complex number'
class IsNumeric(TypeMatcher):
""" Check if the value is a numeric type """
try:
types = (int, long, float, complex) # python 2
except NameError:
types = (int, float, complex)
expected = 'a numeric type'
class IsString(TypeMatcher):
""" Check if the value is a string """
types = text_types
expected = 'a string'
class IsStr(TypeMatcher):
""" Check if the value is a str """
try:
types = (basestring, str) # python 2
except NameError:
types = str
expected = 'a str'
class IsUnicode(TypeMatcher):
""" Check if the value is an unicode string """
try:
types = unicode # python 2
except NameError:
types = str
expected = 'a unicode string'
class IsBinary(TypeMatcher):
""" Check if value is a binary string """
try:
types = bytes # python 3
except NameError:
types = str
expected = 'a binary string'
class IsByteArray(TypeMatcher):
""" Check if the value is a bytearray """
types = bytearray
expected = 'a bytearray'
class IsDict(TypeMatcher):
""" Check if the value is a dict """
types = dict
expected = 'a dict'
class IsList(TypeMatcher):
""" Check if the value is a list """
types = list
expected = 'a list'
class IsTuple(TypeMatcher):
""" Check if the value is a tuple """
types = tuple
expected = 'a tuple'
class IsSet(TypeMatcher):
""" Check if the value is a set """
types = set
expected = 'a set'
class IsFrozenSet(TypeMatcher):
""" Check if the value is a frozenset """
types = frozenset
expected = 'a frozenset'
class IsBool(TypeMatcher):
""" Check if the value is a bool """
types = bool
expected = 'a bool'
class IsFunction(TypeMatcher):
""" Check if the value is a function """
import types
types = types.FunctionType
expected = 'a function'
class IsGenerator(BaseMatcher):
""" Checks if the value is a generator function """
def _matches(self, item):
import inspect
return inspect.isgeneratorfunction(item)
def describe_to(self, desc):
desc.append_text('a generator function')
class IsClass(BaseMatcher):
""" Check if the value is a class """
def _matches(self, item):
import inspect
return inspect.isclass(item)
def describe_to(self, desc):
desc.append_text('a class')
class IsDate(TypeMatcher):
""" Check if the value is a date """
types = (datetime, date)
expected = 'a date'
register(IsInteger, 'be_an_integer', 'be_an_int')
register(IsFloat, 'be_a_float')
register(IsComplex, 'be_a_complex_number', 'be_a_complex')
register(IsNumeric, 'be_numeric')
register(IsString, 'be_a_string')
register(IsStr, 'be_a_str')
register(IsUnicode, 'be_an_unicode_string', 'be_an_unicode')
register(IsBinary, 'be_a_binary_string', 'be_a_binary')
register(IsByteArray, 'be_a_bytearray', 'be_a_byte_array')
register(IsDict, 'be_a_dictionary', 'be_a_dict')
register(IsList, 'be_a_list', 'be_an_array')
register(IsTuple, 'be_a_tuple')
register(IsSet, 'be_a_set')
register(IsFrozenSet, 'be_a_frozenset', 'be_a_frozen_set')
register(IsFunction, 'be_a_function', 'be_a_func')
register(IsBool, 'be_a_boolean', 'be_a_bool')
register(IsGenerator, 'be_a_generator')
register(IsClass, 'be_a_class')
register(IsDate, 'be_a_date')
class IsIterable(BaseMatcher):
""" Checks if a value is iterable """
def _matches(self, item):
try:
iter(item)
return True
except TypeError:
return False
def describe_to(self, description):
description.append_text('an iterable value')
register(IsIterable, 'be_an_iterable')
class IsCallable(BaseMatcher):
""" Check if a value is callable """
def _matches(self, item):
return hasattr(item, '__call__')
def describe_to(self, desc):
desc.append_text('a callable value')
register(IsCallable, 'be_callable', 'be_a_callable_value', 'can_be_called')
class IsNone(BaseMatcher):
""" Check if a value is None """
def _matches(self, item):
return True if item is None else False
def describe_to(self, desc):
desc.append_text('a None')
register(IsNone, 'be_none', 'be_a_none_value')
class IsTrue(BaseMatcher):
""" Check if a value is True """
def _matches(self, item):
return item is True
def describe_to(self, desc):
desc.append_text('a True')
class IsFalse(BaseMatcher):
""" Check if a value is False """
def _matches(self, item):
return item is False
def describe_to(self, desc):
desc.append_text('a False')
class IsTruthy(BaseMatcher):
""" Check if a value is truthy """
def _matches(self, item):
return True if item else False
def describe_to(self, desc):
desc.append_text('a truthy value')
class IsFalsy(BaseMatcher):
""" Check if a value is falsy """
def _matches(self, item):
return True if not item else False
def describe_to(self, desc):
desc.append_text('a falsy value')
register(IsTrue, 'be_true')
register(IsFalse, 'be_false')
register(IsTruthy, 'be_a_truthy_value', 'be_truthy', 'be_ok')
register(IsFalsy, 'be_a_falsy_value', 'be_falsy', 'be_ko')
class IsEmpty(BaseMatcher):
    """ Check if a value is empty (i.e. its length is zero) """

    def _matches(self, item):
        # A value without a length cannot be "empty"; len() raises
        # TypeError for such values, so catch exactly that instead of a
        # bare except that would also hide genuine bugs in __len__.
        try:
            return not bool(len(item))
        except TypeError:
            return False

    def describe_to(self, desc):
        desc.append_text('an empty value')

    def describe_mismatch(self, item, desc):
        try:
            desc.append_text('has {0} elements'.format(len(item)))
        except TypeError:
            desc.append_value(item)
            desc.append_text(' does not have a length')
register(IsEmpty, 'be_empty')
class RaisesError(BaseMatcher):
    """ Checks if calling the value raises an error.

    The raised error can optionally be filtered by type (`expected`), by
    an exact message (`message`) or by a regular expression (`regex`).
    """

    def __init__(self, expected=None, message=None, regex=None):
        self.expected = expected
        self.message = message
        self.regex = regex
        self.thrown = None  # the exception actually captured, if any

    def _matches(self, item):
        # support passing a context manager result
        if isinstance(item, ContextManagerResult):
            # Python <2.7 may provide a non exception value
            if isinstance(item.exc_value, Exception):
                self.thrown = item.exc_value
            elif item.exc_type is not None:
                # Rebuild the exception from its type and argument(s)
                try:
                    self.thrown = item.exc_type(*item.exc_value)
                except TypeError:
                    self.thrown = item.exc_type(item.exc_value)
            else:
                # nothing was raised inside the `with` block
                return False
        else:
            try:
                # support passing arguments by feeding a tuple instead of a callable
                if not callable(item) and getattr(item, '__getitem__', False):
                    item[0](*item[1:])
                else:
                    item()
                return False
            except:
                # This should capture any kind of raised value
                import sys
                self.thrown = sys.exc_info()[1]
        # Fail if we have defined an expected error type
        if self.expected and not isinstance(self.thrown, self.expected):
            return False
        # Apply message filters
        if self.message:
            return self.message == str(self.thrown)
        elif self.regex:
            return re.match(self.regex, str(self.thrown))
        return True

    def describe_to(self, desc):
        if self.thrown and self.message:
            desc.append_text('to raise an exception with message "%s"'
                             % self.message)
        elif self.thrown and self.regex:
            desc.append_text('to raise an exception matching /%s/'
                             % self.regex)
        else:
            desc.append_text('to raise an exception')
            if self.expected:
                # `expected` may be a single type or a tuple of types
                try:
                    exps = map(lambda x: x.__name__, self.expected)
                except:
                    exps = [self.expected.__name__]
                desc.append_text(' of type <%s>' % '>, <'.join(exps))

    def describe_mismatch(self, item, desc):
        if self.thrown:
            desc.append_text('was ')
            desc.append_text('<%s>' % self.thrown.__class__.__name__)
            if self.message or self.regex:
                desc.append_text(' "%s"' % str(self.thrown))
        else:
            desc.append_text('no exception was raised')
register(RaisesError,
'raise_an_error', 'raise_an_exception',
'raises_an_error', 'raises_an_exception', 'raises', 'raise',
'throw_an_error', 'throw_an_exception',
'throws_an_error', 'throws_an_exception', 'throws', 'throw')
class Changes(BaseMatcher):
    """ Checks if calling a value changes something.

    The constructor receives a *watch* callable (or plain value) that is
    evaluated before and after invoking the matched item; the matcher
    succeeds when the two snapshots differ.
    """

    def __init__(self, watch):
        self.watch = watch
        self.before = None
        self.after = None
        self.changed = False

    def _matches(self, item):
        # support passing arguments by feeding a tuple instead of a callable
        if not callable(item) and getattr(item, '__getitem__', False):
            func = item[0]
            params = item[1:]
        else:
            func = item
            params = []
        # BUG FIX: the original read `self.watcher`, which does not exist --
        # the constructor stores the watched callable/value as `self.watch`.
        try:
            before = self.watch()
        except TypeError:
            before = self.watch
        # keep a snapshot of the value in case it's mutable
        from copy import deepcopy
        self.before = deepcopy(before)
        func(*params)
        try:
            self.after = self.watch()
        except TypeError:
            self.after = self.watch
        try:
            hc.assert_that(self.after, hc.equal_to(self.before))
            self.changed = False
        except AssertionError:
            self.changed = True
        return self.changed

    def describe_to(self, desc):
        desc.append_text('change something')

    def describe_mismatch(self, item, desc):
        # To support its proper use when negated we need to check if
        # the values actually changed or not
        if self.changed:
            desc.append_text('did change from ') \
                .append_value(self.before) \
                .append_text(' to ') \
                .append_value(self.after)
        else:
            desc.append_text('it didn\'t change from ') \
                .append_value(self.before)
register(Changes,
'change', 'changes', 'modify', 'modifies')
class Callback(BaseMatcher):
    """ Checks against a user supplied callback. The callback should
    return True to indicate a successful match or False to indicate an
    unsuccessful one.
    """

    def __init__(self, callback):
        self.callback = callback

    def _matches(self, item):
        self.error = None
        try:
            result = self.callback(item)
            # Returning an expectation assumes it's correct (no failure raised)
            from .expectation import Expectation
            return isinstance(result, Expectation) or bool(result)
        except AssertionError:
            # Just forward assertion failures
            raise
        except Exception as ex:
            # Remember the error so describe_mismatch() can report it
            self.error = str(ex)
            return False

    def describe_to(self, desc):
        # BUG FIX: 'passses' typo in the reported description text
        desc.append_text('passes callback ')
        if (isinstance(self.callback, type(lambda: None))
                and self.callback.__name__ == '<lambda>'):
            desc.append_text(self.callback.__name__)
        else:
            desc.append_text('{0}'.format(self.callback))

    def describe_mismatch(self, item, desc):
        if self.error:
            desc.append_text('gave an exception "%s"' % self.error)
        else:
            desc.append_text('returned False')
register(Callback,
'callback', 'pass', 'pass_callback')
class MockCalled(BaseMatcher):
    """Support for checking whether Mock-library mocks were invoked."""

    def _matches(self, item):
        # Only objects exposing a `called` flag (Mock instances) qualify.
        if not hasattr(item, 'called'):
            raise Exception('Mock object does not have a <called> attribute')
        return item.called

    def describe_to(self, desc):
        desc.append_text('called')

    def describe_mismatch(self, item, desc):
        text = 'was called' if item.called else 'was not called'
        desc.append_text(text)
register(MockCalled, 'called', 'invoked')
class RegexMatcher(BaseMatcher):
    """Checks a string value against a regular expression."""

    def __init__(self, regex, flags=0):
        self.regex = regex
        if not isinstance(flags, text_types):
            self.flags = flags
        else:
            # Flags given as a string, e.g. 'im' -> re.I | re.M
            combined = 0
            for letter in flags.upper():
                combined |= getattr(re, letter)
            self.flags = combined

    def _matches(self, item):
        # Make sure we are matching against a string
        hc.assert_that(item, IsString())
        return re.search(self.regex, item, self.flags) is not None

    def describe_to(self, desc):
        desc.append_text('matching ')
        desc.append_text('/{0}/'.format(self.regex))
register(RegexMatcher, 'match', 'match_regex', 'match_regexp', 'be_matched_by')
class IsObjectContainingEntries(IsDictContainingEntries):
    """Matches if an object has the properties from a given dict whose
    values and keys satisfy a given matcher.

    Examples::

        have_properties({
            'prop1': should.eq('value1'),
            'prop2': should.eq('value2')
        })
    """

    # Expectation helper attributes that must never be treated as regular
    # object properties when collecting attributes.
    hidden = set(['should_not', 'should_all', 'should_any', 'should', 'should_none'])

    def __init__(self, value_matchers=None, **kwargs):
        # Accept either a dict of matchers or keyword arguments.
        base_dict = {}
        if value_matchers is None:
            value_matchers = kwargs
        for key, value in value_matchers.items():
            # Wrap plain values so they behave as hamcrest matchers.
            base_dict[key] = wrap_matcher(value)
        super(IsObjectContainingEntries, self).__init__(base_dict)

    def matches(self, inst, mismatch_description=None):
        """Collect the instance's attributes into a dict and delegate.

        :param inst: The instance or class.
        :param mismatch_description: The description in case of failure.
        """
        try:
            keys = dir(inst)
            attributes = dict(
                (key, getattr(inst, key))
                for key in dir(inst)
                if not key.startswith('__')
                and key not in IsObjectContainingEntries.hidden
            )
        except Exception as ex:
            if mismatch_description:
                mismatch_description.append_text(
                    'unable to extract attributes from value: {0}'.format(ex))
            return False
        return super(IsObjectContainingEntries, self).matches(
            attributes, mismatch_description)

    def describe_to(self, desc):
        desc.append_text('a class as ')
        super(IsObjectContainingEntries, self).describe_to(desc)
register(IsObjectContainingEntries,
'have_the_properties', 'contain_the_properties', 'have_the_attributes', 'contain_the_attributes',
'have_props', 'contain_props', 'have_attrs', 'contain_attrs')
class IsSequenceContainingEveryInOrderSparse(IsSequenceContainingEvery):
    """
    Matches if a list contains every given element in the same order but with
    optional interleaved items.
    No optional elements matching the required ones are allowed.
    Mismatch description prioritizes missing items over wrong order.
    e.g. [1, 3, 4] IsSequenceContainingEveryInOrder [1, 4]
    but NOT IsSequenceContainingEveryInOrder [4, 1]
    and NOT IsSequenceContainingEveryInOrder [1, 4, 4]
    """

    def __init__(self, *element_matchers):
        # has_item() per matcher: checks presence independently of order
        delegates = [hc.has_item(e) for e in element_matchers]
        self.matcher_all = hc.all_of(*delegates)
        self.matcher_any = hc.any_of(*delegates)
        # contains() enforces the exact relative order of the matchers
        self.matcher_order = hc.contains(*element_matchers)
        # Filtered candidate list, kept for mismatch reporting
        self.order_seq = None

    def _matches(self, sequence):
        self.order_seq = None
        try:
            seq = list(sequence)
            if self.matcher_all.matches(seq):
                # Keep only the elements matching at least one matcher and
                # verify they appear in the required order.
                self.order_seq = [i for i in seq if self.matcher_any.matches([i])]
                return self.matcher_order.matches(self.order_seq)
            else:
                return False
        except TypeError:
            # the value is not iterable
            return False

    def describe_mismatch(self, item, mismatch_description):
        if self.order_seq is None:
            # Some required element was missing altogether
            mismatch_description.append_text(' instead of a ')
            self.matcher_all.describe_mismatch(item, mismatch_description)
        else:
            # Every element is present but the relative order is wrong
            self.matcher_order.describe_mismatch(self.order_seq, mismatch_description)
            mismatch_description.append_text(
                ' from candidate list elements: '
            ).append_description_of(self.order_seq).append_text(
                ' that satisfied those conditions from '
            ).append_description_of(item)

    def describe_to(self, description):
        self.matcher_all.describe_to(description)
        description.append_text(' in this specific order')
register(IsSequenceContainingEveryInOrderSparse,
'contain_sparse', 'have_sparse', 'contain_sparse_in_order',
'contain_in_order_sparse', 'have_every_in_order_sparse',
'have_in_order_sparse', 'contain_every_in_order_sparse')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.