repository_name
stringclasses 316
values | func_path_in_repository
stringlengths 6
223
| func_name
stringlengths 1
134
| language
stringclasses 1
value | func_code_string
stringlengths 57
65.5k
| func_documentation_string
stringlengths 1
46.3k
| split_name
stringclasses 1
value | func_code_url
stringlengths 91
315
| called_functions
listlengths 1
156
⌀ | enclosing_scope
stringlengths 2
1.48M
|
|---|---|---|---|---|---|---|---|---|---|
pudo/jsonmapping
|
jsonmapping/statements.py
|
StatementsVisitor.get_subject
|
python
|
def get_subject(self, data):
if not isinstance(data, Mapping):
return None
if data.get(self.subject):
return data.get(self.subject)
return uuid.uuid4().urn
|
Try to get a unique ID from the object. By default, this will be
the 'id' field of any given object, or a field specified by the
'rdfSubject' property. If no other option is available, a UUID will be
generated.
|
train
|
https://github.com/pudo/jsonmapping/blob/4cf0a20a393ba82e00651c6fd39522a67a0155de/jsonmapping/statements.py#L22-L31
| null |
class StatementsVisitor(SchemaVisitor):
""" This class has utility functions for transforming JSON schema defined
objects into a series of RDF-like statements (i.e. subject, predicate,
object, context) quads. It can be used independently of any specific
storage backend, including RDF. """
@property
def subject(self):
return self.schema.get('rdfSubject', 'id')
@property
def predicate(self):
return self.schema.get('rdfName', self.name)
@property
def reverse(self):
""" Reverse links make sense for object to object links where we later
may want to also query the reverse of the relationship, e.g. when obj1
is a child of obj2, we want to infer that obj2 is a parent of obj1. """
name = self.schema.get('rdfReverse')
if name is not None:
return name
if self.parent is not None and self.parent.is_array:
return self.parent.reverse
def get_property(self, predicate):
for prop in self.properties:
if predicate == prop.name:
return prop
def triplify(self, data, parent=None):
""" Recursively generate statements from the data supplied. """
if data is None:
return
if self.is_object:
for res in self._triplify_object(data, parent):
yield res
elif self.is_array:
for item in data:
for res in self.items.triplify(item, parent):
yield res
else:
# TODO: figure out if I ever want to check for reverse here.
type_name = typecast.name(data)
obj = typecast.stringify(type_name, data)
if obj is not None:
obj = obj.strip()
yield (parent, self.predicate, obj, type_name)
def _triplify_object(self, data, parent):
""" Create bi-directional statements for object relationships. """
subject = self.get_subject(data)
if self.path:
yield (subject, TYPE_SCHEMA, self.path, TYPE_SCHEMA)
if parent is not None:
yield (parent, self.predicate, subject, TYPE_LINK)
if self.reverse is not None:
yield (subject, self.reverse, parent, TYPE_LINK)
for prop in self.properties:
for res in prop.triplify(data.get(prop.name), subject):
yield res
# Clever Method Names Award, 2014 and two years running
def objectify(self, load, node, depth=2, path=None):
""" Given a node ID, return an object the information available about
this node. This accepts a loader function as it's first argument, which
is expected to return all tuples of (predicate, object, source) for
the given subject. """
if path is None:
path = set()
if self.is_object:
if depth < 1:
return
return self._objectify_object(load, node, depth, path)
elif self.is_array:
if depth < 1:
return
return [self.items.objectify(load, node, depth, path)]
else:
return node
def _objectify_object(self, load, node, depth, path):
# Support inline objects which don't count towards the depth.
next_depth = depth
if not self.schema.get('inline'):
next_depth = depth - 1
sub_path = path.union([node])
obj = {
self.subject: node,
'$schema': self.path,
'$sources': [],
'$collections': [],
'$authors': [],
'$attrcount': 0,
'$linkcount': 0,
}
for stmt in load(node):
prop = self.get_property(stmt['predicate'])
if prop is None:
continue
if stmt['object'] in path and not prop.is_value:
continue
if prop.name not in obj:
obj['$attrcount'] += 1
if stmt['type'] == TYPE_LINK:
obj['$linkcount'] += 1
if stmt.get('source') and \
stmt.get('source') not in obj['$sources']:
obj['$sources'].append(stmt.get('source'))
if stmt.get('collection') and \
stmt.get('collection') not in obj['$collections']:
obj['$collections'].append(stmt.get('collection'))
if stmt.get('author') and \
stmt.get('author') not in obj['$authors']:
obj['$authors'].append(stmt.get('author'))
value = prop.objectify(load, stmt['object'], next_depth, sub_path)
if value is None:
continue
if prop.is_array and prop.name in obj:
obj[prop.name].extend(value)
else:
obj[prop.name] = value
return obj
|
pudo/jsonmapping
|
jsonmapping/statements.py
|
StatementsVisitor.reverse
|
python
|
def reverse(self):
name = self.schema.get('rdfReverse')
if name is not None:
return name
if self.parent is not None and self.parent.is_array:
return self.parent.reverse
|
Reverse links make sense for object to object links where we later
may want to also query the reverse of the relationship, e.g. when obj1
is a child of obj2, we want to infer that obj2 is a parent of obj1.
|
train
|
https://github.com/pudo/jsonmapping/blob/4cf0a20a393ba82e00651c6fd39522a67a0155de/jsonmapping/statements.py#L38-L46
| null |
class StatementsVisitor(SchemaVisitor):
""" This class has utility functions for transforming JSON schema defined
objects into a series of RDF-like statements (i.e. subject, predicate,
object, context) quads. It can be used independently of any specific
storage backend, including RDF. """
@property
def subject(self):
return self.schema.get('rdfSubject', 'id')
def get_subject(self, data):
""" Try to get a unique ID from the object. By default, this will be
the 'id' field of any given object, or a field specified by the
'rdfSubject' property. If no other option is available, a UUID will be
generated. """
if not isinstance(data, Mapping):
return None
if data.get(self.subject):
return data.get(self.subject)
return uuid.uuid4().urn
@property
def predicate(self):
return self.schema.get('rdfName', self.name)
@property
def get_property(self, predicate):
for prop in self.properties:
if predicate == prop.name:
return prop
def triplify(self, data, parent=None):
""" Recursively generate statements from the data supplied. """
if data is None:
return
if self.is_object:
for res in self._triplify_object(data, parent):
yield res
elif self.is_array:
for item in data:
for res in self.items.triplify(item, parent):
yield res
else:
# TODO: figure out if I ever want to check for reverse here.
type_name = typecast.name(data)
obj = typecast.stringify(type_name, data)
if obj is not None:
obj = obj.strip()
yield (parent, self.predicate, obj, type_name)
def _triplify_object(self, data, parent):
""" Create bi-directional statements for object relationships. """
subject = self.get_subject(data)
if self.path:
yield (subject, TYPE_SCHEMA, self.path, TYPE_SCHEMA)
if parent is not None:
yield (parent, self.predicate, subject, TYPE_LINK)
if self.reverse is not None:
yield (subject, self.reverse, parent, TYPE_LINK)
for prop in self.properties:
for res in prop.triplify(data.get(prop.name), subject):
yield res
# Clever Method Names Award, 2014 and two years running
def objectify(self, load, node, depth=2, path=None):
""" Given a node ID, return an object the information available about
this node. This accepts a loader function as it's first argument, which
is expected to return all tuples of (predicate, object, source) for
the given subject. """
if path is None:
path = set()
if self.is_object:
if depth < 1:
return
return self._objectify_object(load, node, depth, path)
elif self.is_array:
if depth < 1:
return
return [self.items.objectify(load, node, depth, path)]
else:
return node
def _objectify_object(self, load, node, depth, path):
# Support inline objects which don't count towards the depth.
next_depth = depth
if not self.schema.get('inline'):
next_depth = depth - 1
sub_path = path.union([node])
obj = {
self.subject: node,
'$schema': self.path,
'$sources': [],
'$collections': [],
'$authors': [],
'$attrcount': 0,
'$linkcount': 0,
}
for stmt in load(node):
prop = self.get_property(stmt['predicate'])
if prop is None:
continue
if stmt['object'] in path and not prop.is_value:
continue
if prop.name not in obj:
obj['$attrcount'] += 1
if stmt['type'] == TYPE_LINK:
obj['$linkcount'] += 1
if stmt.get('source') and \
stmt.get('source') not in obj['$sources']:
obj['$sources'].append(stmt.get('source'))
if stmt.get('collection') and \
stmt.get('collection') not in obj['$collections']:
obj['$collections'].append(stmt.get('collection'))
if stmt.get('author') and \
stmt.get('author') not in obj['$authors']:
obj['$authors'].append(stmt.get('author'))
value = prop.objectify(load, stmt['object'], next_depth, sub_path)
if value is None:
continue
if prop.is_array and prop.name in obj:
obj[prop.name].extend(value)
else:
obj[prop.name] = value
return obj
|
pudo/jsonmapping
|
jsonmapping/statements.py
|
StatementsVisitor.triplify
|
python
|
def triplify(self, data, parent=None):
if data is None:
return
if self.is_object:
for res in self._triplify_object(data, parent):
yield res
elif self.is_array:
for item in data:
for res in self.items.triplify(item, parent):
yield res
else:
# TODO: figure out if I ever want to check for reverse here.
type_name = typecast.name(data)
obj = typecast.stringify(type_name, data)
if obj is not None:
obj = obj.strip()
yield (parent, self.predicate, obj, type_name)
|
Recursively generate statements from the data supplied.
|
train
|
https://github.com/pudo/jsonmapping/blob/4cf0a20a393ba82e00651c6fd39522a67a0155de/jsonmapping/statements.py#L53-L71
|
[
"def _triplify_object(self, data, parent):\n \"\"\" Create bi-directional statements for object relationships. \"\"\"\n subject = self.get_subject(data)\n if self.path:\n yield (subject, TYPE_SCHEMA, self.path, TYPE_SCHEMA)\n\n if parent is not None:\n yield (parent, self.predicate, subject, TYPE_LINK)\n if self.reverse is not None:\n yield (subject, self.reverse, parent, TYPE_LINK)\n\n for prop in self.properties:\n for res in prop.triplify(data.get(prop.name), subject):\n yield res\n"
] |
class StatementsVisitor(SchemaVisitor):
""" This class has utility functions for transforming JSON schema defined
objects into a series of RDF-like statements (i.e. subject, predicate,
object, context) quads. It can be used independently of any specific
storage backend, including RDF. """
@property
def subject(self):
return self.schema.get('rdfSubject', 'id')
def get_subject(self, data):
""" Try to get a unique ID from the object. By default, this will be
the 'id' field of any given object, or a field specified by the
'rdfSubject' property. If no other option is available, a UUID will be
generated. """
if not isinstance(data, Mapping):
return None
if data.get(self.subject):
return data.get(self.subject)
return uuid.uuid4().urn
@property
def predicate(self):
return self.schema.get('rdfName', self.name)
@property
def reverse(self):
""" Reverse links make sense for object to object links where we later
may want to also query the reverse of the relationship, e.g. when obj1
is a child of obj2, we want to infer that obj2 is a parent of obj1. """
name = self.schema.get('rdfReverse')
if name is not None:
return name
if self.parent is not None and self.parent.is_array:
return self.parent.reverse
def get_property(self, predicate):
for prop in self.properties:
if predicate == prop.name:
return prop
def _triplify_object(self, data, parent):
""" Create bi-directional statements for object relationships. """
subject = self.get_subject(data)
if self.path:
yield (subject, TYPE_SCHEMA, self.path, TYPE_SCHEMA)
if parent is not None:
yield (parent, self.predicate, subject, TYPE_LINK)
if self.reverse is not None:
yield (subject, self.reverse, parent, TYPE_LINK)
for prop in self.properties:
for res in prop.triplify(data.get(prop.name), subject):
yield res
# Clever Method Names Award, 2014 and two years running
def objectify(self, load, node, depth=2, path=None):
""" Given a node ID, return an object the information available about
this node. This accepts a loader function as it's first argument, which
is expected to return all tuples of (predicate, object, source) for
the given subject. """
if path is None:
path = set()
if self.is_object:
if depth < 1:
return
return self._objectify_object(load, node, depth, path)
elif self.is_array:
if depth < 1:
return
return [self.items.objectify(load, node, depth, path)]
else:
return node
def _objectify_object(self, load, node, depth, path):
# Support inline objects which don't count towards the depth.
next_depth = depth
if not self.schema.get('inline'):
next_depth = depth - 1
sub_path = path.union([node])
obj = {
self.subject: node,
'$schema': self.path,
'$sources': [],
'$collections': [],
'$authors': [],
'$attrcount': 0,
'$linkcount': 0,
}
for stmt in load(node):
prop = self.get_property(stmt['predicate'])
if prop is None:
continue
if stmt['object'] in path and not prop.is_value:
continue
if prop.name not in obj:
obj['$attrcount'] += 1
if stmt['type'] == TYPE_LINK:
obj['$linkcount'] += 1
if stmt.get('source') and \
stmt.get('source') not in obj['$sources']:
obj['$sources'].append(stmt.get('source'))
if stmt.get('collection') and \
stmt.get('collection') not in obj['$collections']:
obj['$collections'].append(stmt.get('collection'))
if stmt.get('author') and \
stmt.get('author') not in obj['$authors']:
obj['$authors'].append(stmt.get('author'))
value = prop.objectify(load, stmt['object'], next_depth, sub_path)
if value is None:
continue
if prop.is_array and prop.name in obj:
obj[prop.name].extend(value)
else:
obj[prop.name] = value
return obj
|
pudo/jsonmapping
|
jsonmapping/statements.py
|
StatementsVisitor._triplify_object
|
python
|
def _triplify_object(self, data, parent):
subject = self.get_subject(data)
if self.path:
yield (subject, TYPE_SCHEMA, self.path, TYPE_SCHEMA)
if parent is not None:
yield (parent, self.predicate, subject, TYPE_LINK)
if self.reverse is not None:
yield (subject, self.reverse, parent, TYPE_LINK)
for prop in self.properties:
for res in prop.triplify(data.get(prop.name), subject):
yield res
|
Create bi-directional statements for object relationships.
|
train
|
https://github.com/pudo/jsonmapping/blob/4cf0a20a393ba82e00651c6fd39522a67a0155de/jsonmapping/statements.py#L73-L86
|
[
"def get_subject(self, data):\n \"\"\" Try to get a unique ID from the object. By default, this will be\n the 'id' field of any given object, or a field specified by the\n 'rdfSubject' property. If no other option is available, a UUID will be\n generated. \"\"\"\n if not isinstance(data, Mapping):\n return None\n if data.get(self.subject):\n return data.get(self.subject)\n return uuid.uuid4().urn\n"
] |
class StatementsVisitor(SchemaVisitor):
""" This class has utility functions for transforming JSON schema defined
objects into a series of RDF-like statements (i.e. subject, predicate,
object, context) quads. It can be used independently of any specific
storage backend, including RDF. """
@property
def subject(self):
return self.schema.get('rdfSubject', 'id')
def get_subject(self, data):
""" Try to get a unique ID from the object. By default, this will be
the 'id' field of any given object, or a field specified by the
'rdfSubject' property. If no other option is available, a UUID will be
generated. """
if not isinstance(data, Mapping):
return None
if data.get(self.subject):
return data.get(self.subject)
return uuid.uuid4().urn
@property
def predicate(self):
return self.schema.get('rdfName', self.name)
@property
def reverse(self):
""" Reverse links make sense for object to object links where we later
may want to also query the reverse of the relationship, e.g. when obj1
is a child of obj2, we want to infer that obj2 is a parent of obj1. """
name = self.schema.get('rdfReverse')
if name is not None:
return name
if self.parent is not None and self.parent.is_array:
return self.parent.reverse
def get_property(self, predicate):
for prop in self.properties:
if predicate == prop.name:
return prop
def triplify(self, data, parent=None):
""" Recursively generate statements from the data supplied. """
if data is None:
return
if self.is_object:
for res in self._triplify_object(data, parent):
yield res
elif self.is_array:
for item in data:
for res in self.items.triplify(item, parent):
yield res
else:
# TODO: figure out if I ever want to check for reverse here.
type_name = typecast.name(data)
obj = typecast.stringify(type_name, data)
if obj is not None:
obj = obj.strip()
yield (parent, self.predicate, obj, type_name)
# Clever Method Names Award, 2014 and two years running
def objectify(self, load, node, depth=2, path=None):
""" Given a node ID, return an object the information available about
this node. This accepts a loader function as it's first argument, which
is expected to return all tuples of (predicate, object, source) for
the given subject. """
if path is None:
path = set()
if self.is_object:
if depth < 1:
return
return self._objectify_object(load, node, depth, path)
elif self.is_array:
if depth < 1:
return
return [self.items.objectify(load, node, depth, path)]
else:
return node
def _objectify_object(self, load, node, depth, path):
# Support inline objects which don't count towards the depth.
next_depth = depth
if not self.schema.get('inline'):
next_depth = depth - 1
sub_path = path.union([node])
obj = {
self.subject: node,
'$schema': self.path,
'$sources': [],
'$collections': [],
'$authors': [],
'$attrcount': 0,
'$linkcount': 0,
}
for stmt in load(node):
prop = self.get_property(stmt['predicate'])
if prop is None:
continue
if stmt['object'] in path and not prop.is_value:
continue
if prop.name not in obj:
obj['$attrcount'] += 1
if stmt['type'] == TYPE_LINK:
obj['$linkcount'] += 1
if stmt.get('source') and \
stmt.get('source') not in obj['$sources']:
obj['$sources'].append(stmt.get('source'))
if stmt.get('collection') and \
stmt.get('collection') not in obj['$collections']:
obj['$collections'].append(stmt.get('collection'))
if stmt.get('author') and \
stmt.get('author') not in obj['$authors']:
obj['$authors'].append(stmt.get('author'))
value = prop.objectify(load, stmt['object'], next_depth, sub_path)
if value is None:
continue
if prop.is_array and prop.name in obj:
obj[prop.name].extend(value)
else:
obj[prop.name] = value
return obj
|
pudo/jsonmapping
|
jsonmapping/statements.py
|
StatementsVisitor.objectify
|
python
|
def objectify(self, load, node, depth=2, path=None):
if path is None:
path = set()
if self.is_object:
if depth < 1:
return
return self._objectify_object(load, node, depth, path)
elif self.is_array:
if depth < 1:
return
return [self.items.objectify(load, node, depth, path)]
else:
return node
|
Given a node ID, return an object the information available about
this node. This accepts a loader function as it's first argument, which
is expected to return all tuples of (predicate, object, source) for
the given subject.
|
train
|
https://github.com/pudo/jsonmapping/blob/4cf0a20a393ba82e00651c6fd39522a67a0155de/jsonmapping/statements.py#L89-L106
|
[
"def _objectify_object(self, load, node, depth, path):\n # Support inline objects which don't count towards the depth.\n next_depth = depth\n if not self.schema.get('inline'):\n next_depth = depth - 1\n\n sub_path = path.union([node])\n obj = {\n self.subject: node,\n '$schema': self.path,\n '$sources': [],\n '$collections': [],\n '$authors': [],\n '$attrcount': 0,\n '$linkcount': 0,\n }\n for stmt in load(node):\n prop = self.get_property(stmt['predicate'])\n if prop is None:\n continue\n if stmt['object'] in path and not prop.is_value:\n continue\n if prop.name not in obj:\n obj['$attrcount'] += 1\n if stmt['type'] == TYPE_LINK:\n obj['$linkcount'] += 1\n\n if stmt.get('source') and \\\n stmt.get('source') not in obj['$sources']:\n obj['$sources'].append(stmt.get('source'))\n\n if stmt.get('collection') and \\\n stmt.get('collection') not in obj['$collections']:\n obj['$collections'].append(stmt.get('collection'))\n\n if stmt.get('author') and \\\n stmt.get('author') not in obj['$authors']:\n obj['$authors'].append(stmt.get('author'))\n\n value = prop.objectify(load, stmt['object'], next_depth, sub_path)\n if value is None:\n continue\n\n if prop.is_array and prop.name in obj:\n obj[prop.name].extend(value)\n else:\n obj[prop.name] = value\n return obj\n"
] |
class StatementsVisitor(SchemaVisitor):
""" This class has utility functions for transforming JSON schema defined
objects into a series of RDF-like statements (i.e. subject, predicate,
object, context) quads. It can be used independently of any specific
storage backend, including RDF. """
@property
def subject(self):
return self.schema.get('rdfSubject', 'id')
def get_subject(self, data):
""" Try to get a unique ID from the object. By default, this will be
the 'id' field of any given object, or a field specified by the
'rdfSubject' property. If no other option is available, a UUID will be
generated. """
if not isinstance(data, Mapping):
return None
if data.get(self.subject):
return data.get(self.subject)
return uuid.uuid4().urn
@property
def predicate(self):
return self.schema.get('rdfName', self.name)
@property
def reverse(self):
""" Reverse links make sense for object to object links where we later
may want to also query the reverse of the relationship, e.g. when obj1
is a child of obj2, we want to infer that obj2 is a parent of obj1. """
name = self.schema.get('rdfReverse')
if name is not None:
return name
if self.parent is not None and self.parent.is_array:
return self.parent.reverse
def get_property(self, predicate):
for prop in self.properties:
if predicate == prop.name:
return prop
def triplify(self, data, parent=None):
""" Recursively generate statements from the data supplied. """
if data is None:
return
if self.is_object:
for res in self._triplify_object(data, parent):
yield res
elif self.is_array:
for item in data:
for res in self.items.triplify(item, parent):
yield res
else:
# TODO: figure out if I ever want to check for reverse here.
type_name = typecast.name(data)
obj = typecast.stringify(type_name, data)
if obj is not None:
obj = obj.strip()
yield (parent, self.predicate, obj, type_name)
def _triplify_object(self, data, parent):
""" Create bi-directional statements for object relationships. """
subject = self.get_subject(data)
if self.path:
yield (subject, TYPE_SCHEMA, self.path, TYPE_SCHEMA)
if parent is not None:
yield (parent, self.predicate, subject, TYPE_LINK)
if self.reverse is not None:
yield (subject, self.reverse, parent, TYPE_LINK)
for prop in self.properties:
for res in prop.triplify(data.get(prop.name), subject):
yield res
# Clever Method Names Award, 2014 and two years running
def _objectify_object(self, load, node, depth, path):
# Support inline objects which don't count towards the depth.
next_depth = depth
if not self.schema.get('inline'):
next_depth = depth - 1
sub_path = path.union([node])
obj = {
self.subject: node,
'$schema': self.path,
'$sources': [],
'$collections': [],
'$authors': [],
'$attrcount': 0,
'$linkcount': 0,
}
for stmt in load(node):
prop = self.get_property(stmt['predicate'])
if prop is None:
continue
if stmt['object'] in path and not prop.is_value:
continue
if prop.name not in obj:
obj['$attrcount'] += 1
if stmt['type'] == TYPE_LINK:
obj['$linkcount'] += 1
if stmt.get('source') and \
stmt.get('source') not in obj['$sources']:
obj['$sources'].append(stmt.get('source'))
if stmt.get('collection') and \
stmt.get('collection') not in obj['$collections']:
obj['$collections'].append(stmt.get('collection'))
if stmt.get('author') and \
stmt.get('author') not in obj['$authors']:
obj['$authors'].append(stmt.get('author'))
value = prop.objectify(load, stmt['object'], next_depth, sub_path)
if value is None:
continue
if prop.is_array and prop.name in obj:
obj[prop.name].extend(value)
else:
obj[prop.name] = value
return obj
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/abstract_type_object.py
|
convert
|
python
|
def convert(data):
try:
st = basestring
except NameError:
st = str
if isinstance(data, st):
return str(data)
elif isinstance(data, Mapping):
return dict(map(convert, data.iteritems()))
elif isinstance(data, Iterable):
return type(data)(map(convert, data))
else:
return data
|
Convert from unicode to native ascii
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/abstract_type_object.py#L103-L118
| null |
import json
from hashlib import md5
from collections import Mapping, Iterable
from .constants import SchemaTypes
class AbstractTypeObject(object):
_type = None
type_format = None
properties = None
item = None #: set if type is array
def __init__(self, obj, name, root, storage):
self.raw = obj
self.name = name
self.root = root
self.storage = storage
def get_type_properties(self, property_obj, name, additional_prop=False):
""" Get internal properties of property (extended in schema)
:param dict property_obj: raw property object
:param str name: name of property
:param bool additional_prop: recursion's param
:return: Type, format and internal properties of property
:rtype: tuple(str, str, dict)
"""
property_type = property_obj.get('type', 'object')
property_format = property_obj.get('format')
property_dict = {}
if property_type in ['object', 'array']:
schema_type = SchemaTypes.MAPPED if additional_prop else SchemaTypes.INLINE
schema_id = self._get_object_schema_id(property_obj, schema_type)
if not ('$ref' in property_obj or self.storage.get(schema_id)):
_schema = self.storage.create_schema(
property_obj, name, schema_type, root=self.root)
self._after_create_schema(_schema)
property_type = schema_id
property_dict['default'] = property_obj.get('default')
property_dict['maximum'] = property_obj.get('maximum')
property_dict['exclusive_maximum'] = property_obj.get('exclusiveMaximum')
property_dict['minimum'] = property_obj.get('minimum')
property_dict['exclusive_minimum'] = property_obj.get('exclusiveMinimum')
property_dict['max_length'] = property_obj.get('maxLength')
property_dict['min_length'] = property_obj.get('minLength')
#TODO: fixme. remove ugly convert. add property template renderer instead
property_dict['enum'] = convert(property_obj.get('enum'))
#TODO: fixme. cleanup empty properties. add configurable filter for properties instead
property_dict = {k: v for k, v in property_dict.items() if v}
return property_type, property_format, property_dict
@staticmethod
def _get_id(base):
m = md5()
m.update(base.encode('utf-8'))
return m.hexdigest()
def _get_object_schema_id(self, obj, schema_type):
if (schema_type == SchemaTypes.prefixes[SchemaTypes.MAPPED]) and ('$ref' in obj):
base = obj['$ref']
prefix = schema_type
elif '$ref' in obj:
base = obj['$ref']
prefix = SchemaTypes.prefixes[SchemaTypes.DEFINITION]
else:
base = json.dumps(obj)
prefix = SchemaTypes.prefixes[schema_type]
return '{}_{}'.format(prefix, self._get_id(base))
def set_type_by_schema(self, schema_obj, schema_type):
"""
Set property type by schema object
Schema will create, if it doesn't exists in collection
:param dict schema_obj: raw schema object
:param str schema_type:
"""
schema_id = self._get_object_schema_id(schema_obj, schema_type)
if not self.storage.contains(schema_id):
schema = self.storage.create_schema(
schema_obj, self.name, schema_type, root=self.root)
assert schema.schema_id == schema_id
self._type = schema_id
def _after_create_schema(self, schema):
pass
@property
def type(self):
return self._type
@property
def is_array(self):
return self._type == 'array'
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/abstract_type_object.py
|
AbstractTypeObject.get_type_properties
|
python
|
def get_type_properties(self, property_obj, name, additional_prop=False):
property_type = property_obj.get('type', 'object')
property_format = property_obj.get('format')
property_dict = {}
if property_type in ['object', 'array']:
schema_type = SchemaTypes.MAPPED if additional_prop else SchemaTypes.INLINE
schema_id = self._get_object_schema_id(property_obj, schema_type)
if not ('$ref' in property_obj or self.storage.get(schema_id)):
_schema = self.storage.create_schema(
property_obj, name, schema_type, root=self.root)
self._after_create_schema(_schema)
property_type = schema_id
property_dict['default'] = property_obj.get('default')
property_dict['maximum'] = property_obj.get('maximum')
property_dict['exclusive_maximum'] = property_obj.get('exclusiveMaximum')
property_dict['minimum'] = property_obj.get('minimum')
property_dict['exclusive_minimum'] = property_obj.get('exclusiveMinimum')
property_dict['max_length'] = property_obj.get('maxLength')
property_dict['min_length'] = property_obj.get('minLength')
#TODO: fixme. remove ugly convert. add property template renderer instead
property_dict['enum'] = convert(property_obj.get('enum'))
#TODO: fixme. cleanup empty properties. add configurable filter for properties instead
property_dict = {k: v for k, v in property_dict.items() if v}
return property_type, property_format, property_dict
|
Get internal properties of property (extended in schema)
:param dict property_obj: raw property object
:param str name: name of property
:param bool additional_prop: recursion's param
:return: Type, format and internal properties of property
:rtype: tuple(str, str, dict)
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/abstract_type_object.py#L19-L55
|
[
"def convert(data):\n \"\"\"\n Convert from unicode to native ascii\n \"\"\"\n try:\n st = basestring\n except NameError:\n st = str\n if isinstance(data, st):\n return str(data)\n elif isinstance(data, Mapping):\n return dict(map(convert, data.iteritems()))\n elif isinstance(data, Iterable):\n return type(data)(map(convert, data))\n else:\n return data\n",
"def _get_object_schema_id(self, obj, schema_type):\n if (schema_type == SchemaTypes.prefixes[SchemaTypes.MAPPED]) and ('$ref' in obj):\n base = obj['$ref']\n prefix = schema_type\n elif '$ref' in obj:\n base = obj['$ref']\n prefix = SchemaTypes.prefixes[SchemaTypes.DEFINITION]\n else:\n base = json.dumps(obj)\n prefix = SchemaTypes.prefixes[schema_type]\n return '{}_{}'.format(prefix, self._get_id(base))\n"
] |
class AbstractTypeObject(object):
_type = None
type_format = None
properties = None
item = None #: set if type is array
def __init__(self, obj, name, root, storage):
self.raw = obj
self.name = name
self.root = root
self.storage = storage
@staticmethod
def _get_id(base):
m = md5()
m.update(base.encode('utf-8'))
return m.hexdigest()
def _get_object_schema_id(self, obj, schema_type):
if (schema_type == SchemaTypes.prefixes[SchemaTypes.MAPPED]) and ('$ref' in obj):
base = obj['$ref']
prefix = schema_type
elif '$ref' in obj:
base = obj['$ref']
prefix = SchemaTypes.prefixes[SchemaTypes.DEFINITION]
else:
base = json.dumps(obj)
prefix = SchemaTypes.prefixes[schema_type]
return '{}_{}'.format(prefix, self._get_id(base))
def set_type_by_schema(self, schema_obj, schema_type):
"""
Set property type by schema object
Schema will create, if it doesn't exists in collection
:param dict schema_obj: raw schema object
:param str schema_type:
"""
schema_id = self._get_object_schema_id(schema_obj, schema_type)
if not self.storage.contains(schema_id):
schema = self.storage.create_schema(
schema_obj, self.name, schema_type, root=self.root)
assert schema.schema_id == schema_id
self._type = schema_id
def _after_create_schema(self, schema):
pass
@property
def type(self):
return self._type
@property
def is_array(self):
return self._type == 'array'
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/abstract_type_object.py
|
AbstractTypeObject.set_type_by_schema
|
python
|
def set_type_by_schema(self, schema_obj, schema_type):
schema_id = self._get_object_schema_id(schema_obj, schema_type)
if not self.storage.contains(schema_id):
schema = self.storage.create_schema(
schema_obj, self.name, schema_type, root=self.root)
assert schema.schema_id == schema_id
self._type = schema_id
|
Set property type by schema object
Schema will create, if it doesn't exists in collection
:param dict schema_obj: raw schema object
:param str schema_type:
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/abstract_type_object.py#L75-L89
|
[
"def _get_object_schema_id(self, obj, schema_type):\n if (schema_type == SchemaTypes.prefixes[SchemaTypes.MAPPED]) and ('$ref' in obj):\n base = obj['$ref']\n prefix = schema_type\n elif '$ref' in obj:\n base = obj['$ref']\n prefix = SchemaTypes.prefixes[SchemaTypes.DEFINITION]\n else:\n base = json.dumps(obj)\n prefix = SchemaTypes.prefixes[schema_type]\n return '{}_{}'.format(prefix, self._get_id(base))\n"
] |
class AbstractTypeObject(object):
_type = None
type_format = None
properties = None
item = None #: set if type is array
def __init__(self, obj, name, root, storage):
self.raw = obj
self.name = name
self.root = root
self.storage = storage
def get_type_properties(self, property_obj, name, additional_prop=False):
""" Get internal properties of property (extended in schema)
:param dict property_obj: raw property object
:param str name: name of property
:param bool additional_prop: recursion's param
:return: Type, format and internal properties of property
:rtype: tuple(str, str, dict)
"""
property_type = property_obj.get('type', 'object')
property_format = property_obj.get('format')
property_dict = {}
if property_type in ['object', 'array']:
schema_type = SchemaTypes.MAPPED if additional_prop else SchemaTypes.INLINE
schema_id = self._get_object_schema_id(property_obj, schema_type)
if not ('$ref' in property_obj or self.storage.get(schema_id)):
_schema = self.storage.create_schema(
property_obj, name, schema_type, root=self.root)
self._after_create_schema(_schema)
property_type = schema_id
property_dict['default'] = property_obj.get('default')
property_dict['maximum'] = property_obj.get('maximum')
property_dict['exclusive_maximum'] = property_obj.get('exclusiveMaximum')
property_dict['minimum'] = property_obj.get('minimum')
property_dict['exclusive_minimum'] = property_obj.get('exclusiveMinimum')
property_dict['max_length'] = property_obj.get('maxLength')
property_dict['min_length'] = property_obj.get('minLength')
#TODO: fixme. remove ugly convert. add property template renderer instead
property_dict['enum'] = convert(property_obj.get('enum'))
#TODO: fixme. cleanup empty properties. add configurable filter for properties instead
property_dict = {k: v for k, v in property_dict.items() if v}
return property_type, property_format, property_dict
@staticmethod
def _get_id(base):
m = md5()
m.update(base.encode('utf-8'))
return m.hexdigest()
def _get_object_schema_id(self, obj, schema_type):
if (schema_type == SchemaTypes.prefixes[SchemaTypes.MAPPED]) and ('$ref' in obj):
base = obj['$ref']
prefix = schema_type
elif '$ref' in obj:
base = obj['$ref']
prefix = SchemaTypes.prefixes[SchemaTypes.DEFINITION]
else:
base = json.dumps(obj)
prefix = SchemaTypes.prefixes[schema_type]
return '{}_{}'.format(prefix, self._get_id(base))
def _after_create_schema(self, schema):
pass
@property
def type(self):
return self._type
@property
def is_array(self):
return self._type == 'array'
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/base_swagger_object.py
|
BaseSwaggerObject._fill_schemas_from_definitions
|
python
|
def _fill_schemas_from_definitions(self, obj):
if obj.get('definitions'):
self.schemas.clear()
all_of_stack = []
for name, definition in obj['definitions'].items():
if 'allOf' in definition:
all_of_stack.append((name, definition))
else:
self.schemas.create_schema(
definition, name, SchemaTypes.DEFINITION, root=self)
while all_of_stack:
name, definition = all_of_stack.pop(0)
self.schemas.create_schema(
definition, name, SchemaTypes.DEFINITION, root=self)
|
At first create schemas without 'AllOf'
:param obj:
:return: None
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/base_swagger_object.py#L108-L125
| null |
class BaseSwaggerObject(SecurityMixin):
"""
Represents Swagger Object
"""
raw = None
#: Operation collection
#:
#: key: operation_id, value: Operation object
operations = None
#: Operations grouped by tags
#:
#: key: tag name, value: list of Operation object
tags = None
schemas = SchemaObjects
#: Parameter definitions from Parameters Definitions Object
#:
#: key: reference path, value: Parameter object
parameter_definitions = None
#: Response definitions from Responses Definitions Object
#:
#: key: reference path, value: Response object
response_definitions = None
#: Security definitions from Security Definitions Object
#:
#: key: security name, value: SecurityDefinition object
security_definitions = None
#: Represents tag descriptions from Swagger Tag Object
#:
#: key: tag name, value: dict with keys ``description`` and ``externalDocs``
tag_descriptions = None
#: Example Manager. Must be subclass of Exampilator
exampilator = None
def __init__(self, obj, exampilator=None, examples=None):
self.exampilator = exampilator or Exampilator
assert issubclass(self.exampilator, Exampilator)
if examples:
try:
self.exampilator.schema_validate(examples, examples_json_schema)
except ValidationError as err:
raise ConverterError(err.message)
self.exampilator.fill_examples(examples)
if obj.get('swagger') != '2.0':
raise ConverterError('Invalid Swagger version')
self._fill_root_parameters(obj)
self._fill_schemas_from_definitions(obj)
self._fill_parameter_definitions(obj)
self._fill_response_definitions(obj)
self._fill_security_definitions(obj)
self._fill_securities(obj)
self._fill_operations(obj)
def _fill_operations(self, *args):
self.operations = {}
self._fill_tag_descriptions()
self.tags = defaultdict(list)
for path, operations in self.raw['paths'].items():
path_params = []
for param in operations.get('parameters', []):
if param.get('$ref'):
path_params.append(self.parameter_definitions[param['$ref']])
else:
path_params.append(
Parameter(param, name=param['name'], root=self, storage=self.schemas))
for method, operation in operations.items():
if method == 'parameters':
continue
op = Operation(operation, method, path, self, self.schemas, path_params)
self.operations[op.operation_id] = op
for tag in op.tags:
self.tags[tag].append(op)
def _fill_tag_descriptions(self):
if 'tags' in self.raw:
self.tag_descriptions = {}
for tag in self.raw['tags']:
if 'description' in tag or 'externalDocs' in tag:
self.tag_descriptions[tag['name']] = {
'description': tag.get('description'),
'externalDocs': tag.get('externalDocs')
}
def _fill_parameter_definitions(self, obj):
if obj.get('parameters'):
self.parameter_definitions = {}
for name, parameter in obj['parameters'].items():
key = '#/parameters/{}'.format(name)
self.parameter_definitions[key] = Parameter(
parameter, name=parameter['name'], root=self, storage=self.schemas)
def _fill_response_definitions(self, obj):
if obj.get('responses'):
self.response_definitions = {}
for name, response in obj['responses'].items():
key = '#/responses/{}'.format(name)
self.response_definitions[key] = Response(
response, name=name, root=self, storage=self.schemas)
def _fill_security_definitions(self, obj):
if obj.get('securityDefinitions'):
self.security_definitions = {
name: SecurityDefinition(name, _obj) for name, _obj in obj['securityDefinitions'].items()
}
def _fill_root_parameters(self, obj):
if not obj:
return None
self.raw = obj
self.info = obj.get('info')
self.host = obj.get('host', '')
self.base_path = obj.get('basePath', '')
self.consumes = obj.get('consumes', ['application/json'])
self.produces = obj.get('produces', ['application/json'])
self.schemes = obj.get('schemes', ['http'])
self.external_docs = obj.get('externalDocs')
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/schema.py
|
Schema.get_type_properties
|
python
|
def get_type_properties(self, property_obj, name, additional_prop=False):
property_type, property_format, property_dict = \
super(Schema, self).get_type_properties(property_obj, name, additional_prop=additional_prop)
_schema = self.storage.get(property_type)
if _schema and ('additionalProperties' in property_obj):
_property_type, _property_format, _property_dict = super(Schema, self).get_type_properties(
property_obj['additionalProperties'], '{}-mapped'.format(name), additional_prop=True)
if _property_type not in PRIMITIVE_TYPES:
SchemaMapWrapper.wrap(self.storage.get(_property_type))
_schema.nested_schemas.add(_property_type)
else:
_schema.type_format = _property_type
return property_type, property_format, property_dict
|
Extend parents 'Get internal properties of property'-method
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/schema.py#L55-L71
|
[
"def get_type_properties(self, property_obj, name, additional_prop=False):\n \"\"\" Get internal properties of property (extended in schema)\n\n :param dict property_obj: raw property object\n :param str name: name of property\n :param bool additional_prop: recursion's param\n :return: Type, format and internal properties of property\n :rtype: tuple(str, str, dict)\n \"\"\"\n property_type = property_obj.get('type', 'object')\n property_format = property_obj.get('format')\n property_dict = {}\n\n if property_type in ['object', 'array']:\n schema_type = SchemaTypes.MAPPED if additional_prop else SchemaTypes.INLINE\n schema_id = self._get_object_schema_id(property_obj, schema_type)\n if not ('$ref' in property_obj or self.storage.get(schema_id)):\n _schema = self.storage.create_schema(\n property_obj, name, schema_type, root=self.root)\n self._after_create_schema(_schema)\n property_type = schema_id\n\n property_dict['default'] = property_obj.get('default')\n property_dict['maximum'] = property_obj.get('maximum')\n property_dict['exclusive_maximum'] = property_obj.get('exclusiveMaximum')\n property_dict['minimum'] = property_obj.get('minimum')\n property_dict['exclusive_minimum'] = property_obj.get('exclusiveMinimum')\n property_dict['max_length'] = property_obj.get('maxLength')\n property_dict['min_length'] = property_obj.get('minLength')\n\n #TODO: fixme. remove ugly convert. add property template renderer instead\n property_dict['enum'] = convert(property_obj.get('enum'))\n\n #TODO: fixme. cleanup empty properties. add configurable filter for properties instead\n property_dict = {k: v for k, v in property_dict.items() if v}\n\n return property_type, property_format, property_dict\n",
"def wrap(schema):\n if isinstance(schema, Schema):\n schema.__class__ = SchemaMapWrapper\n"
] |
class Schema(AbstractTypeObject):
"""
Represents Swagger Schema Object
"""
schema_id = None
schema_type = None #: definition or inline
ref_path = None #: path for definition schemas
nested_schemas = None
all_of = None
def __init__(self, obj, schema_type, **kwargs):
assert schema_type in SchemaTypes.prefixes
super(Schema, self).__init__(obj, **kwargs)
self.nested_schemas = set()
self.schema_type = schema_type
self.description = obj.get('description', '').replace('"', '\'')
self._type = obj.get('type', 'object')
self.type_format = obj.get('format')
self.schema_example = obj.get('example')
self.read_only = obj.get('readOnly', False)
self.external_docs = obj.get('externalDocs')
if self._type in PRIMITIVE_TYPES:
self.properties = [{
'name': kwargs.get('name', ''),
'description': obj.get('description', ''),
'required': obj.get('required', False),
'type': self.type,
'type_format': self.type_format,
'type_properties': self.get_type_properties(obj, '')[2],
}]
if schema_type == SchemaTypes.DEFINITION:
self.ref_path = '#/definitions/{}'.format(self.name)
if self.is_array:
self.item = dict(zip(
('type', 'type_format', 'type_properties'),
self.get_type_properties(obj['items'], self.name)
))
self.name += '_array'
if self.item['type'] not in PRIMITIVE_TYPES:
self.nested_schemas.add(self.item['type'])
self._set_properties(obj)
self._parse_all_of_property(obj)
self._set_schema_id()
def _set_schema_id(self):
_id = self._get_id(self.ref_path or json.dumps(self.raw))
self.schema_id = '{}_{}'.format(
SchemaTypes.prefixes[self.schema_type], _id)
def _set_properties(self, obj):
if obj.get('properties'):
self.properties = []
required_fields = self.raw.get('required', [])
for name, property_obj in self.raw['properties'].items():
property_type, property_format, prop = self.get_type_properties(property_obj, name)
if property_type not in PRIMITIVE_TYPES:
self.nested_schemas.add(property_type)
_obj = {
'name': name,
'description': '',
'required': name in required_fields,
'type': property_type,
'type_format': property_format,
'type_properties': prop,
}
if 'description' in property_obj:
_obj['description'] = property_obj['description'].replace('"', '\'')
self.properties.append(_obj)
def _parse_all_of_property(self, obj):
if not obj.get('allOf'):
return None
self.all_of = []
schema = None
for _obj in obj['allOf']:
_id = self._get_object_schema_id(_obj, SchemaTypes.INLINE)
if not self.storage.contains(_id):
schema = self.storage.create_schema(_obj, 'inline', SchemaTypes.INLINE, self.root)
assert schema.schema_id == _id
if len(self.all_of) > 0:
self.storage.merge_schemas(
self.storage.get(self.all_of[-1]),
schema if schema else self.storage.get(_id)
)
self.all_of.append(_id)
self.nested_schemas.add(_id)
def _after_create_schema(self, schema):
pass
@property
def is_inline(self):
return self.schema_type == SchemaTypes.INLINE
@property
def is_inline_array(self):
return self.is_inline and self.is_array
def __repr__(self):
return self.name
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/rst.py
|
SwaggerObject.sorted
|
python
|
def sorted(collection):
if len(collection) < 1:
return collection
if isinstance(collection, dict):
return sorted(collection.items(), key=lambda x: x[0])
if isinstance(list(collection)[0], Operation):
key = lambda x: x.operation_id
elif isinstance(list(collection)[0], str):
key = lambda x: SchemaObjects.get(x).name
else:
raise TypeError(type(collection[0]))
return sorted(collection, key=key)
|
sorting dict by key,
schema-collection by schema-name
operations by id
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/rst.py#L24-L42
| null |
class SwaggerObject(BaseSwaggerObject):
@staticmethod
def get_regular_properties(self, _type, *args, **kwargs):
"""Make table with properties by schema_id
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.schema_type == SchemaTypes.DEFINITION and not kwargs.get('definition'):
return ''
head = """.. csv-table::
:delim: |
:header: "Name", "Required", "Type", "Format", "Properties", "Description"
:widths: 20, 10, 15, 15, 30, 25
"""
body = []
if schema.properties:
for p in schema.properties:
body.append(' {} | {} | {} | {} | {} | {}'.format(
p.get('name') or '',
'Yes' if p.get('required') else 'No',
self.get_type_description(p['type'], *args, **kwargs),
p.get('type_format') or '',
'{}'.format(p.get('type_properties') or ''),
p.get('description') or '')
)
body.sort()
return (head + '\n'.join(body))
def get_type_description(self, _type, suffix='', *args, **kwargs):
""" Get description of type
:param suffix:
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.all_of:
models = ','.join(
(self.get_type_description(_type, *args, **kwargs) for _type in schema.all_of)
)
result = '{}'.format(models.split(',')[0])
for r in models.split(',')[1:]:
result += ' extended {}'.format(r)
elif schema.is_array:
result = 'array of {}'.format(
self.get_type_description(schema.item['type'], *args, **kwargs))
else:
result = ':ref:`{} <{}{}>`'.format(schema.name, schema.schema_id, suffix)
return result
def get_additional_properties(self, _type, *args, **kwargs):
"""Make head and table with additional properties by schema_id
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
body = []
for sch in schema.nested_schemas: # complex types
nested_schema = SchemaObjects.get(sch)
if not (nested_schema or isinstance(nested_schema, SchemaMapWrapper)):
continue
body.append('Map of {{"key":"{}"}}\n\n'.format(self.get_type_description(
nested_schema.schema_id, *args, **kwargs)) # head
)
if nested_schema.is_array: # table
_schema = SchemaObjects.get(nested_schema.item.get('type'))
if _schema and _schema.schema_type == SchemaTypes.INLINE:
body.append(self.get_regular_properties(_schema.schema_id, *args, **kwargs))
else:
body.append(self.get_regular_properties(nested_schema.schema_id, *args, **kwargs))
if schema.type_format: # basic types, only head
body.append(
'Map of {{"key":"{}"}}'.format(self.get_type_description(schema.type_format, *args, **kwargs)))
return ''.join(body)
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/rst.py
|
SwaggerObject.get_regular_properties
|
python
|
def get_regular_properties(self, _type, *args, **kwargs):
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.schema_type == SchemaTypes.DEFINITION and not kwargs.get('definition'):
return ''
head = """.. csv-table::
:delim: |
:header: "Name", "Required", "Type", "Format", "Properties", "Description"
:widths: 20, 10, 15, 15, 30, 25
"""
body = []
if schema.properties:
for p in schema.properties:
body.append(' {} | {} | {} | {} | {} | {}'.format(
p.get('name') or '',
'Yes' if p.get('required') else 'No',
self.get_type_description(p['type'], *args, **kwargs),
p.get('type_format') or '',
'{}'.format(p.get('type_properties') or ''),
p.get('description') or '')
)
body.sort()
return (head + '\n'.join(body))
|
Make table with properties by schema_id
:param str _type:
:rtype: str
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/rst.py#L44-L72
|
[
"def contains(cls, key):\n \"\"\" Check schema existence in collection by id\n\n :param str key:\n :rtype: bool\n \"\"\"\n return key in cls._schemas\n"
] |
class SwaggerObject(BaseSwaggerObject):
@staticmethod
def sorted(collection):
"""
sorting dict by key,
schema-collection by schema-name
operations by id
"""
if len(collection) < 1:
return collection
if isinstance(collection, dict):
return sorted(collection.items(), key=lambda x: x[0])
if isinstance(list(collection)[0], Operation):
key = lambda x: x.operation_id
elif isinstance(list(collection)[0], str):
key = lambda x: SchemaObjects.get(x).name
else:
raise TypeError(type(collection[0]))
return sorted(collection, key=key)
def get_type_description(self, _type, suffix='', *args, **kwargs):
""" Get description of type
:param suffix:
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.all_of:
models = ','.join(
(self.get_type_description(_type, *args, **kwargs) for _type in schema.all_of)
)
result = '{}'.format(models.split(',')[0])
for r in models.split(',')[1:]:
result += ' extended {}'.format(r)
elif schema.is_array:
result = 'array of {}'.format(
self.get_type_description(schema.item['type'], *args, **kwargs))
else:
result = ':ref:`{} <{}{}>`'.format(schema.name, schema.schema_id, suffix)
return result
def get_additional_properties(self, _type, *args, **kwargs):
"""Make head and table with additional properties by schema_id
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
body = []
for sch in schema.nested_schemas: # complex types
nested_schema = SchemaObjects.get(sch)
if not (nested_schema or isinstance(nested_schema, SchemaMapWrapper)):
continue
body.append('Map of {{"key":"{}"}}\n\n'.format(self.get_type_description(
nested_schema.schema_id, *args, **kwargs)) # head
)
if nested_schema.is_array: # table
_schema = SchemaObjects.get(nested_schema.item.get('type'))
if _schema and _schema.schema_type == SchemaTypes.INLINE:
body.append(self.get_regular_properties(_schema.schema_id, *args, **kwargs))
else:
body.append(self.get_regular_properties(nested_schema.schema_id, *args, **kwargs))
if schema.type_format: # basic types, only head
body.append(
'Map of {{"key":"{}"}}'.format(self.get_type_description(schema.type_format, *args, **kwargs)))
return ''.join(body)
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/rst.py
|
SwaggerObject.get_type_description
|
python
|
def get_type_description(self, _type, suffix='', *args, **kwargs):
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.all_of:
models = ','.join(
(self.get_type_description(_type, *args, **kwargs) for _type in schema.all_of)
)
result = '{}'.format(models.split(',')[0])
for r in models.split(',')[1:]:
result += ' extended {}'.format(r)
elif schema.is_array:
result = 'array of {}'.format(
self.get_type_description(schema.item['type'], *args, **kwargs))
else:
result = ':ref:`{} <{}{}>`'.format(schema.name, schema.schema_id, suffix)
return result
|
Get description of type
:param suffix:
:param str _type:
:rtype: str
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/rst.py#L74-L95
|
[
"def contains(cls, key):\n \"\"\" Check schema existence in collection by id\n\n :param str key:\n :rtype: bool\n \"\"\"\n return key in cls._schemas\n"
] |
class SwaggerObject(BaseSwaggerObject):
@staticmethod
def sorted(collection):
"""
sorting dict by key,
schema-collection by schema-name
operations by id
"""
if len(collection) < 1:
return collection
if isinstance(collection, dict):
return sorted(collection.items(), key=lambda x: x[0])
if isinstance(list(collection)[0], Operation):
key = lambda x: x.operation_id
elif isinstance(list(collection)[0], str):
key = lambda x: SchemaObjects.get(x).name
else:
raise TypeError(type(collection[0]))
return sorted(collection, key=key)
def get_regular_properties(self, _type, *args, **kwargs):
"""Make table with properties by schema_id
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.schema_type == SchemaTypes.DEFINITION and not kwargs.get('definition'):
return ''
head = """.. csv-table::
:delim: |
:header: "Name", "Required", "Type", "Format", "Properties", "Description"
:widths: 20, 10, 15, 15, 30, 25
"""
body = []
if schema.properties:
for p in schema.properties:
body.append(' {} | {} | {} | {} | {} | {}'.format(
p.get('name') or '',
'Yes' if p.get('required') else 'No',
self.get_type_description(p['type'], *args, **kwargs),
p.get('type_format') or '',
'{}'.format(p.get('type_properties') or ''),
p.get('description') or '')
)
body.sort()
return (head + '\n'.join(body))
def get_additional_properties(self, _type, *args, **kwargs):
"""Make head and table with additional properties by schema_id
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
body = []
for sch in schema.nested_schemas: # complex types
nested_schema = SchemaObjects.get(sch)
if not (nested_schema or isinstance(nested_schema, SchemaMapWrapper)):
continue
body.append('Map of {{"key":"{}"}}\n\n'.format(self.get_type_description(
nested_schema.schema_id, *args, **kwargs)) # head
)
if nested_schema.is_array: # table
_schema = SchemaObjects.get(nested_schema.item.get('type'))
if _schema and _schema.schema_type == SchemaTypes.INLINE:
body.append(self.get_regular_properties(_schema.schema_id, *args, **kwargs))
else:
body.append(self.get_regular_properties(nested_schema.schema_id, *args, **kwargs))
if schema.type_format: # basic types, only head
body.append(
'Map of {{"key":"{}"}}'.format(self.get_type_description(schema.type_format, *args, **kwargs)))
return ''.join(body)
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/rst.py
|
SwaggerObject.get_additional_properties
|
python
|
def get_additional_properties(self, _type, *args, **kwargs):
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
body = []
for sch in schema.nested_schemas: # complex types
nested_schema = SchemaObjects.get(sch)
if not (nested_schema or isinstance(nested_schema, SchemaMapWrapper)):
continue
body.append('Map of {{"key":"{}"}}\n\n'.format(self.get_type_description(
nested_schema.schema_id, *args, **kwargs)) # head
)
if nested_schema.is_array: # table
_schema = SchemaObjects.get(nested_schema.item.get('type'))
if _schema and _schema.schema_type == SchemaTypes.INLINE:
body.append(self.get_regular_properties(_schema.schema_id, *args, **kwargs))
else:
body.append(self.get_regular_properties(nested_schema.schema_id, *args, **kwargs))
if schema.type_format: # basic types, only head
body.append(
'Map of {{"key":"{}"}}'.format(self.get_type_description(schema.type_format, *args, **kwargs)))
return ''.join(body)
|
Make head and table with additional properties by schema_id
:param str _type:
:rtype: str
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/rst.py#L97-L123
|
[
"def contains(cls, key):\n \"\"\" Check schema existence in collection by id\n\n :param str key:\n :rtype: bool\n \"\"\"\n return key in cls._schemas\n"
] |
class SwaggerObject(BaseSwaggerObject):
@staticmethod
def sorted(collection):
"""
sorting dict by key,
schema-collection by schema-name
operations by id
"""
if len(collection) < 1:
return collection
if isinstance(collection, dict):
return sorted(collection.items(), key=lambda x: x[0])
if isinstance(list(collection)[0], Operation):
key = lambda x: x.operation_id
elif isinstance(list(collection)[0], str):
key = lambda x: SchemaObjects.get(x).name
else:
raise TypeError(type(collection[0]))
return sorted(collection, key=key)
def get_regular_properties(self, _type, *args, **kwargs):
"""Make table with properties by schema_id
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.schema_type == SchemaTypes.DEFINITION and not kwargs.get('definition'):
return ''
head = """.. csv-table::
:delim: |
:header: "Name", "Required", "Type", "Format", "Properties", "Description"
:widths: 20, 10, 15, 15, 30, 25
"""
body = []
if schema.properties:
for p in schema.properties:
body.append(' {} | {} | {} | {} | {} | {}'.format(
p.get('name') or '',
'Yes' if p.get('required') else 'No',
self.get_type_description(p['type'], *args, **kwargs),
p.get('type_format') or '',
'{}'.format(p.get('type_properties') or ''),
p.get('description') or '')
)
body.sort()
return (head + '\n'.join(body))
def get_type_description(self, _type, suffix='', *args, **kwargs):
""" Get description of type
:param suffix:
:param str _type:
:rtype: str
"""
if not SchemaObjects.contains(_type):
return _type
schema = SchemaObjects.get(_type)
if schema.all_of:
models = ','.join(
(self.get_type_description(_type, *args, **kwargs) for _type in schema.all_of)
)
result = '{}'.format(models.split(',')[0])
for r in models.split(',')[1:]:
result += ' extended {}'.format(r)
elif schema.is_array:
result = 'array of {}'.format(
self.get_type_description(schema.item['type'], *args, **kwargs))
else:
result = ':ref:`{} <{}{}>`'.format(schema.name, schema.schema_id, suffix)
return result
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/schema_objects.py
|
SchemaObjects.create_schema
|
python
|
def create_schema(cls, obj, name, schema_type, root):
if schema_type == SchemaTypes.MAPPED:
schema = SchemaMapWrapper(obj, storage=cls, name=name, root=root)
else:
schema = Schema(obj, schema_type, storage=cls, name=name, root=root)
cls.add_schema(schema)
return schema
|
Create Schema object
:param dict obj: swagger schema object
:param str name: schema name
:param str schema_type: schema location.
Can be ``inline``, ``definition`` or ``mapped``
:param BaseSwaggerObject root: root doc
:return: new schema
:rtype: Schema
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/schema_objects.py#L15-L31
| null |
class SchemaObjects(object):
"""
Schema collection
"""
_schemas = OrderedDict()
@classmethod
@classmethod
def add_schema(cls, schema):
""" Add schema object to collection
:param Schema schema:
"""
cls._schemas[schema.schema_id] = schema
@classmethod
def get(cls, schema_id):
""" Get schema object from collection by id
:param str schema_id:
:return: schema
:rtype: Schema
"""
return cls._schemas.get(schema_id)
@classmethod
def get_schemas(cls, schema_types=None, sort=True):
"""
Get schemas by type. If ``schema_type`` is None, return all schemas
:param schema_types: list of schema types
:type schema_types: list or None
:param bool sort: sort by name
:return: list of schemas
:rtype: list
"""
result = filter(lambda x: not x.is_inline_array, cls._schemas.values())
if schema_types:
result = filter(lambda x: x.schema_type in schema_types, result)
if sort:
result = sorted(result, key=attrgetter('name'))
return result
@classmethod
def contains(cls, key):
""" Check schema existence in collection by id
:param str key:
:rtype: bool
"""
return key in cls._schemas
@classmethod
def clear(cls):
cls._schemas = OrderedDict()
@classmethod
def merge_schemas(cls, schema, _schema):
"""Return second Schema, which is extended by first Schema
https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#composition-and-inheritance-polymorphism
"""
tmp = schema.properties[:] # copy
prop = {}
to_dict = lambda e: prop.update({e.pop('name'): e})
[to_dict(i) for i in tmp] # map(to_dict, tmp)
for _prop in _schema.properties:
if prop.get(_prop['name']):
prop.pop(_prop['name'])
if prop:
for k, v in prop.items():
v['name'] = k
_schema.properties.append(v)
return _schema
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/schema_objects.py
|
SchemaObjects.get_schemas
|
python
|
def get_schemas(cls, schema_types=None, sort=True):
result = filter(lambda x: not x.is_inline_array, cls._schemas.values())
if schema_types:
result = filter(lambda x: x.schema_type in schema_types, result)
if sort:
result = sorted(result, key=attrgetter('name'))
return result
|
Get schemas by type. If ``schema_type`` is None, return all schemas
:param schema_types: list of schema types
:type schema_types: list or None
:param bool sort: sort by name
:return: list of schemas
:rtype: list
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/schema_objects.py#L52-L67
| null |
class SchemaObjects(object):
"""
Schema collection
"""
_schemas = OrderedDict()
@classmethod
def create_schema(cls, obj, name, schema_type, root):
""" Create Schema object
:param dict obj: swagger schema object
:param str name: schema name
:param str schema_type: schema location.
Can be ``inline``, ``definition`` or ``mapped``
:param BaseSwaggerObject root: root doc
:return: new schema
:rtype: Schema
"""
if schema_type == SchemaTypes.MAPPED:
schema = SchemaMapWrapper(obj, storage=cls, name=name, root=root)
else:
schema = Schema(obj, schema_type, storage=cls, name=name, root=root)
cls.add_schema(schema)
return schema
@classmethod
def add_schema(cls, schema):
""" Add schema object to collection
:param Schema schema:
"""
cls._schemas[schema.schema_id] = schema
@classmethod
def get(cls, schema_id):
""" Get schema object from collection by id
:param str schema_id:
:return: schema
:rtype: Schema
"""
return cls._schemas.get(schema_id)
@classmethod
@classmethod
def contains(cls, key):
""" Check schema existence in collection by id
:param str key:
:rtype: bool
"""
return key in cls._schemas
@classmethod
def clear(cls):
cls._schemas = OrderedDict()
@classmethod
def merge_schemas(cls, schema, _schema):
"""Return second Schema, which is extended by first Schema
https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#composition-and-inheritance-polymorphism
"""
tmp = schema.properties[:] # copy
prop = {}
to_dict = lambda e: prop.update({e.pop('name'): e})
[to_dict(i) for i in tmp] # map(to_dict, tmp)
for _prop in _schema.properties:
if prop.get(_prop['name']):
prop.pop(_prop['name'])
if prop:
for k, v in prop.items():
v['name'] = k
_schema.properties.append(v)
return _schema
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/schema_objects.py
|
SchemaObjects.merge_schemas
|
python
|
def merge_schemas(cls, schema, _schema):
tmp = schema.properties[:] # copy
prop = {}
to_dict = lambda e: prop.update({e.pop('name'): e})
[to_dict(i) for i in tmp] # map(to_dict, tmp)
for _prop in _schema.properties:
if prop.get(_prop['name']):
prop.pop(_prop['name'])
if prop:
for k, v in prop.items():
v['name'] = k
_schema.properties.append(v)
return _schema
|
Return second Schema, which is extended by first Schema
https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#composition-and-inheritance-polymorphism
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/schema_objects.py#L83-L98
| null |
class SchemaObjects(object):
"""
Schema collection
"""
_schemas = OrderedDict()
@classmethod
def create_schema(cls, obj, name, schema_type, root):
""" Create Schema object
:param dict obj: swagger schema object
:param str name: schema name
:param str schema_type: schema location.
Can be ``inline``, ``definition`` or ``mapped``
:param BaseSwaggerObject root: root doc
:return: new schema
:rtype: Schema
"""
if schema_type == SchemaTypes.MAPPED:
schema = SchemaMapWrapper(obj, storage=cls, name=name, root=root)
else:
schema = Schema(obj, schema_type, storage=cls, name=name, root=root)
cls.add_schema(schema)
return schema
@classmethod
def add_schema(cls, schema):
""" Add schema object to collection
:param Schema schema:
"""
cls._schemas[schema.schema_id] = schema
@classmethod
def get(cls, schema_id):
""" Get schema object from collection by id
:param str schema_id:
:return: schema
:rtype: Schema
"""
return cls._schemas.get(schema_id)
@classmethod
def get_schemas(cls, schema_types=None, sort=True):
"""
Get schemas by type. If ``schema_type`` is None, return all schemas
:param schema_types: list of schema types
:type schema_types: list or None
:param bool sort: sort by name
:return: list of schemas
:rtype: list
"""
result = filter(lambda x: not x.is_inline_array, cls._schemas.values())
if schema_types:
result = filter(lambda x: x.schema_type in schema_types, result)
if sort:
result = sorted(result, key=attrgetter('name'))
return result
@classmethod
def contains(cls, key):
""" Check schema existence in collection by id
:param str key:
:rtype: bool
"""
return key in cls._schemas
@classmethod
def clear(cls):
cls._schemas = OrderedDict()
@classmethod
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/exampilators.py
|
Exampilator.get_example_by_schema
|
python
|
def get_example_by_schema(cls, schema, ignored_schemas=None, paths=None, name=''):
if schema.schema_example:
return schema.schema_example
if ignored_schemas is None:
ignored_schemas = []
if paths is None:
paths = []
if name:
paths = list(map(lambda path: '.'.join((path, name)), paths))
if schema.ref_path:
paths.append(schema.ref_path)
if schema.schema_id in ignored_schemas:
result = [] if schema.is_array else {}
else:
schemas = ignored_schemas + [schema.schema_id]
kwargs = dict(
ignored_schemas=schemas,
paths=paths
)
if schema.is_array:
result = cls.get_example_for_array(
schema.item, **kwargs)
elif schema.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
schema.type, schema.raw, schema.type_format, paths=paths
)
elif schema.all_of:
result = {}
for _schema_id in schema.all_of:
schema = SchemaObjects.get(_schema_id)
result.update(cls.get_example_by_schema(schema, **kwargs))
else:
result = cls.get_example_for_object(
schema.properties, nested=schema.nested_schemas, **kwargs)
return result
|
Get example by schema object
:param Schema schema: current schema
:param list ignored_schemas: list of previous schemas
for avoid circular references
:param list paths: list object paths (ex. #/definitions/Model.property)
If nested schemas exists, custom examples checks in order from paths
:param str name: name of property schema object if exists
:return: dict or list (if schema is array)
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/exampilators.py#L107-L156
|
[
"def get(cls, schema_id):\n \"\"\" Get schema object from collection by id\n\n :param str schema_id:\n :return: schema\n :rtype: Schema\n \"\"\"\n return cls._schemas.get(schema_id)\n"
] |
class Exampilator(object):
"""
Example Manager
"""
DEFAULT_EXAMPLES = DEFAULT_EXAMPLES.copy()
CUSTOM_EXAMPLES = dict()
EXAMPLE_ARRAY_ITEMS_COUNT = 2
logger = get_logger()
_json_format_checker = FormatChecker()
@classmethod
def fill_examples(cls, examples):
if 'array_items_count' in examples:
cls.EXAMPLE_ARRAY_ITEMS_COUNT = examples['array_items_count']
if 'types' in examples:
cls.DEFAULT_EXAMPLES.update(examples['types'])
if 'definitions' in examples:
for path, fields in examples['definitions'].items():
for field, value in fields.items():
key = '.'.join((path, field))
cls.CUSTOM_EXAMPLES[key] = value
if 'paths' in examples:
for path, methods in examples['paths'].items():
key = "#/paths/'{}'".format(path)
for method, operations in methods.items():
for section, fields in operations.items():
for field, value in fields.items():
_key = '/'.join((key, method, section, field))
cls.CUSTOM_EXAMPLES[_key] = value
@classmethod
def get_example_value_for_primitive_type(cls, type_, properties, format_, **kw):
paths = kw.get('paths')
if paths:
result, path = cls._get_custom_example(paths)
if result:
cls._example_validate(path, result, type_, format_)
return result
if properties.get('default') is not None:
result = properties['default']
elif properties.get('enum'):
result = properties['enum'][0]
else:
result = getattr(cls, '%s_example' % type_)(properties, format_)
return result
@classmethod
def string_example(cls, properties, type_format):
if type_format in cls.DEFAULT_EXAMPLES:
result = cls.DEFAULT_EXAMPLES[type_format]
else:
result = cls.DEFAULT_EXAMPLES['string']
if properties.get('min_length'):
result.ljust(properties['min_length'], 'a')
if properties.get('max_length'):
result = result[:properties['max_length']]
return result
@classmethod
def integer_example(cls, properties, *args):
result = cls.DEFAULT_EXAMPLES['integer']
if properties.get('minimum') is not None and result < properties['minimum']:
result = properties['minimum']
if properties.get('exclusive_minimum', False):
result += 1
elif properties.get('maximum') is not None and result > properties['maximum']:
result = properties['maximum']
if properties.get('exclusive_maximum', False):
result -= 1
return result
@classmethod
def number_example(cls, properties, *args):
return cls.integer_example(properties)
@classmethod
def boolean_example(cls, *args):
return cls.DEFAULT_EXAMPLES['boolean']
@classmethod
@classmethod
def get_body_example(cls, operation):
""" Get example for body parameter example by operation
:param Operation operation: operation object
"""
path = "#/paths/'{0.path}'/{0.method}/parameters/{name}".format(
operation, name=operation.body.name or 'body')
return cls.get_example_by_schema(operation.body, paths=[path])
@classmethod
def get_response_example(cls, operation, response):
""" Get example for response object by operation object
:param Operation operation: operation object
:param Response response: response object
"""
path = "#/paths/'{}'/{}/responses/{}".format(
operation.path, operation.method, response.name)
kwargs = dict(paths=[path])
if response.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
response.type, response.properties, response.type_format, **kwargs)
else:
schema = SchemaObjects.get(response.type)
result = cls.get_example_by_schema(schema, **kwargs)
return result
@classmethod
def get_header_example(cls, header):
""" Get example for header object
:param Header header: Header object
:return: example
:rtype: dict
"""
if header.is_array:
result = cls.get_example_for_array(header.item)
else:
example_method = getattr(cls, '{}_example'.format(header.type))
result = example_method(header.properties, header.type_format)
return {header.name: result}
@classmethod
def get_property_example(cls, property_, nested=None, **kw):
""" Get example for property
:param dict property_:
:param set nested:
:return: example value
"""
paths = kw.get('paths', [])
name = kw.get('name', '')
result = None
if name and paths:
paths = list(map(lambda path: '.'.join((path, name)), paths))
result, path = cls._get_custom_example(paths)
if result is not None and property_['type'] in PRIMITIVE_TYPES:
cls._example_validate(
path, result, property_['type'], property_['type_format'])
return result
if SchemaObjects.contains(property_['type']):
schema = SchemaObjects.get(property_['type'])
if result is not None:
if schema.is_array:
if not isinstance(result, list):
result = [result] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
else:
if isinstance(result, list):
cls.logger.warning(
'Example type mismatch in path {}'.format(schema.ref_path))
else:
result = cls.get_example_by_schema(schema, **kw)
if (not result) and schema.nested_schemas:
for _schema_id in schema.nested_schemas:
_schema = SchemaObjects.get(_schema_id)
if _schema:
if isinstance(_schema, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema, **kw)
elif _schema.nested_schemas:
for _schema__id in _schema.nested_schemas:
_schema_ = SchemaObjects.get(_schema__id)
if isinstance(_schema_, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema_, **kw)
else:
result = cls.get_example_value_for_primitive_type(
property_['type'],
property_['type_properties'],
property_['type_format'],
**kw
)
return result
@classmethod
def _get_custom_example(cls, paths):
if cls.CUSTOM_EXAMPLES:
for path in paths:
if path in cls.CUSTOM_EXAMPLES:
return cls.CUSTOM_EXAMPLES[path], path
return None, ''
@classmethod
def get_example_for_array(cls, obj_item, **kw):
return [cls.get_property_example(obj_item, **kw)] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
@classmethod
def get_example_for_object(cls, properties, nested=None, **kw):
result = {}
if properties:
for _property in properties:
kw['name'] = _property['name']
result[_property['name']] = cls.get_property_example(
_property, nested=nested, **kw)
return result
@classmethod
def schema_validate(cls, obj, json_schema):
schema_validate(obj, json_schema, format_checker=cls._json_format_checker)
@classmethod
def _example_validate(cls, path, value, type_, format_=None):
_json_schema = {'type': type_}
if format_:
_json_schema['format'] = format_
try:
cls.schema_validate(value, _json_schema)
except (ValidationError, SchemaError):
cls.logger.warning('Example type mismatch in path {}'.format(path))
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/exampilators.py
|
Exampilator.get_body_example
|
python
|
def get_body_example(cls, operation):
path = "#/paths/'{0.path}'/{0.method}/parameters/{name}".format(
operation, name=operation.body.name or 'body')
return cls.get_example_by_schema(operation.body, paths=[path])
|
Get example for body parameter example by operation
:param Operation operation: operation object
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/exampilators.py#L159-L166
| null |
class Exampilator(object):
"""
Example Manager
"""
DEFAULT_EXAMPLES = DEFAULT_EXAMPLES.copy()
CUSTOM_EXAMPLES = dict()
EXAMPLE_ARRAY_ITEMS_COUNT = 2
logger = get_logger()
_json_format_checker = FormatChecker()
@classmethod
def fill_examples(cls, examples):
if 'array_items_count' in examples:
cls.EXAMPLE_ARRAY_ITEMS_COUNT = examples['array_items_count']
if 'types' in examples:
cls.DEFAULT_EXAMPLES.update(examples['types'])
if 'definitions' in examples:
for path, fields in examples['definitions'].items():
for field, value in fields.items():
key = '.'.join((path, field))
cls.CUSTOM_EXAMPLES[key] = value
if 'paths' in examples:
for path, methods in examples['paths'].items():
key = "#/paths/'{}'".format(path)
for method, operations in methods.items():
for section, fields in operations.items():
for field, value in fields.items():
_key = '/'.join((key, method, section, field))
cls.CUSTOM_EXAMPLES[_key] = value
@classmethod
def get_example_value_for_primitive_type(cls, type_, properties, format_, **kw):
paths = kw.get('paths')
if paths:
result, path = cls._get_custom_example(paths)
if result:
cls._example_validate(path, result, type_, format_)
return result
if properties.get('default') is not None:
result = properties['default']
elif properties.get('enum'):
result = properties['enum'][0]
else:
result = getattr(cls, '%s_example' % type_)(properties, format_)
return result
@classmethod
def string_example(cls, properties, type_format):
if type_format in cls.DEFAULT_EXAMPLES:
result = cls.DEFAULT_EXAMPLES[type_format]
else:
result = cls.DEFAULT_EXAMPLES['string']
if properties.get('min_length'):
result.ljust(properties['min_length'], 'a')
if properties.get('max_length'):
result = result[:properties['max_length']]
return result
@classmethod
def integer_example(cls, properties, *args):
result = cls.DEFAULT_EXAMPLES['integer']
if properties.get('minimum') is not None and result < properties['minimum']:
result = properties['minimum']
if properties.get('exclusive_minimum', False):
result += 1
elif properties.get('maximum') is not None and result > properties['maximum']:
result = properties['maximum']
if properties.get('exclusive_maximum', False):
result -= 1
return result
@classmethod
def number_example(cls, properties, *args):
return cls.integer_example(properties)
@classmethod
def boolean_example(cls, *args):
return cls.DEFAULT_EXAMPLES['boolean']
@classmethod
def get_example_by_schema(cls, schema, ignored_schemas=None, paths=None, name=''):
""" Get example by schema object
:param Schema schema: current schema
:param list ignored_schemas: list of previous schemas
for avoid circular references
:param list paths: list object paths (ex. #/definitions/Model.property)
If nested schemas exists, custom examples checks in order from paths
:param str name: name of property schema object if exists
:return: dict or list (if schema is array)
"""
if schema.schema_example:
return schema.schema_example
if ignored_schemas is None:
ignored_schemas = []
if paths is None:
paths = []
if name:
paths = list(map(lambda path: '.'.join((path, name)), paths))
if schema.ref_path:
paths.append(schema.ref_path)
if schema.schema_id in ignored_schemas:
result = [] if schema.is_array else {}
else:
schemas = ignored_schemas + [schema.schema_id]
kwargs = dict(
ignored_schemas=schemas,
paths=paths
)
if schema.is_array:
result = cls.get_example_for_array(
schema.item, **kwargs)
elif schema.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
schema.type, schema.raw, schema.type_format, paths=paths
)
elif schema.all_of:
result = {}
for _schema_id in schema.all_of:
schema = SchemaObjects.get(_schema_id)
result.update(cls.get_example_by_schema(schema, **kwargs))
else:
result = cls.get_example_for_object(
schema.properties, nested=schema.nested_schemas, **kwargs)
return result
@classmethod
@classmethod
def get_response_example(cls, operation, response):
""" Get example for response object by operation object
:param Operation operation: operation object
:param Response response: response object
"""
path = "#/paths/'{}'/{}/responses/{}".format(
operation.path, operation.method, response.name)
kwargs = dict(paths=[path])
if response.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
response.type, response.properties, response.type_format, **kwargs)
else:
schema = SchemaObjects.get(response.type)
result = cls.get_example_by_schema(schema, **kwargs)
return result
@classmethod
def get_header_example(cls, header):
""" Get example for header object
:param Header header: Header object
:return: example
:rtype: dict
"""
if header.is_array:
result = cls.get_example_for_array(header.item)
else:
example_method = getattr(cls, '{}_example'.format(header.type))
result = example_method(header.properties, header.type_format)
return {header.name: result}
@classmethod
def get_property_example(cls, property_, nested=None, **kw):
""" Get example for property
:param dict property_:
:param set nested:
:return: example value
"""
paths = kw.get('paths', [])
name = kw.get('name', '')
result = None
if name and paths:
paths = list(map(lambda path: '.'.join((path, name)), paths))
result, path = cls._get_custom_example(paths)
if result is not None and property_['type'] in PRIMITIVE_TYPES:
cls._example_validate(
path, result, property_['type'], property_['type_format'])
return result
if SchemaObjects.contains(property_['type']):
schema = SchemaObjects.get(property_['type'])
if result is not None:
if schema.is_array:
if not isinstance(result, list):
result = [result] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
else:
if isinstance(result, list):
cls.logger.warning(
'Example type mismatch in path {}'.format(schema.ref_path))
else:
result = cls.get_example_by_schema(schema, **kw)
if (not result) and schema.nested_schemas:
for _schema_id in schema.nested_schemas:
_schema = SchemaObjects.get(_schema_id)
if _schema:
if isinstance(_schema, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema, **kw)
elif _schema.nested_schemas:
for _schema__id in _schema.nested_schemas:
_schema_ = SchemaObjects.get(_schema__id)
if isinstance(_schema_, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema_, **kw)
else:
result = cls.get_example_value_for_primitive_type(
property_['type'],
property_['type_properties'],
property_['type_format'],
**kw
)
return result
@classmethod
def _get_custom_example(cls, paths):
if cls.CUSTOM_EXAMPLES:
for path in paths:
if path in cls.CUSTOM_EXAMPLES:
return cls.CUSTOM_EXAMPLES[path], path
return None, ''
@classmethod
def get_example_for_array(cls, obj_item, **kw):
return [cls.get_property_example(obj_item, **kw)] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
@classmethod
def get_example_for_object(cls, properties, nested=None, **kw):
result = {}
if properties:
for _property in properties:
kw['name'] = _property['name']
result[_property['name']] = cls.get_property_example(
_property, nested=nested, **kw)
return result
@classmethod
def schema_validate(cls, obj, json_schema):
schema_validate(obj, json_schema, format_checker=cls._json_format_checker)
@classmethod
def _example_validate(cls, path, value, type_, format_=None):
_json_schema = {'type': type_}
if format_:
_json_schema['format'] = format_
try:
cls.schema_validate(value, _json_schema)
except (ValidationError, SchemaError):
cls.logger.warning('Example type mismatch in path {}'.format(path))
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/exampilators.py
|
Exampilator.get_response_example
|
python
|
def get_response_example(cls, operation, response):
path = "#/paths/'{}'/{}/responses/{}".format(
operation.path, operation.method, response.name)
kwargs = dict(paths=[path])
if response.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
response.type, response.properties, response.type_format, **kwargs)
else:
schema = SchemaObjects.get(response.type)
result = cls.get_example_by_schema(schema, **kwargs)
return result
|
Get example for response object by operation object
:param Operation operation: operation object
:param Response response: response object
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/exampilators.py#L169-L186
|
[
"def get(cls, schema_id):\n \"\"\" Get schema object from collection by id\n\n :param str schema_id:\n :return: schema\n :rtype: Schema\n \"\"\"\n return cls._schemas.get(schema_id)\n"
] |
class Exampilator(object):
"""
Example Manager
"""
DEFAULT_EXAMPLES = DEFAULT_EXAMPLES.copy()
CUSTOM_EXAMPLES = dict()
EXAMPLE_ARRAY_ITEMS_COUNT = 2
logger = get_logger()
_json_format_checker = FormatChecker()
@classmethod
def fill_examples(cls, examples):
if 'array_items_count' in examples:
cls.EXAMPLE_ARRAY_ITEMS_COUNT = examples['array_items_count']
if 'types' in examples:
cls.DEFAULT_EXAMPLES.update(examples['types'])
if 'definitions' in examples:
for path, fields in examples['definitions'].items():
for field, value in fields.items():
key = '.'.join((path, field))
cls.CUSTOM_EXAMPLES[key] = value
if 'paths' in examples:
for path, methods in examples['paths'].items():
key = "#/paths/'{}'".format(path)
for method, operations in methods.items():
for section, fields in operations.items():
for field, value in fields.items():
_key = '/'.join((key, method, section, field))
cls.CUSTOM_EXAMPLES[_key] = value
@classmethod
def get_example_value_for_primitive_type(cls, type_, properties, format_, **kw):
paths = kw.get('paths')
if paths:
result, path = cls._get_custom_example(paths)
if result:
cls._example_validate(path, result, type_, format_)
return result
if properties.get('default') is not None:
result = properties['default']
elif properties.get('enum'):
result = properties['enum'][0]
else:
result = getattr(cls, '%s_example' % type_)(properties, format_)
return result
@classmethod
def string_example(cls, properties, type_format):
if type_format in cls.DEFAULT_EXAMPLES:
result = cls.DEFAULT_EXAMPLES[type_format]
else:
result = cls.DEFAULT_EXAMPLES['string']
if properties.get('min_length'):
result.ljust(properties['min_length'], 'a')
if properties.get('max_length'):
result = result[:properties['max_length']]
return result
@classmethod
def integer_example(cls, properties, *args):
result = cls.DEFAULT_EXAMPLES['integer']
if properties.get('minimum') is not None and result < properties['minimum']:
result = properties['minimum']
if properties.get('exclusive_minimum', False):
result += 1
elif properties.get('maximum') is not None and result > properties['maximum']:
result = properties['maximum']
if properties.get('exclusive_maximum', False):
result -= 1
return result
@classmethod
def number_example(cls, properties, *args):
return cls.integer_example(properties)
@classmethod
def boolean_example(cls, *args):
return cls.DEFAULT_EXAMPLES['boolean']
@classmethod
def get_example_by_schema(cls, schema, ignored_schemas=None, paths=None, name=''):
""" Get example by schema object
:param Schema schema: current schema
:param list ignored_schemas: list of previous schemas
for avoid circular references
:param list paths: list object paths (ex. #/definitions/Model.property)
If nested schemas exists, custom examples checks in order from paths
:param str name: name of property schema object if exists
:return: dict or list (if schema is array)
"""
if schema.schema_example:
return schema.schema_example
if ignored_schemas is None:
ignored_schemas = []
if paths is None:
paths = []
if name:
paths = list(map(lambda path: '.'.join((path, name)), paths))
if schema.ref_path:
paths.append(schema.ref_path)
if schema.schema_id in ignored_schemas:
result = [] if schema.is_array else {}
else:
schemas = ignored_schemas + [schema.schema_id]
kwargs = dict(
ignored_schemas=schemas,
paths=paths
)
if schema.is_array:
result = cls.get_example_for_array(
schema.item, **kwargs)
elif schema.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
schema.type, schema.raw, schema.type_format, paths=paths
)
elif schema.all_of:
result = {}
for _schema_id in schema.all_of:
schema = SchemaObjects.get(_schema_id)
result.update(cls.get_example_by_schema(schema, **kwargs))
else:
result = cls.get_example_for_object(
schema.properties, nested=schema.nested_schemas, **kwargs)
return result
@classmethod
def get_body_example(cls, operation):
""" Get example for body parameter example by operation
:param Operation operation: operation object
"""
path = "#/paths/'{0.path}'/{0.method}/parameters/{name}".format(
operation, name=operation.body.name or 'body')
return cls.get_example_by_schema(operation.body, paths=[path])
@classmethod
@classmethod
def get_header_example(cls, header):
""" Get example for header object
:param Header header: Header object
:return: example
:rtype: dict
"""
if header.is_array:
result = cls.get_example_for_array(header.item)
else:
example_method = getattr(cls, '{}_example'.format(header.type))
result = example_method(header.properties, header.type_format)
return {header.name: result}
@classmethod
def get_property_example(cls, property_, nested=None, **kw):
""" Get example for property
:param dict property_:
:param set nested:
:return: example value
"""
paths = kw.get('paths', [])
name = kw.get('name', '')
result = None
if name and paths:
paths = list(map(lambda path: '.'.join((path, name)), paths))
result, path = cls._get_custom_example(paths)
if result is not None and property_['type'] in PRIMITIVE_TYPES:
cls._example_validate(
path, result, property_['type'], property_['type_format'])
return result
if SchemaObjects.contains(property_['type']):
schema = SchemaObjects.get(property_['type'])
if result is not None:
if schema.is_array:
if not isinstance(result, list):
result = [result] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
else:
if isinstance(result, list):
cls.logger.warning(
'Example type mismatch in path {}'.format(schema.ref_path))
else:
result = cls.get_example_by_schema(schema, **kw)
if (not result) and schema.nested_schemas:
for _schema_id in schema.nested_schemas:
_schema = SchemaObjects.get(_schema_id)
if _schema:
if isinstance(_schema, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema, **kw)
elif _schema.nested_schemas:
for _schema__id in _schema.nested_schemas:
_schema_ = SchemaObjects.get(_schema__id)
if isinstance(_schema_, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema_, **kw)
else:
result = cls.get_example_value_for_primitive_type(
property_['type'],
property_['type_properties'],
property_['type_format'],
**kw
)
return result
@classmethod
def _get_custom_example(cls, paths):
if cls.CUSTOM_EXAMPLES:
for path in paths:
if path in cls.CUSTOM_EXAMPLES:
return cls.CUSTOM_EXAMPLES[path], path
return None, ''
@classmethod
def get_example_for_array(cls, obj_item, **kw):
return [cls.get_property_example(obj_item, **kw)] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
@classmethod
def get_example_for_object(cls, properties, nested=None, **kw):
result = {}
if properties:
for _property in properties:
kw['name'] = _property['name']
result[_property['name']] = cls.get_property_example(
_property, nested=nested, **kw)
return result
@classmethod
def schema_validate(cls, obj, json_schema):
schema_validate(obj, json_schema, format_checker=cls._json_format_checker)
@classmethod
def _example_validate(cls, path, value, type_, format_=None):
_json_schema = {'type': type_}
if format_:
_json_schema['format'] = format_
try:
cls.schema_validate(value, _json_schema)
except (ValidationError, SchemaError):
cls.logger.warning('Example type mismatch in path {}'.format(path))
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/exampilators.py
|
Exampilator.get_header_example
|
python
|
def get_header_example(cls, header):
if header.is_array:
result = cls.get_example_for_array(header.item)
else:
example_method = getattr(cls, '{}_example'.format(header.type))
result = example_method(header.properties, header.type_format)
return {header.name: result}
|
Get example for header object
:param Header header: Header object
:return: example
:rtype: dict
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/exampilators.py#L189-L201
| null |
class Exampilator(object):
"""
Example Manager
"""
DEFAULT_EXAMPLES = DEFAULT_EXAMPLES.copy()
CUSTOM_EXAMPLES = dict()
EXAMPLE_ARRAY_ITEMS_COUNT = 2
logger = get_logger()
_json_format_checker = FormatChecker()
@classmethod
def fill_examples(cls, examples):
if 'array_items_count' in examples:
cls.EXAMPLE_ARRAY_ITEMS_COUNT = examples['array_items_count']
if 'types' in examples:
cls.DEFAULT_EXAMPLES.update(examples['types'])
if 'definitions' in examples:
for path, fields in examples['definitions'].items():
for field, value in fields.items():
key = '.'.join((path, field))
cls.CUSTOM_EXAMPLES[key] = value
if 'paths' in examples:
for path, methods in examples['paths'].items():
key = "#/paths/'{}'".format(path)
for method, operations in methods.items():
for section, fields in operations.items():
for field, value in fields.items():
_key = '/'.join((key, method, section, field))
cls.CUSTOM_EXAMPLES[_key] = value
@classmethod
def get_example_value_for_primitive_type(cls, type_, properties, format_, **kw):
paths = kw.get('paths')
if paths:
result, path = cls._get_custom_example(paths)
if result:
cls._example_validate(path, result, type_, format_)
return result
if properties.get('default') is not None:
result = properties['default']
elif properties.get('enum'):
result = properties['enum'][0]
else:
result = getattr(cls, '%s_example' % type_)(properties, format_)
return result
@classmethod
def string_example(cls, properties, type_format):
if type_format in cls.DEFAULT_EXAMPLES:
result = cls.DEFAULT_EXAMPLES[type_format]
else:
result = cls.DEFAULT_EXAMPLES['string']
if properties.get('min_length'):
result.ljust(properties['min_length'], 'a')
if properties.get('max_length'):
result = result[:properties['max_length']]
return result
@classmethod
def integer_example(cls, properties, *args):
result = cls.DEFAULT_EXAMPLES['integer']
if properties.get('minimum') is not None and result < properties['minimum']:
result = properties['minimum']
if properties.get('exclusive_minimum', False):
result += 1
elif properties.get('maximum') is not None and result > properties['maximum']:
result = properties['maximum']
if properties.get('exclusive_maximum', False):
result -= 1
return result
@classmethod
def number_example(cls, properties, *args):
return cls.integer_example(properties)
@classmethod
def boolean_example(cls, *args):
return cls.DEFAULT_EXAMPLES['boolean']
@classmethod
def get_example_by_schema(cls, schema, ignored_schemas=None, paths=None, name=''):
""" Get example by schema object
:param Schema schema: current schema
:param list ignored_schemas: list of previous schemas
for avoid circular references
:param list paths: list object paths (ex. #/definitions/Model.property)
If nested schemas exists, custom examples checks in order from paths
:param str name: name of property schema object if exists
:return: dict or list (if schema is array)
"""
if schema.schema_example:
return schema.schema_example
if ignored_schemas is None:
ignored_schemas = []
if paths is None:
paths = []
if name:
paths = list(map(lambda path: '.'.join((path, name)), paths))
if schema.ref_path:
paths.append(schema.ref_path)
if schema.schema_id in ignored_schemas:
result = [] if schema.is_array else {}
else:
schemas = ignored_schemas + [schema.schema_id]
kwargs = dict(
ignored_schemas=schemas,
paths=paths
)
if schema.is_array:
result = cls.get_example_for_array(
schema.item, **kwargs)
elif schema.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
schema.type, schema.raw, schema.type_format, paths=paths
)
elif schema.all_of:
result = {}
for _schema_id in schema.all_of:
schema = SchemaObjects.get(_schema_id)
result.update(cls.get_example_by_schema(schema, **kwargs))
else:
result = cls.get_example_for_object(
schema.properties, nested=schema.nested_schemas, **kwargs)
return result
@classmethod
def get_body_example(cls, operation):
""" Get example for body parameter example by operation
:param Operation operation: operation object
"""
path = "#/paths/'{0.path}'/{0.method}/parameters/{name}".format(
operation, name=operation.body.name or 'body')
return cls.get_example_by_schema(operation.body, paths=[path])
@classmethod
def get_response_example(cls, operation, response):
    """Build an example for a response object of an operation.

    :param Operation operation: operation object
    :param Response response: response object
    """
    path = "#/paths/'{}'/{}/responses/{}".format(
        operation.path, operation.method, response.name)
    kwargs = dict(paths=[path])
    if response.type in PRIMITIVE_TYPES:
        result = cls.get_example_value_for_primitive_type(
            response.type, response.properties, response.type_format, **kwargs)
    else:
        # Response refers to a schema object; build a structured example.
        schema = SchemaObjects.get(response.type)
        result = cls.get_example_by_schema(schema, **kwargs)
    return result
@classmethod
def get_property_example(cls, property_, nested=None, **kw):
    """Build an example value for a single schema property.

    Fixed: a duplicated ``@classmethod`` decorator was applied twice,
    which makes the attribute uncallable through the class.

    :param dict property_: property description with ``type``,
        ``type_format`` and ``type_properties`` keys
    :param set nested: ids of nested schemas of the enclosing schema
    :return: example value
    """
    paths = kw.get('paths', [])
    name = kw.get('name', '')
    result = None
    if name and paths:
        # Look for a user-supplied custom example at any candidate path.
        paths = list(map(lambda path: '.'.join((path, name)), paths))
        result, path = cls._get_custom_example(paths)
    if result is not None and property_['type'] in PRIMITIVE_TYPES:
        # Custom example for a primitive: validate it and return as-is.
        cls._example_validate(
            path, result, property_['type'], property_['type_format'])
        return result
    if SchemaObjects.contains(property_['type']):
        # The property refers to another schema object.
        schema = SchemaObjects.get(property_['type'])
        if result is not None:
            if schema.is_array:
                if not isinstance(result, list):
                    result = [result] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
            else:
                if isinstance(result, list):
                    cls.logger.warning(
                        'Example type mismatch in path {}'.format(schema.ref_path))
        else:
            result = cls.get_example_by_schema(schema, **kw)
        if (not result) and schema.nested_schemas:
            # Fall back to examples from nested (map-wrapped) schemas.
            for _schema_id in schema.nested_schemas:
                _schema = SchemaObjects.get(_schema_id)
                if _schema:
                    if isinstance(_schema, SchemaMapWrapper):
                        result[_schema.name] = cls.get_example_by_schema(_schema, **kw)
                    elif _schema.nested_schemas:
                        for _schema__id in _schema.nested_schemas:
                            _schema_ = SchemaObjects.get(_schema__id)
                            if isinstance(_schema_, SchemaMapWrapper):
                                result[_schema.name] = cls.get_example_by_schema(_schema_, **kw)
    else:
        result = cls.get_example_value_for_primitive_type(
            property_['type'],
            property_['type_properties'],
            property_['type_format'],
            **kw
        )
    return result
@classmethod
def _get_custom_example(cls, paths):
if cls.CUSTOM_EXAMPLES:
for path in paths:
if path in cls.CUSTOM_EXAMPLES:
return cls.CUSTOM_EXAMPLES[path], path
return None, ''
@classmethod
def get_example_for_array(cls, obj_item, **kw):
    """Return a list repeating the item example EXAMPLE_ARRAY_ITEMS_COUNT times."""
    return [cls.get_property_example(obj_item, **kw)] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
@classmethod
def get_example_for_object(cls, properties, nested=None, **kw):
    """Build a dict example mapping each property name to its example value.

    NOTE(review): mutates ``kw['name']`` on every iteration; callers appear
    not to reuse ``kw`` afterwards -- confirm before refactoring.
    """
    result = {}
    if properties:
        for _property in properties:
            kw['name'] = _property['name']
            result[_property['name']] = cls.get_property_example(
                _property, nested=nested, **kw)
    return result
@classmethod
def schema_validate(cls, obj, json_schema):
    """Validate *obj* against *json_schema* with the shared format checker."""
    schema_validate(obj, json_schema, format_checker=cls._json_format_checker)
@classmethod
def _example_validate(cls, path, value, type_, format_=None):
    """Check a custom example against its declared type/format; warn on mismatch."""
    _json_schema = {'type': type_}
    if format_:
        _json_schema['format'] = format_
    try:
        cls.schema_validate(value, _json_schema)
    except (ValidationError, SchemaError):
        # A bad custom example should not abort generation -- just warn.
        cls.logger.warning('Example type mismatch in path {}'.format(path))
|
Arello-Mobile/swagger2rst
|
swg2rst/utils/exampilators.py
|
Exampilator.get_property_example
|
python
|
def get_property_example(cls, property_, nested=None, **kw):
paths = kw.get('paths', [])
name = kw.get('name', '')
result = None
if name and paths:
paths = list(map(lambda path: '.'.join((path, name)), paths))
result, path = cls._get_custom_example(paths)
if result is not None and property_['type'] in PRIMITIVE_TYPES:
cls._example_validate(
path, result, property_['type'], property_['type_format'])
return result
if SchemaObjects.contains(property_['type']):
schema = SchemaObjects.get(property_['type'])
if result is not None:
if schema.is_array:
if not isinstance(result, list):
result = [result] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
else:
if isinstance(result, list):
cls.logger.warning(
'Example type mismatch in path {}'.format(schema.ref_path))
else:
result = cls.get_example_by_schema(schema, **kw)
if (not result) and schema.nested_schemas:
for _schema_id in schema.nested_schemas:
_schema = SchemaObjects.get(_schema_id)
if _schema:
if isinstance(_schema, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema, **kw)
elif _schema.nested_schemas:
for _schema__id in _schema.nested_schemas:
_schema_ = SchemaObjects.get(_schema__id)
if isinstance(_schema_, SchemaMapWrapper):
result[_schema.name] = cls.get_example_by_schema(_schema_, **kw)
else:
result = cls.get_example_value_for_primitive_type(
property_['type'],
property_['type_properties'],
property_['type_format'],
**kw
)
return result
|
Get example for property
:param dict property_:
:param set nested:
:return: example value
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/utils/exampilators.py#L204-L253
|
[
"def contains(cls, key):\n \"\"\" Check schema existence in collection by id\n\n :param str key:\n :rtype: bool\n \"\"\"\n return key in cls._schemas\n"
] |
class Exampilator(object):
"""
Example Manager
"""
DEFAULT_EXAMPLES = DEFAULT_EXAMPLES.copy()
CUSTOM_EXAMPLES = dict()
EXAMPLE_ARRAY_ITEMS_COUNT = 2
logger = get_logger()
_json_format_checker = FormatChecker()
@classmethod
def fill_examples(cls, examples):
if 'array_items_count' in examples:
cls.EXAMPLE_ARRAY_ITEMS_COUNT = examples['array_items_count']
if 'types' in examples:
cls.DEFAULT_EXAMPLES.update(examples['types'])
if 'definitions' in examples:
for path, fields in examples['definitions'].items():
for field, value in fields.items():
key = '.'.join((path, field))
cls.CUSTOM_EXAMPLES[key] = value
if 'paths' in examples:
for path, methods in examples['paths'].items():
key = "#/paths/'{}'".format(path)
for method, operations in methods.items():
for section, fields in operations.items():
for field, value in fields.items():
_key = '/'.join((key, method, section, field))
cls.CUSTOM_EXAMPLES[_key] = value
@classmethod
def get_example_value_for_primitive_type(cls, type_, properties, format_, **kw):
paths = kw.get('paths')
if paths:
result, path = cls._get_custom_example(paths)
if result:
cls._example_validate(path, result, type_, format_)
return result
if properties.get('default') is not None:
result = properties['default']
elif properties.get('enum'):
result = properties['enum'][0]
else:
result = getattr(cls, '%s_example' % type_)(properties, format_)
return result
@classmethod
def string_example(cls, properties, type_format):
if type_format in cls.DEFAULT_EXAMPLES:
result = cls.DEFAULT_EXAMPLES[type_format]
else:
result = cls.DEFAULT_EXAMPLES['string']
if properties.get('min_length'):
result.ljust(properties['min_length'], 'a')
if properties.get('max_length'):
result = result[:properties['max_length']]
return result
@classmethod
def integer_example(cls, properties, *args):
result = cls.DEFAULT_EXAMPLES['integer']
if properties.get('minimum') is not None and result < properties['minimum']:
result = properties['minimum']
if properties.get('exclusive_minimum', False):
result += 1
elif properties.get('maximum') is not None and result > properties['maximum']:
result = properties['maximum']
if properties.get('exclusive_maximum', False):
result -= 1
return result
@classmethod
def number_example(cls, properties, *args):
return cls.integer_example(properties)
@classmethod
def boolean_example(cls, *args):
return cls.DEFAULT_EXAMPLES['boolean']
@classmethod
def get_example_by_schema(cls, schema, ignored_schemas=None, paths=None, name=''):
""" Get example by schema object
:param Schema schema: current schema
:param list ignored_schemas: list of previous schemas
for avoid circular references
:param list paths: list object paths (ex. #/definitions/Model.property)
If nested schemas exists, custom examples checks in order from paths
:param str name: name of property schema object if exists
:return: dict or list (if schema is array)
"""
if schema.schema_example:
return schema.schema_example
if ignored_schemas is None:
ignored_schemas = []
if paths is None:
paths = []
if name:
paths = list(map(lambda path: '.'.join((path, name)), paths))
if schema.ref_path:
paths.append(schema.ref_path)
if schema.schema_id in ignored_schemas:
result = [] if schema.is_array else {}
else:
schemas = ignored_schemas + [schema.schema_id]
kwargs = dict(
ignored_schemas=schemas,
paths=paths
)
if schema.is_array:
result = cls.get_example_for_array(
schema.item, **kwargs)
elif schema.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
schema.type, schema.raw, schema.type_format, paths=paths
)
elif schema.all_of:
result = {}
for _schema_id in schema.all_of:
schema = SchemaObjects.get(_schema_id)
result.update(cls.get_example_by_schema(schema, **kwargs))
else:
result = cls.get_example_for_object(
schema.properties, nested=schema.nested_schemas, **kwargs)
return result
@classmethod
def get_body_example(cls, operation):
""" Get example for body parameter example by operation
:param Operation operation: operation object
"""
path = "#/paths/'{0.path}'/{0.method}/parameters/{name}".format(
operation, name=operation.body.name or 'body')
return cls.get_example_by_schema(operation.body, paths=[path])
@classmethod
def get_response_example(cls, operation, response):
""" Get example for response object by operation object
:param Operation operation: operation object
:param Response response: response object
"""
path = "#/paths/'{}'/{}/responses/{}".format(
operation.path, operation.method, response.name)
kwargs = dict(paths=[path])
if response.type in PRIMITIVE_TYPES:
result = cls.get_example_value_for_primitive_type(
response.type, response.properties, response.type_format, **kwargs)
else:
schema = SchemaObjects.get(response.type)
result = cls.get_example_by_schema(schema, **kwargs)
return result
@classmethod
def get_header_example(cls, header):
""" Get example for header object
:param Header header: Header object
:return: example
:rtype: dict
"""
if header.is_array:
result = cls.get_example_for_array(header.item)
else:
example_method = getattr(cls, '{}_example'.format(header.type))
result = example_method(header.properties, header.type_format)
return {header.name: result}
@classmethod
@classmethod
def _get_custom_example(cls, paths):
if cls.CUSTOM_EXAMPLES:
for path in paths:
if path in cls.CUSTOM_EXAMPLES:
return cls.CUSTOM_EXAMPLES[path], path
return None, ''
@classmethod
def get_example_for_array(cls, obj_item, **kw):
return [cls.get_property_example(obj_item, **kw)] * cls.EXAMPLE_ARRAY_ITEMS_COUNT
@classmethod
def get_example_for_object(cls, properties, nested=None, **kw):
result = {}
if properties:
for _property in properties:
kw['name'] = _property['name']
result[_property['name']] = cls.get_property_example(
_property, nested=nested, **kw)
return result
@classmethod
def schema_validate(cls, obj, json_schema):
schema_validate(obj, json_schema, format_checker=cls._json_format_checker)
@classmethod
def _example_validate(cls, path, value, type_, format_=None):
_json_schema = {'type': type_}
if format_:
_json_schema['format'] = format_
try:
cls.schema_validate(value, _json_schema)
except (ValidationError, SchemaError):
cls.logger.warning('Example type mismatch in path {}'.format(path))
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/operation.py
|
Operation.get_parameters_by_location
|
python
|
def get_parameters_by_location(self, locations=None, excludes=None):
    """Select operation parameters by their ``in`` location.

    :param locations: locations to keep, or None to keep all
    :type locations: list or None
    :param excludes: locations to drop, or None to drop none
    :type excludes: list or None
    :return: list of Parameter
    :rtype: list
    """
    selected = list(self.parameters)
    if locations:
        selected = [param for param in selected if param.location_in in locations]
    if excludes:
        selected = [param for param in selected if param.location_in not in excludes]
    return selected
|
Get parameters list by location
:param locations: list of locations
:type locations: list or None
:param excludes: list of excludes locations
:type excludes: list or None
:return: list of Parameter
:rtype: list
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/operation.py#L72-L87
| null |
class Operation(SecurityMixin):
"""
Represents Swagger Operation Object
"""
parameters = None
responses = None
method = None
path = None
root = None #: root swagger object
def __init__(self, obj, method, path, root, storage, path_params=None):
self.method = method
self.path = path
self.root = root
self.storage = storage
self.operation_id = obj.get(
'operationId', self.get_operation_id(method, path))
self.summary = obj.get('summary')
self.description = obj.get('description')
self.consumes = obj.get('consumes', self.root.consumes)
self.produces = obj.get('produces', self.root.produces)
self.schemes = obj.get('schemes', self.root.schemes)
self._fill_parameters(obj.get('parameters', []), path_params)
self._fill_responses(obj['responses'])
self.deprecated = obj.get('deprecated', False)
self.tags = obj.get('tags', ['default'])
self.external_docs = obj.get('externalDocs')
self._fill_securities(obj)
@staticmethod
def get_operation_id(method, path):
op_id = '{}_{}'.format(method, path)
# copy-paste from swagger-js
op_id = re.sub(r'[\s!@#$%^&*()+=\[{\]};:<>|./?,\'"-]', '_', op_id)
op_id = re.sub(r'(_){2,}', '_', op_id)
op_id = re.sub(r'^[_]*', '', op_id)
op_id = re.sub(r'([_]*)$', '', op_id)
return op_id
def _fill_parameters(self, params, path_params):
self.parameters = []
for obj in params:
if '$ref' in obj:
self.parameters.append(self.root.parameter_definitions[obj['$ref']])
else:
self.parameters.append(
Parameter(obj, name=obj['name'], root=self.root, storage=self.storage))
if path_params:
self.parameters += path_params
if len(self.get_parameters_by_location(['body'])) > 1:
raise ConverterError(
'Invalid source file: More than one body parameters in %s' % self.path)
def _fill_responses(self, responses):
self.responses = {}
for code, obj in responses.items():
if '$ref' in obj:
self.responses[code] = self.root.response_definitions[obj['$ref']]
else:
self.responses[code] = Response(obj, name=code, root=self.root, storage=self.storage)
@cached_property
def body(self):
""" Return body request parameter
:return: Body parameter
:rtype: Parameter or None
"""
body = self.get_parameters_by_location(['body'])
return self.root.schemas.get(body[0].type) if body else None
|
Arello-Mobile/swagger2rst
|
swg2rst/swagger/operation.py
|
Operation.body
|
python
|
def body(self):
body = self.get_parameters_by_location(['body'])
return self.root.schemas.get(body[0].type) if body else None
|
Return body request parameter
:return: Body parameter
:rtype: Parameter or None
|
train
|
https://github.com/Arello-Mobile/swagger2rst/blob/e519f70701477dcc9f0bb237ee5b8e08e848701b/swg2rst/swagger/operation.py#L90-L97
|
[
"def get_parameters_by_location(self, locations=None, excludes=None):\n \"\"\" Get parameters list by location\n\n :param locations: list of locations\n :type locations: list or None\n :param excludes: list of excludes locations\n :type excludes: list or None\n :return: list of Parameter\n :rtype: list\n \"\"\"\n result = self.parameters\n if locations:\n result = filter(lambda x: x.location_in in locations, result)\n if excludes:\n result = filter(lambda x: x.location_in not in excludes, result)\n return list(result)\n"
] |
class Operation(SecurityMixin):
"""
Represents Swagger Operation Object
"""
parameters = None
responses = None
method = None
path = None
root = None #: root swagger object
def __init__(self, obj, method, path, root, storage, path_params=None):
self.method = method
self.path = path
self.root = root
self.storage = storage
self.operation_id = obj.get(
'operationId', self.get_operation_id(method, path))
self.summary = obj.get('summary')
self.description = obj.get('description')
self.consumes = obj.get('consumes', self.root.consumes)
self.produces = obj.get('produces', self.root.produces)
self.schemes = obj.get('schemes', self.root.schemes)
self._fill_parameters(obj.get('parameters', []), path_params)
self._fill_responses(obj['responses'])
self.deprecated = obj.get('deprecated', False)
self.tags = obj.get('tags', ['default'])
self.external_docs = obj.get('externalDocs')
self._fill_securities(obj)
@staticmethod
def get_operation_id(method, path):
op_id = '{}_{}'.format(method, path)
# copy-paste from swagger-js
op_id = re.sub(r'[\s!@#$%^&*()+=\[{\]};:<>|./?,\'"-]', '_', op_id)
op_id = re.sub(r'(_){2,}', '_', op_id)
op_id = re.sub(r'^[_]*', '', op_id)
op_id = re.sub(r'([_]*)$', '', op_id)
return op_id
def _fill_parameters(self, params, path_params):
self.parameters = []
for obj in params:
if '$ref' in obj:
self.parameters.append(self.root.parameter_definitions[obj['$ref']])
else:
self.parameters.append(
Parameter(obj, name=obj['name'], root=self.root, storage=self.storage))
if path_params:
self.parameters += path_params
if len(self.get_parameters_by_location(['body'])) > 1:
raise ConverterError(
'Invalid source file: More than one body parameters in %s' % self.path)
def _fill_responses(self, responses):
self.responses = {}
for code, obj in responses.items():
if '$ref' in obj:
self.responses[code] = self.root.response_definitions[obj['$ref']]
else:
self.responses[code] = Response(obj, name=code, root=self.root, storage=self.storage)
def get_parameters_by_location(self, locations=None, excludes=None):
""" Get parameters list by location
:param locations: list of locations
:type locations: list or None
:param excludes: list of excludes locations
:type excludes: list or None
:return: list of Parameter
:rtype: list
"""
result = self.parameters
if locations:
result = filter(lambda x: x.location_in in locations, result)
if excludes:
result = filter(lambda x: x.location_in not in excludes, result)
return list(result)
@cached_property
|
todbot/blink1-python
|
blink1/blink1.py
|
blink1
|
python
|
def blink1(switch_off=True, gamma=None, white_point=None):
b1 = Blink1(gamma=gamma, white_point=white_point)
yield b1
if switch_off:
b1.off()
b1.close()
|
Context manager which automatically shuts down the Blink(1)
after use.
:param switch_off: turn blink(1) off when exiting context
:param gamma: set gamma curve (as tuple)
:param white_point: set white point (as tuple)
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L303-L314
|
[
"def close(self):\n self.dev.close()\n self.dev = None\n",
"def off(self):\n \"\"\"Switch the blink(1) off instantly\n \"\"\"\n self.fade_to_color(0, 'black')\n"
] |
"""
blink1.py -- blink(1) Python library using python hidapi
All platforms:
% pip install blink1
"""
import logging
import time
import sys
from contextlib import contextmanager
import webcolors
import hid
import os
#from builtins import str as text
from .kelvin import kelvin_to_rgb, COLOR_TEMPERATURES
class Blink1ConnectionFailed(RuntimeError):
"""Raised when we cannot connect to a Blink(1)
"""
class InvalidColor(ValueError):
"""Raised when the user requests an implausible colour
"""
log = logging.getLogger(__name__)
logging.basicConfig(format='%(levelname)s:%(message)s',
level=logging.DEBUG if os.getenv('DEBUGBLINK1') else logging.INFO )
DEFAULT_GAMMA = (2, 2, 2)
DEFAULT_WHITE_POINT = (255, 255, 255)
REPORT_ID = 0x01
VENDOR_ID = 0x27b8
PRODUCT_ID = 0x01ed
class ColorCorrect(object):
    """Gamma-correct RGB colors against a configurable white point, see:
    http://en.wikipedia.org/wiki/Gamma_correction
    """
    def __init__(self, gamma, white_point):
        """
        :param gamma: tuple of per-channel (r, g, b) gamma values,
            each 0 > x >= 1
        :param white_point: white point as an (r, g, b) tuple, a numeric
            color temperature in Kelvin, or a named temperature string
        """
        self.gamma = gamma
        if isinstance(white_point, str):
            # Named temperature -> Kelvin -> RGB.
            self.white_point = kelvin_to_rgb(COLOR_TEMPERATURES[white_point])
        elif isinstance(white_point, (int, float)):
            self.white_point = kelvin_to_rgb(white_point)
        else:
            self.white_point = white_point
    @staticmethod
    def gamma_correct(gamma, white, luminance):
        # Normalize the channel into [0, 1], apply the power curve,
        # then scale back onto the channel's white level.
        normalized = luminance / 255.0
        return round(white * normalized ** gamma)
    def __call__(self, r, g, b):
        channels = zip(self.gamma, self.white_point, (r, g, b))
        return tuple(self.gamma_correct(gm, wp, lum) for gm, wp, lum in channels)
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
    """
    Find a particular blink(1) device, or the first one.

    Fixed: the ``hidraw = None`` assignments after each ``raise`` were
    unreachable dead code, and the two except clauses (IOError is an
    alias of OSError on Python 3) are merged.

    :param serial_number: serial number of blink(1) device (from Blink1.list())
    :raises Blink1ConnectionFailed: if the device cannot be opened
    """
    try:
        hidraw = hid.device(VENDOR_ID, PRODUCT_ID, serial_number)
        hidraw.open(VENDOR_ID, PRODUCT_ID, serial_number)
    except (IOError, OSError) as e:  # IOError: py2, OSError: py3
        raise Blink1ConnectionFailed(e)
    return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
@contextmanager
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.find
|
python
|
def find(serial_number=None):
    """
    Find a particular blink(1) device, or the first one.

    Fixed: the ``hidraw = None`` assignments after each ``raise`` were
    unreachable dead code, and the two except clauses (IOError is an
    alias of OSError on Python 3) are merged.

    :param serial_number: serial number of blink(1) device (from Blink1.list())
    :raises Blink1ConnectionFailed: if the device cannot be opened
    """
    try:
        hidraw = hid.device(VENDOR_ID, PRODUCT_ID, serial_number)
        hidraw.open(VENDOR_ID, PRODUCT_ID, serial_number)
    except (IOError, OSError) as e:  # IOError: py2, OSError: py3
        raise Blink1ConnectionFailed(e)
    return hidraw
|
Find a particular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L90-L107
| null |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def list():
    """
    List blink(1) devices connected, by serial number.

    Fixed: a duplicated ``@staticmethod`` decorator was applied twice,
    which makes the attribute uncallable on older Pythons.

    :return: list of blink(1) device serial numbers (empty on HID errors)
    """
    try:
        devs = hid.enumerate(VENDOR_ID, PRODUCT_ID)
        return [d.get('serial_number') for d in devs]
    except IOError:
        # No HID access / enumeration failure: report no devices.
        return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
    """
    Write a color & step-time pattern line to device RAM.

    :param step_milliseconds: how long this pattern line takes
    :param color: LED color (name, '#rrggbb', or (r, g, b) tuple)
    :param pos: color pattern line number (0-15)
    :param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
    """
    if self.dev is None:
        return ''
    self.setLedN(led_number)
    red, green, blue = self.color_to_rgb(color)
    r, g, b = self.cc(red, green, blue)
    # Device time unit is 10 ms, split into high/low bytes of the report.
    step_time = int(step_milliseconds / 10)
    th = (step_time & 0xff00) >> 8
    tl = step_time & 0x00ff
    buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th, tl, pos, 0]
    return self.write(buf)
def readPatternLine(self, pos):
    """
    Read the color pattern line at a position.

    :param pos: pattern line to read
    :return: pattern line data as a tuple (r, g, b, step_millis), or '' if no device
    """
    if self.dev is None:
        return ''
    buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
    self.write(buf)
    buf = self.read()
    (r, g, b) = (buf[2], buf[3], buf[4])
    # Step time comes back as a big-endian 16-bit count of 10 ms units.
    step_millis = ((buf[5] << 8) | buf[6]) * 10
    return (r, g, b, step_millis)
def readPattern(self):
    """
    Read the entire color pattern.

    :return: list of pattern line tuples, or '' if no device is open
    """
    if self.dev is None:
        return ''
    # FIXME: line count should be adjustable for different blink(1) models
    return [self.readPatternLine(i) for i in range(0, 16)]
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
    """
    Enable/disable the servertickle / serverdown watchdog.

    :param enable: True to enable serverTickle
    :param timeout_millis: milliseconds until servertickle is triggered
    :param stay_lit: True keeps the current color; False turns the blink(1) off
    :param start_pos: sub-pattern start position in the whole color pattern
    :param end_pos: sub-pattern end position in the whole color pattern
    """
    if self.dev is None:
        return ''
    en = int(bool(enable))
    # Device time unit is 10 ms, split into high/low bytes of the report.
    timeout_time = int(timeout_millis / 10)
    th = (timeout_time & 0xff00) >> 8
    tl = timeout_time & 0x00ff
    st = int(bool(stay_lit))
    buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
    self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.list
|
python
|
def list():
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
|
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L110-L120
| null |
class Blink1:
    """Light controller; sends commands to blink(1) and blink(1) mk2 devices over USB HID."""

    def __init__(self, serial_number=None, gamma=None, white_point=None):
        """
        :param serial_number: serial number of blink(1) to open, otherwise first found
        :param gamma: triple of per-channel gamma values, e.g. (2, 2, 2)
        :param white_point: white point used by the color-correction function
        """
        self.cc = ColorCorrect(
            gamma=(gamma or DEFAULT_GAMMA),
            white_point=(white_point or DEFAULT_WHITE_POINT)
        )
        self.dev = self.find(serial_number)

    def close(self):
        """Close the underlying HID device and forget it."""
        self.dev.close()
        self.dev = None

    @staticmethod
    def find(serial_number=None):
        """
        Find a particular blink(1) device, or the first one.

        :param serial_number: serial number of blink(1) device (from Blink1.list())
        :raises Blink1ConnectionFailed: if the device cannot be opened
        """
        try:
            hidraw = hid.device(VENDOR_ID, PRODUCT_ID, serial_number)
            hidraw.open(VENDOR_ID, PRODUCT_ID, serial_number)
        except (IOError, OSError) as e:  # IOError: python2, OSError: python3
            raise Blink1ConnectionFailed(e)
        return hidraw

    def notfound(self):
        # FIXME: placeholder inherited from upstream; was a broken
        # @staticmethod taking 'self'. Decide what to do when no device found.
        return None

    def write(self, buf):
        """
        Write a command to the blink(1); low-level internal use.
        Sends USB Feature Report 0x01 with an 8-byte payload.
        Note: 'buf' must be 9 bytes (report id + 8 payload) or bad things happen.
        """
        log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
        self.dev.send_feature_report(buf)

    def read(self):
        """
        Read a command result from the blink(1); low-level internal use.
        Receives USB Feature Report 0x01 with an 8-byte payload.
        """
        buf = self.dev.get_feature_report(REPORT_ID, 9)
        log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
        return buf

    def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
        """Command blink(1) to fade to an RGB color; no color correction applied."""
        action = ord('c')
        # Device time unit is 10 ms, split into high/low bytes of the report.
        fade_time = int(fade_milliseconds / 10)
        th = (fade_time & 0xff00) >> 8
        tl = fade_time & 0x00ff
        buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
        self.write(buf)

    def fade_to_rgb(self, fade_milliseconds, red, green, blue, led_number=0):
        """Fade to an RGB color with gamma/white-point correction applied."""
        r, g, b = self.cc(red, green, blue)
        return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)

    @staticmethod
    def color_to_rgb(color):
        """
        Convert a color name or '#rrggbb' hex code to an (r, g, b) tuple.

        :raises InvalidColor: if the string is not a recognized color
        """
        if isinstance(color, tuple):
            return color
        if color.startswith('#'):
            try:
                return webcolors.hex_to_rgb(color)
            except ValueError:
                raise InvalidColor(color)
        try:
            return webcolors.name_to_rgb(color)
        except ValueError:
            raise InvalidColor(color)

    def fade_to_color(self, fade_milliseconds, color):
        """
        Fade the light to a known color over the given duration.

        :param fade_milliseconds: duration of the fade in milliseconds
        :param color: named color, hex code, or (r, g, b) tuple to fade to
        :return: None
        """
        red, green, blue = self.color_to_rgb(color)
        return self.fade_to_rgb(fade_milliseconds, red, green, blue)

    def off(self):
        """Switch the blink(1) off instantly."""
        self.fade_to_color(0, 'black')

    def get_version(self):
        """Get blink(1) firmware version as a string, or '' if no device is open."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
        self.write(buf)
        time.sleep(.05)  # give the firmware time to fill in the response report
        version_raw = self.read()
        version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
        return str(version)

    def get_serial_number(self):
        """Get blink(1) serial number as a string."""
        return self.dev.get_serial_number_string()
        # return usb.util.get_string(self.dev, 256, 3)

    def play(self, start_pos=0, end_pos=0, count=0):
        """
        Play the internal color pattern.

        :param start_pos: pattern line to start from
        :param end_pos: pattern line to end at
        :param count: number of times to play, 0 = play forever
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
        return self.write(buf)

    def stop(self):
        """Stop internal color pattern playback."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
        return self.write(buf)

    def savePattern(self):
        """Save the internal RAM pattern to flash."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
        return self.write(buf)

    def setLedN(self, led_number=0):
        """Set the 'current LED' used by writePatternLine (0=all, 1=LEDA, 2=LEDB)."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('l'), led_number, 0, 0, 0, 0, 0, 0]
        self.write(buf)

    def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
        """
        Write a color & step-time pattern line to device RAM.

        :param step_milliseconds: how long this pattern line takes
        :param color: LED color
        :param pos: color pattern line number (0-15)
        :param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
        """
        if self.dev is None:
            return ''
        self.setLedN(led_number)
        red, green, blue = self.color_to_rgb(color)
        r, g, b = self.cc(red, green, blue)
        step_time = int(step_milliseconds / 10)
        th = (step_time & 0xff00) >> 8
        tl = step_time & 0x00ff
        buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th, tl, pos, 0]
        return self.write(buf)

    def readPatternLine(self, pos):
        """
        Read the color pattern line at a position.

        :param pos: pattern line to read
        :return: tuple (r, g, b, step_millis), or '' if no device is open
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
        self.write(buf)
        buf = self.read()
        (r, g, b) = (buf[2], buf[3], buf[4])
        step_millis = ((buf[5] << 8) | buf[6]) * 10
        return (r, g, b, step_millis)

    def readPattern(self):
        """Read the entire color pattern as a list of pattern line tuples."""
        if self.dev is None:
            return ''
        # FIXME: line count should be adjustable for different blink(1) models
        return [self.readPatternLine(i) for i in range(0, 16)]

    def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
        """
        Enable/disable the servertickle / serverdown watchdog.

        :param enable: True to enable serverTickle
        :param timeout_millis: milliseconds until servertickle is triggered
        :param stay_lit: True keeps the current color; False turns the blink(1) off
        :param start_pos: sub-pattern start position in the whole color pattern
        :param end_pos: sub-pattern end position in the whole color pattern
        """
        if self.dev is None:
            return ''
        en = int(bool(enable))
        timeout_time = int(timeout_millis / 10)
        th = (timeout_time & 0xff00) >> 8
        tl = timeout_time & 0x00ff
        st = int(bool(stay_lit))
        buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
        self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.write
|
python
|
def write(self,buf):
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
|
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L125-L132
| null |
class Blink1:
    """Light controller; sends commands to blink(1) and blink(1) mk2 devices over USB HID."""

    def __init__(self, serial_number=None, gamma=None, white_point=None):
        """
        :param serial_number: serial number of blink(1) to open, otherwise first found
        :param gamma: triple of per-channel gamma values, e.g. (2, 2, 2)
        :param white_point: white point used by the color-correction function
        """
        self.cc = ColorCorrect(
            gamma=(gamma or DEFAULT_GAMMA),
            white_point=(white_point or DEFAULT_WHITE_POINT)
        )
        self.dev = self.find(serial_number)

    def close(self):
        """Close the underlying HID device and forget it."""
        self.dev.close()
        self.dev = None

    @staticmethod
    def find(serial_number=None):
        """
        Find a particular blink(1) device, or the first one.

        :param serial_number: serial number of blink(1) device (from Blink1.list())
        :raises Blink1ConnectionFailed: if the device cannot be opened
        """
        try:
            hidraw = hid.device(VENDOR_ID, PRODUCT_ID, serial_number)
            hidraw.open(VENDOR_ID, PRODUCT_ID, serial_number)
        except (IOError, OSError) as e:  # IOError: python2, OSError: python3
            raise Blink1ConnectionFailed(e)
        return hidraw

    @staticmethod
    def list():
        """
        List connected blink(1) devices by serial number.

        :return: list of blink(1) serial-number strings (empty on enumeration error)
        """
        try:
            devs = hid.enumerate(VENDOR_ID, PRODUCT_ID)
            # Comprehension avoids shadow-confusion with this method's name.
            return [d.get('serial_number') for d in devs]
        except IOError:
            return []

    def notfound(self):
        # FIXME: placeholder inherited from upstream; decide what to do
        # when no device is found.
        return None

    def write(self, buf):
        """
        Write a command to the blink(1); low-level internal use.
        Sends USB Feature Report 0x01 with an 8-byte payload.
        Note: 'buf' must be 9 bytes (report id + 8 payload) or bad things happen.
        """
        log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
        self.dev.send_feature_report(buf)

    def read(self):
        """
        Read a command result from the blink(1); low-level internal use.
        Receives USB Feature Report 0x01 with an 8-byte payload.
        """
        buf = self.dev.get_feature_report(REPORT_ID, 9)
        log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
        return buf

    def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
        """Command blink(1) to fade to an RGB color; no color correction applied."""
        action = ord('c')
        # Device time unit is 10 ms, split into high/low bytes of the report.
        fade_time = int(fade_milliseconds / 10)
        th = (fade_time & 0xff00) >> 8
        tl = fade_time & 0x00ff
        buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
        self.write(buf)

    def fade_to_rgb(self, fade_milliseconds, red, green, blue, led_number=0):
        """Fade to an RGB color with gamma/white-point correction applied."""
        r, g, b = self.cc(red, green, blue)
        return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)

    @staticmethod
    def color_to_rgb(color):
        """
        Convert a color name or '#rrggbb' hex code to an (r, g, b) tuple.

        :raises InvalidColor: if the string is not a recognized color
        """
        if isinstance(color, tuple):
            return color
        if color.startswith('#'):
            try:
                return webcolors.hex_to_rgb(color)
            except ValueError:
                raise InvalidColor(color)
        try:
            return webcolors.name_to_rgb(color)
        except ValueError:
            raise InvalidColor(color)

    def fade_to_color(self, fade_milliseconds, color):
        """
        Fade the light to a known color over the given duration.

        :param fade_milliseconds: duration of the fade in milliseconds
        :param color: named color, hex code, or (r, g, b) tuple to fade to
        :return: None
        """
        red, green, blue = self.color_to_rgb(color)
        return self.fade_to_rgb(fade_milliseconds, red, green, blue)

    def off(self):
        """Switch the blink(1) off instantly."""
        self.fade_to_color(0, 'black')

    def get_version(self):
        """Get blink(1) firmware version as a string, or '' if no device is open."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
        self.write(buf)
        time.sleep(.05)  # give the firmware time to fill in the response report
        version_raw = self.read()
        version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
        return str(version)

    def get_serial_number(self):
        """Get blink(1) serial number as a string."""
        return self.dev.get_serial_number_string()
        # return usb.util.get_string(self.dev, 256, 3)

    def play(self, start_pos=0, end_pos=0, count=0):
        """
        Play the internal color pattern.

        :param start_pos: pattern line to start from
        :param end_pos: pattern line to end at
        :param count: number of times to play, 0 = play forever
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
        return self.write(buf)

    def stop(self):
        """Stop internal color pattern playback."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
        return self.write(buf)

    def savePattern(self):
        """Save the internal RAM pattern to flash."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
        return self.write(buf)

    def setLedN(self, led_number=0):
        """Set the 'current LED' used by writePatternLine (0=all, 1=LEDA, 2=LEDB)."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('l'), led_number, 0, 0, 0, 0, 0, 0]
        self.write(buf)

    def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
        """
        Write a color & step-time pattern line to device RAM.

        :param step_milliseconds: how long this pattern line takes
        :param color: LED color
        :param pos: color pattern line number (0-15)
        :param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
        """
        if self.dev is None:
            return ''
        self.setLedN(led_number)
        red, green, blue = self.color_to_rgb(color)
        r, g, b = self.cc(red, green, blue)
        step_time = int(step_milliseconds / 10)
        th = (step_time & 0xff00) >> 8
        tl = step_time & 0x00ff
        buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th, tl, pos, 0]
        return self.write(buf)

    def readPatternLine(self, pos):
        """
        Read the color pattern line at a position.

        :param pos: pattern line to read
        :return: tuple (r, g, b, step_millis), or '' if no device is open
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
        self.write(buf)
        buf = self.read()
        (r, g, b) = (buf[2], buf[3], buf[4])
        step_millis = ((buf[5] << 8) | buf[6]) * 10
        return (r, g, b, step_millis)

    def readPattern(self):
        """Read the entire color pattern as a list of pattern line tuples."""
        if self.dev is None:
            return ''
        # FIXME: line count should be adjustable for different blink(1) models
        return [self.readPatternLine(i) for i in range(0, 16)]

    def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
        """
        Enable/disable the servertickle / serverdown watchdog.

        :param enable: True to enable serverTickle
        :param timeout_millis: milliseconds until servertickle is triggered
        :param stay_lit: True keeps the current color; False turns the blink(1) off
        :param start_pos: sub-pattern start position in the whole color pattern
        :param end_pos: sub-pattern end position in the whole color pattern
        """
        if self.dev is None:
            return ''
        en = int(bool(enable))
        timeout_time = int(timeout_millis / 10)
        th = (timeout_time & 0xff00) >> 8
        tl = timeout_time & 0x00ff
        st = int(bool(stay_lit))
        buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
        self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.read
|
python
|
def read(self):
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
|
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L134-L142
| null |
class Blink1:
    """Light controller; sends commands to blink(1) and blink(1) mk2 devices over USB HID."""

    def __init__(self, serial_number=None, gamma=None, white_point=None):
        """
        :param serial_number: serial number of blink(1) to open, otherwise first found
        :param gamma: triple of per-channel gamma values, e.g. (2, 2, 2)
        :param white_point: white point used by the color-correction function
        """
        self.cc = ColorCorrect(
            gamma=(gamma or DEFAULT_GAMMA),
            white_point=(white_point or DEFAULT_WHITE_POINT)
        )
        self.dev = self.find(serial_number)

    def close(self):
        """Close the underlying HID device and forget it."""
        self.dev.close()
        self.dev = None

    @staticmethod
    def find(serial_number=None):
        """
        Find a particular blink(1) device, or the first one.

        :param serial_number: serial number of blink(1) device (from Blink1.list())
        :raises Blink1ConnectionFailed: if the device cannot be opened
        """
        try:
            hidraw = hid.device(VENDOR_ID, PRODUCT_ID, serial_number)
            hidraw.open(VENDOR_ID, PRODUCT_ID, serial_number)
        except (IOError, OSError) as e:  # IOError: python2, OSError: python3
            raise Blink1ConnectionFailed(e)
        return hidraw

    @staticmethod
    def list():
        """
        List connected blink(1) devices by serial number.

        :return: list of blink(1) serial-number strings (empty on enumeration error)
        """
        try:
            devs = hid.enumerate(VENDOR_ID, PRODUCT_ID)
            # Comprehension avoids shadow-confusion with this method's name.
            return [d.get('serial_number') for d in devs]
        except IOError:
            return []

    def notfound(self):
        # FIXME: placeholder inherited from upstream; decide what to do
        # when no device is found.
        return None

    def write(self, buf):
        """
        Write a command to the blink(1); low-level internal use.
        Sends USB Feature Report 0x01 with an 8-byte payload.
        Note: 'buf' must be 9 bytes (report id + 8 payload) or bad things happen.
        """
        log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
        self.dev.send_feature_report(buf)

    def read(self):
        """
        Read a command result from the blink(1); low-level internal use.
        Receives USB Feature Report 0x01 with an 8-byte payload.
        """
        buf = self.dev.get_feature_report(REPORT_ID, 9)
        log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
        return buf

    def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
        """Command blink(1) to fade to an RGB color; no color correction applied."""
        action = ord('c')
        # Device time unit is 10 ms, split into high/low bytes of the report.
        fade_time = int(fade_milliseconds / 10)
        th = (fade_time & 0xff00) >> 8
        tl = fade_time & 0x00ff
        buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
        self.write(buf)

    def fade_to_rgb(self, fade_milliseconds, red, green, blue, led_number=0):
        """Fade to an RGB color with gamma/white-point correction applied."""
        r, g, b = self.cc(red, green, blue)
        return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)

    @staticmethod
    def color_to_rgb(color):
        """
        Convert a color name or '#rrggbb' hex code to an (r, g, b) tuple.

        :raises InvalidColor: if the string is not a recognized color
        """
        if isinstance(color, tuple):
            return color
        if color.startswith('#'):
            try:
                return webcolors.hex_to_rgb(color)
            except ValueError:
                raise InvalidColor(color)
        try:
            return webcolors.name_to_rgb(color)
        except ValueError:
            raise InvalidColor(color)

    def fade_to_color(self, fade_milliseconds, color):
        """
        Fade the light to a known color over the given duration.

        :param fade_milliseconds: duration of the fade in milliseconds
        :param color: named color, hex code, or (r, g, b) tuple to fade to
        :return: None
        """
        red, green, blue = self.color_to_rgb(color)
        return self.fade_to_rgb(fade_milliseconds, red, green, blue)

    def off(self):
        """Switch the blink(1) off instantly."""
        self.fade_to_color(0, 'black')

    def get_version(self):
        """Get blink(1) firmware version as a string, or '' if no device is open."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
        self.write(buf)
        time.sleep(.05)  # give the firmware time to fill in the response report
        version_raw = self.read()
        version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
        return str(version)

    def get_serial_number(self):
        """Get blink(1) serial number as a string."""
        return self.dev.get_serial_number_string()
        # return usb.util.get_string(self.dev, 256, 3)

    def play(self, start_pos=0, end_pos=0, count=0):
        """
        Play the internal color pattern.

        :param start_pos: pattern line to start from
        :param end_pos: pattern line to end at
        :param count: number of times to play, 0 = play forever
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
        return self.write(buf)

    def stop(self):
        """Stop internal color pattern playback."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
        return self.write(buf)

    def savePattern(self):
        """Save the internal RAM pattern to flash."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
        return self.write(buf)

    def setLedN(self, led_number=0):
        """Set the 'current LED' used by writePatternLine (0=all, 1=LEDA, 2=LEDB)."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('l'), led_number, 0, 0, 0, 0, 0, 0]
        self.write(buf)

    def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
        """
        Write a color & step-time pattern line to device RAM.

        :param step_milliseconds: how long this pattern line takes
        :param color: LED color
        :param pos: color pattern line number (0-15)
        :param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
        """
        if self.dev is None:
            return ''
        self.setLedN(led_number)
        red, green, blue = self.color_to_rgb(color)
        r, g, b = self.cc(red, green, blue)
        step_time = int(step_milliseconds / 10)
        th = (step_time & 0xff00) >> 8
        tl = step_time & 0x00ff
        buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th, tl, pos, 0]
        return self.write(buf)

    def readPatternLine(self, pos):
        """
        Read the color pattern line at a position.

        :param pos: pattern line to read
        :return: tuple (r, g, b, step_millis), or '' if no device is open
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
        self.write(buf)
        buf = self.read()
        (r, g, b) = (buf[2], buf[3], buf[4])
        step_millis = ((buf[5] << 8) | buf[6]) * 10
        return (r, g, b, step_millis)

    def readPattern(self):
        """Read the entire color pattern as a list of pattern line tuples."""
        if self.dev is None:
            return ''
        # FIXME: line count should be adjustable for different blink(1) models
        return [self.readPatternLine(i) for i in range(0, 16)]

    def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
        """
        Enable/disable the servertickle / serverdown watchdog.

        :param enable: True to enable serverTickle
        :param timeout_millis: milliseconds until servertickle is triggered
        :param stay_lit: True keeps the current color; False turns the blink(1) off
        :param start_pos: sub-pattern start position in the whole color pattern
        :param end_pos: sub-pattern end position in the whole color pattern
        """
        if self.dev is None:
            return ''
        en = int(bool(enable))
        timeout_time = int(timeout_millis / 10)
        th = (timeout_time & 0xff00) >> 8
        tl = timeout_time & 0x00ff
        st = int(bool(stay_lit))
        buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
        self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.fade_to_rgb_uncorrected
|
python
|
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
|
Command blink(1) to fade to RGB color, no color correction applied.
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L144-L153
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n"
] |
class Blink1:
    """Light controller; sends commands to blink(1) and blink(1) mk2 devices over USB HID."""

    def __init__(self, serial_number=None, gamma=None, white_point=None):
        """
        :param serial_number: serial number of blink(1) to open, otherwise first found
        :param gamma: triple of per-channel gamma values, e.g. (2, 2, 2)
        :param white_point: white point used by the color-correction function
        """
        self.cc = ColorCorrect(
            gamma=(gamma or DEFAULT_GAMMA),
            white_point=(white_point or DEFAULT_WHITE_POINT)
        )
        self.dev = self.find(serial_number)

    def close(self):
        """Close the underlying HID device and forget it."""
        self.dev.close()
        self.dev = None

    @staticmethod
    def find(serial_number=None):
        """
        Find a particular blink(1) device, or the first one.

        :param serial_number: serial number of blink(1) device (from Blink1.list())
        :raises Blink1ConnectionFailed: if the device cannot be opened
        """
        try:
            hidraw = hid.device(VENDOR_ID, PRODUCT_ID, serial_number)
            hidraw.open(VENDOR_ID, PRODUCT_ID, serial_number)
        except (IOError, OSError) as e:  # IOError: python2, OSError: python3
            raise Blink1ConnectionFailed(e)
        return hidraw

    @staticmethod
    def list():
        """
        List connected blink(1) devices by serial number.

        :return: list of blink(1) serial-number strings (empty on enumeration error)
        """
        try:
            devs = hid.enumerate(VENDOR_ID, PRODUCT_ID)
            # Comprehension avoids shadow-confusion with this method's name.
            return [d.get('serial_number') for d in devs]
        except IOError:
            return []

    def notfound(self):
        # FIXME: placeholder inherited from upstream; decide what to do
        # when no device is found.
        return None

    def write(self, buf):
        """
        Write a command to the blink(1); low-level internal use.
        Sends USB Feature Report 0x01 with an 8-byte payload.
        Note: 'buf' must be 9 bytes (report id + 8 payload) or bad things happen.
        """
        log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
        self.dev.send_feature_report(buf)

    def read(self):
        """
        Read a command result from the blink(1); low-level internal use.
        Receives USB Feature Report 0x01 with an 8-byte payload.
        """
        buf = self.dev.get_feature_report(REPORT_ID, 9)
        log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
        return buf

    def fade_to_rgb(self, fade_milliseconds, red, green, blue, led_number=0):
        """Fade to an RGB color with gamma/white-point correction applied."""
        r, g, b = self.cc(red, green, blue)
        # NOTE(review): fade_to_rgb_uncorrected is not defined in this class as
        # captured here -- presumably provided elsewhere; confirm before use.
        return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)

    @staticmethod
    def color_to_rgb(color):
        """
        Convert a color name or '#rrggbb' hex code to an (r, g, b) tuple.

        :raises InvalidColor: if the string is not a recognized color
        """
        if isinstance(color, tuple):
            return color
        if color.startswith('#'):
            try:
                return webcolors.hex_to_rgb(color)
            except ValueError:
                raise InvalidColor(color)
        try:
            return webcolors.name_to_rgb(color)
        except ValueError:
            raise InvalidColor(color)

    def fade_to_color(self, fade_milliseconds, color):
        """
        Fade the light to a known color over the given duration.

        :param fade_milliseconds: duration of the fade in milliseconds
        :param color: named color, hex code, or (r, g, b) tuple to fade to
        :return: None
        """
        red, green, blue = self.color_to_rgb(color)
        return self.fade_to_rgb(fade_milliseconds, red, green, blue)

    def off(self):
        """Switch the blink(1) off instantly."""
        self.fade_to_color(0, 'black')

    def get_version(self):
        """Get blink(1) firmware version as a string, or '' if no device is open."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
        self.write(buf)
        time.sleep(.05)  # give the firmware time to fill in the response report
        version_raw = self.read()
        version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
        return str(version)

    def get_serial_number(self):
        """Get blink(1) serial number as a string."""
        return self.dev.get_serial_number_string()
        # return usb.util.get_string(self.dev, 256, 3)

    def play(self, start_pos=0, end_pos=0, count=0):
        """
        Play the internal color pattern.

        :param start_pos: pattern line to start from
        :param end_pos: pattern line to end at
        :param count: number of times to play, 0 = play forever
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
        return self.write(buf)

    def stop(self):
        """Stop internal color pattern playback."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
        return self.write(buf)

    def savePattern(self):
        """Save the internal RAM pattern to flash."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
        return self.write(buf)

    def setLedN(self, led_number=0):
        """Set the 'current LED' used by writePatternLine (0=all, 1=LEDA, 2=LEDB)."""
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('l'), led_number, 0, 0, 0, 0, 0, 0]
        self.write(buf)

    def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
        """
        Write a color & step-time pattern line to device RAM.

        :param step_milliseconds: how long this pattern line takes
        :param color: LED color
        :param pos: color pattern line number (0-15)
        :param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
        """
        if self.dev is None:
            return ''
        self.setLedN(led_number)
        red, green, blue = self.color_to_rgb(color)
        r, g, b = self.cc(red, green, blue)
        step_time = int(step_milliseconds / 10)
        th = (step_time & 0xff00) >> 8
        tl = step_time & 0x00ff
        buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th, tl, pos, 0]
        return self.write(buf)

    def readPatternLine(self, pos):
        """
        Read the color pattern line at a position.

        :param pos: pattern line to read
        :return: tuple (r, g, b, step_millis), or '' if no device is open
        """
        if self.dev is None:
            return ''
        buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
        self.write(buf)
        buf = self.read()
        (r, g, b) = (buf[2], buf[3], buf[4])
        step_millis = ((buf[5] << 8) | buf[6]) * 10
        return (r, g, b, step_millis)

    def readPattern(self):
        """Read the entire color pattern as a list of pattern line tuples."""
        if self.dev is None:
            return ''
        # FIXME: line count should be adjustable for different blink(1) models
        return [self.readPatternLine(i) for i in range(0, 16)]

    def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
        """
        Enable/disable the servertickle / serverdown watchdog.

        :param enable: True to enable serverTickle
        :param timeout_millis: milliseconds until servertickle is triggered
        :param stay_lit: True keeps the current color; False turns the blink(1) off
        :param start_pos: sub-pattern start position in the whole color pattern
        :param end_pos: sub-pattern end position in the whole color pattern
        """
        if self.dev is None:
            return ''
        en = int(bool(enable))
        timeout_time = int(timeout_millis / 10)
        th = (timeout_time & 0xff00) >> 8
        tl = timeout_time & 0x00ff
        st = int(bool(stay_lit))
        buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
        self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.color_to_rgb
|
python
|
def color_to_rgb(color):
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
|
Convert color name or hexcode to (r,g,b) tuple
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L160-L175
| null |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.fade_to_color
|
python
|
def fade_to_color(self, fade_milliseconds, color):
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
|
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L178-L187
|
[
"def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):\n r, g, b = self.cc(red, green, blue)\n return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)\n",
"def color_to_rgb(color):\n \"\"\"\n Convert color name or hexcode to (r,g,b) tuple\n \"\"\"\n if isinstance(color, tuple):\n return color\n if color.startswith('#'):\n try:\n return webcolors.hex_to_rgb(color)\n except ValueError:\n raise InvalidColor(color)\n\n try:\n return webcolors.name_to_rgb(color)\n except ValueError:\n raise InvalidColor(color)\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.get_version
|
python
|
def get_version(self):
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
|
Get blink(1) firmware version
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L194-L203
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n",
"def read(self):\n \"\"\"\n Read command result from blink(1), low-level internal use\n Receive USB Feature Report 0x01 from blink(1) with 8-byte payload\n Note: buf must be 8 bytes or bad things happen\n \"\"\"\n buf = self.dev.get_feature_report(REPORT_ID,9)\n log.debug(\"blink1read: \" + \",\".join('0x%02x' % v for v in buf))\n return buf\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.play
|
python
|
def play(self, start_pos=0, end_pos=0, count=0):
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
|
Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L212-L220
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.stop
|
python
|
def stop(self):
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
|
Stop internal color pattern playing
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L222-L227
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.savePattern
|
python
|
def savePattern(self):
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
|
Save internal RAM pattern to flash
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L229-L234
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.setLedN
|
python
|
def setLedN(self, led_number=0):
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
|
Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L236-L242
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.writePatternLine
|
python
|
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
|
Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L244-L259
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n",
"def color_to_rgb(color):\n \"\"\"\n Convert color name or hexcode to (r,g,b) tuple\n \"\"\"\n if isinstance(color, tuple):\n return color\n if color.startswith('#'):\n try:\n return webcolors.hex_to_rgb(color)\n except ValueError:\n raise InvalidColor(color)\n\n try:\n return webcolors.name_to_rgb(color)\n except ValueError:\n raise InvalidColor(color)\n",
"def setLedN(self, led_number=0):\n \"\"\"Set the 'current LED' value for writePatternLine\n :param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB\n \"\"\"\n if ( self.dev == None ): return ''\n buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]\n self.write(buf) \n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.readPatternLine
|
python
|
def readPatternLine(self, pos):
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
|
Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L261-L272
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n",
"def read(self):\n \"\"\"\n Read command result from blink(1), low-level internal use\n Receive USB Feature Report 0x01 from blink(1) with 8-byte payload\n Note: buf must be 8 bytes or bad things happen\n \"\"\"\n buf = self.dev.get_feature_report(REPORT_ID,9)\n log.debug(\"blink1read: \" + \",\".join('0x%02x' % v for v in buf))\n return buf\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.readPattern
|
python
|
def readPattern(self):
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
|
Read the entire color pattern
:return List of pattern line tuples
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L274-L282
|
[
"def readPatternLine(self, pos):\n \"\"\"Read a color pattern line at position\n :param pos: pattern line to read\n :return pattern line data as tuple (r,g,b, step_millis)\n \"\"\"\n if ( self.dev == None ): return ''\n buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]\n self.write(buf)\n buf = self.read()\n (r,g,b) = (buf[2],buf[3],buf[4])\n step_millis = ((buf[5] << 8) | buf[6]) * 10\n return (r,g,b,step_millis)\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
"""Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
"""
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
todbot/blink1-python
|
blink1/blink1.py
|
Blink1.serverTickle
|
python
|
def serverTickle(self, enable, timeout_millis=0, stay_lit=False, start_pos=0, end_pos=16):
if ( self.dev == None ): return ''
en = int(enable == True)
timeout_time = int(timeout_millis/10)
th = (timeout_time & 0xff00) >>8
tl = timeout_time & 0x00ff
st = int(stay_lit == True)
buf = [REPORT_ID, ord('D'), en, th, tl, st, start_pos, end_pos, 0]
self.write(buf)
|
Enable/disable servertickle / serverdown watchdog
:param: enable: Set True to enable serverTickle
:param: timeout_millis: millisecs until servertickle is triggered
:param: stay_lit: Set True to keep current color of blink(1), False to turn off
:param: start_pos: Sub-pattern start position in whole color pattern
:param: end_pos: Sub-pattern end position in whole color pattern
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/blink1.py#L284-L299
|
[
"def write(self,buf):\n \"\"\"\n Write command to blink(1), low-level internal use\n Send USB Feature Report 0x01 to blink(1) with 8-byte payload\n Note: arg 'buf' must be 8 bytes or bad things happen\n \"\"\"\n log.debug(\"blink1write:\" + \",\".join('0x%02x' % v for v in buf))\n self.dev.send_feature_report(buf)\n"
] |
class Blink1:
"""Light controller class, sends messages to the blink(1) and blink(1) mk2 via USB HID.
"""
def __init__(self, serial_number=None, gamma=None, white_point=None):
"""
:param serial_number: serial number of blink(1) to open, otherwise first found
:param gamma: Triple of gammas for each channel e.g. (2, 2, 2)
"""
self.cc = ColorCorrect(
gamma=(gamma or DEFAULT_GAMMA),
white_point=(white_point or DEFAULT_WHITE_POINT)
)
self.dev = self.find(serial_number)
def close(self):
self.dev.close()
self.dev = None
@staticmethod
def find(serial_number=None):
"""
Find a praticular blink(1) device, or the first one
:param serial_number: serial number of blink(1) device (from Blink1.list())
"""
try:
hidraw = hid.device(VENDOR_ID,PRODUCT_ID,serial_number)
hidraw.open(VENDOR_ID,PRODUCT_ID,serial_number)
# hidraw = hid.device(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
# hidraw.open(VENDOR_ID,PRODUCT_ID,unicode(serial_number))
except IOError as e: # python2
raise Blink1ConnectionFailed(e)
hidraw = None
except OSError as e: # python3
raise Blink1ConnectionFailed(e)
hidraw = None
return hidraw
@staticmethod
def list():
"""
List blink(1) devices connected, by serial number
:return: List of blink(1) device serial numbers
"""
try:
devs = hid.enumerate(VENDOR_ID,PRODUCT_ID)
serials = list(map(lambda d:d.get('serial_number'), devs))
return serials
except IOError as e:
return []
def notfound(self):
return None # fixme what to do here
def write(self,buf):
"""
Write command to blink(1), low-level internal use
Send USB Feature Report 0x01 to blink(1) with 8-byte payload
Note: arg 'buf' must be 8 bytes or bad things happen
"""
log.debug("blink1write:" + ",".join('0x%02x' % v for v in buf))
self.dev.send_feature_report(buf)
def read(self):
"""
Read command result from blink(1), low-level internal use
Receive USB Feature Report 0x01 from blink(1) with 8-byte payload
Note: buf must be 8 bytes or bad things happen
"""
buf = self.dev.get_feature_report(REPORT_ID,9)
log.debug("blink1read: " + ",".join('0x%02x' % v for v in buf))
return buf
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
"""
Command blink(1) to fade to RGB color, no color correction applied.
"""
action = ord('c')
fade_time = int(fade_milliseconds / 10)
th = (fade_time & 0xff00) >> 8
tl = fade_time & 0x00ff
buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
self.write( buf )
def fade_to_rgb(self,fade_milliseconds, red, green, blue, led_number=0):
r, g, b = self.cc(red, green, blue)
return self.fade_to_rgb_uncorrected(fade_milliseconds, r, g, b, led_number)
@staticmethod
def color_to_rgb(color):
"""
Convert color name or hexcode to (r,g,b) tuple
"""
if isinstance(color, tuple):
return color
if color.startswith('#'):
try:
return webcolors.hex_to_rgb(color)
except ValueError:
raise InvalidColor(color)
try:
return webcolors.name_to_rgb(color)
except ValueError:
raise InvalidColor(color)
def fade_to_color(self, fade_milliseconds, color):
"""
Fade the light to a known colour in a
:param fade_milliseconds: Duration of the fade in milliseconds
:param color: Named color to fade to
:return: None
"""
red, green, blue = self.color_to_rgb(color)
return self.fade_to_rgb(fade_milliseconds, red, green, blue)
def off(self):
"""Switch the blink(1) off instantly
"""
self.fade_to_color(0, 'black')
def get_version(self):
"""Get blink(1) firmware version
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('v'), 0, 0, 0, 0, 0, 0, 0]
self.write(buf)
time.sleep(.05)
version_raw = self.read()
version = (version_raw[3] - ord('0')) * 100 + (version_raw[4] - ord('0'))
return str(version)
def get_serial_number(self):
"""Get blink(1) serial number
:return blink(1) serial number as string
"""
return self.dev.get_serial_number_string()
# return usb.util.get_string(self.dev, 256, 3)
def play(self, start_pos=0, end_pos=0, count=0):
"""Play internal color pattern
:param start_pos: pattern line to start from
:param end_pos: pattern line to end at
:param count: number of times to play, 0=play forever
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 1, int(start_pos), int(end_pos), int(count), 0, 0, 0]
return self.write(buf);
def stop(self):
"""Stop internal color pattern playing
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('p'), 0, 0, 0, 0, 0, 0, 0]
return self.write(buf);
def savePattern(self):
"""Save internal RAM pattern to flash
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('W'), 0xBE, 0xEF, 0xCA, 0xFE, 0, 0, 0]
return self.write(buf);
def setLedN(self, led_number=0):
"""Set the 'current LED' value for writePatternLine
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('l'), led_number, 0,0,0,0,0,0]
self.write(buf)
def writePatternLine(self, step_milliseconds, color, pos, led_number=0):
"""Write a color & step time color pattern line to RAM
:param step_milliseconds: how long for this pattern line to take
:param color: LED color
:param pos: color pattern line number (0-15)
:param led_number: LED to adjust, 0=all, 1=LEDA, 2=LEDB
"""
if ( self.dev == None ): return ''
self.setLedN(led_number)
red, green, blue = self.color_to_rgb(color)
r, g, b = self.cc(red, green, blue)
step_time = int(step_milliseconds / 10)
th = (step_time & 0xff00) >> 8
tl = step_time & 0x00ff
buf = [REPORT_ID, ord('P'), int(r), int(g), int(b), th,tl, pos, 0]
return self.write(buf);
def readPatternLine(self, pos):
"""Read a color pattern line at position
:param pos: pattern line to read
:return pattern line data as tuple (r,g,b, step_millis)
"""
if ( self.dev == None ): return ''
buf = [REPORT_ID, ord('R'), 0, 0, 0, 0, 0, int(pos), 0]
self.write(buf)
buf = self.read()
(r,g,b) = (buf[2],buf[3],buf[4])
step_millis = ((buf[5] << 8) | buf[6]) * 10
return (r,g,b,step_millis)
def readPattern(self):
"""Read the entire color pattern
:return List of pattern line tuples
"""
if ( self.dev == None ): return ''
pattern=[]
for i in range(0,16): # FIXME: adjustable for diff blink(1) models
pattern.append( self.readPatternLine(i) )
return pattern
|
todbot/blink1-python
|
blink1/kelvin.py
|
correct_output
|
python
|
def correct_output(luminosity):
if luminosity < 0:
val = 0
elif luminosity > 255:
val = 255
else:
val = luminosity
return round(val)
|
:param luminosity: Input luminosity
:return: Luminosity limited to the 0 <= l <= 255 range.
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/kelvin.py#L24-L35
| null |
"""
Python implementation of Tanner Helland's color color conversion code.
http://www.tannerhelland.com/4435/convert-temperature-rgb-algorithm-code/
"""
import math
# Aproximate colour temperatures for common lighting conditions.
COLOR_TEMPERATURES={
'candle':1900,
'sunrise':2000,
'incandescent':2500,
'tungsten':3200,
'halogen':3350,
'sunlight':5000,
'overcast':6000,
'shade':7000,
'blue-sky':10000,
'warm-fluorescent':2700,
'fluorescent':37500,
'cool-fluorescent':5000,
}
def kelvin_to_rgb(kelvin):
"""
Convert a color temperature given in kelvin to an approximate RGB value.
:param kelvin: Color temp in K
:return: Tuple of (r, g, b), equivalent color for the temperature
"""
temp = kelvin / 100.0
# Calculate Red:
if temp <= 66:
red = 255
else:
red = 329.698727446 * ((temp - 60) ** -0.1332047592)
# Calculate Green:
if temp <= 66:
green = 99.4708025861 * math.log(temp) - 161.1195681661
else:
green = 288.1221695283 * ((temp - 60) ** -0.0755148492)
#Calculate Blue:
if temp > 66:
blue = 255
elif temp <= 19:
blue = 0
else:
blue = 138.5177312231 * math.log(temp - 10) - 305.0447927307
return tuple(correct_output(c) for c in (red, green, blue))
|
todbot/blink1-python
|
blink1/kelvin.py
|
kelvin_to_rgb
|
python
|
def kelvin_to_rgb(kelvin):
temp = kelvin / 100.0
# Calculate Red:
if temp <= 66:
red = 255
else:
red = 329.698727446 * ((temp - 60) ** -0.1332047592)
# Calculate Green:
if temp <= 66:
green = 99.4708025861 * math.log(temp) - 161.1195681661
else:
green = 288.1221695283 * ((temp - 60) ** -0.0755148492)
#Calculate Blue:
if temp > 66:
blue = 255
elif temp <= 19:
blue = 0
else:
blue = 138.5177312231 * math.log(temp - 10) - 305.0447927307
return tuple(correct_output(c) for c in (red, green, blue))
|
Convert a color temperature given in kelvin to an approximate RGB value.
:param kelvin: Color temp in K
:return: Tuple of (r, g, b), equivalent color for the temperature
|
train
|
https://github.com/todbot/blink1-python/blob/7a5183becd9662f88da3c29afd3447403f4ef82f/blink1/kelvin.py#L37-L67
| null |
"""
Python implementation of Tanner Helland's color color conversion code.
http://www.tannerhelland.com/4435/convert-temperature-rgb-algorithm-code/
"""
import math
# Aproximate colour temperatures for common lighting conditions.
COLOR_TEMPERATURES={
'candle':1900,
'sunrise':2000,
'incandescent':2500,
'tungsten':3200,
'halogen':3350,
'sunlight':5000,
'overcast':6000,
'shade':7000,
'blue-sky':10000,
'warm-fluorescent':2700,
'fluorescent':37500,
'cool-fluorescent':5000,
}
def correct_output(luminosity):
"""
:param luminosity: Input luminosity
:return: Luminosity limited to the 0 <= l <= 255 range.
"""
if luminosity < 0:
val = 0
elif luminosity > 255:
val = 255
else:
val = luminosity
return round(val)
|
Pixelapse/pyglass
|
pyglass/quicklook/export.py
|
embedded_preview
|
python
|
def embedded_preview(src_path):
''' Returns path to temporary copy of embedded QuickLook preview, if it exists '''
try:
assert(exists(src_path) and isdir(src_path))
preview_list = glob(join(src_path, '[Q|q]uicklook', '[P|p]review.*'))
assert(preview_list) # Assert there's at least one preview file
preview_path = preview_list[0] # Simplistically, assume there's only one
with NamedTemporaryFile(prefix='pyglass', suffix=extension(preview_path), delete=False) as tempfileobj:
dest_path = tempfileobj.name
shutil.copy(preview_path, dest_path)
assert(exists(dest_path))
return dest_path
except:
return None
|
Returns path to temporary copy of embedded QuickLook preview, if it exists
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/quicklook/export.py#L20-L36
|
[
"def extension(path_str):\n ''' Returns lowercased file extension for the path '''\n return os.path.splitext(path_str)[1].lower()\n"
] |
# -*- coding: utf-8 -*-
# Default libs
import shutil
from tempfile import NamedTemporaryFile, mkdtemp
from os.path import isdir, exists, join, basename
from glob import glob
# Library modules
from pxprocess import check_call
# Project modules
from ..settings import QLMANAGE
from ..utils import extension
############################################################
# DONT CALL DIRECTLY
############################################################
def generator_preview(src_path):
''' Returns path to the preview created by the generator '''
try:
assert(exists(src_path))
dest_dir = mkdtemp(prefix='pyglass')
cmd = [QLMANAGE, '-p', src_path, '-o', dest_dir]
assert(check_call(cmd) == 0)
src_filename = basename(src_path)
dest_list = glob(join(dest_dir, '%s.qlpreview' % (src_filename), '[P|p]review.*'))
assert(dest_list)
dest_path = dest_list[0]
assert(exists(dest_path))
return dest_path
except:
return None
def thumbnail_preview(src_path):
''' Returns the path to small thumbnail preview. '''
try:
assert(exists(src_path))
width = '1980'
dest_dir = mkdtemp(prefix='pyglass')
cmd = [QLMANAGE, '-t', '-s', width, src_path, '-o', dest_dir]
assert(check_call(cmd) == 0)
src_filename = basename(src_path)
dest_list = glob(join(dest_dir, '%s.png' % (src_filename)))
assert(dest_list)
dest_path = dest_list[0]
assert(exists(dest_path))
return dest_path
except:
return None
|
Pixelapse/pyglass
|
pyglass/quicklook/export.py
|
thumbnail_preview
|
python
|
def thumbnail_preview(src_path):
''' Returns the path to small thumbnail preview. '''
try:
assert(exists(src_path))
width = '1980'
dest_dir = mkdtemp(prefix='pyglass')
cmd = [QLMANAGE, '-t', '-s', width, src_path, '-o', dest_dir]
assert(check_call(cmd) == 0)
src_filename = basename(src_path)
dest_list = glob(join(dest_dir, '%s.png' % (src_filename)))
assert(dest_list)
dest_path = dest_list[0]
assert(exists(dest_path))
return dest_path
except:
return None
|
Returns the path to small thumbnail preview.
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/quicklook/export.py#L60-L80
| null |
# -*- coding: utf-8 -*-
# Default libs
import shutil
from tempfile import NamedTemporaryFile, mkdtemp
from os.path import isdir, exists, join, basename
from glob import glob
# Library modules
from pxprocess import check_call
# Project modules
from ..settings import QLMANAGE
from ..utils import extension
############################################################
# DONT CALL DIRECTLY
############################################################
def embedded_preview(src_path):
''' Returns path to temporary copy of embedded QuickLook preview, if it exists '''
try:
assert(exists(src_path) and isdir(src_path))
preview_list = glob(join(src_path, '[Q|q]uicklook', '[P|p]review.*'))
assert(preview_list) # Assert there's at least one preview file
preview_path = preview_list[0] # Simplistically, assume there's only one
with NamedTemporaryFile(prefix='pyglass', suffix=extension(preview_path), delete=False) as tempfileobj:
dest_path = tempfileobj.name
shutil.copy(preview_path, dest_path)
assert(exists(dest_path))
return dest_path
except:
return None
def generator_preview(src_path):
''' Returns path to the preview created by the generator '''
try:
assert(exists(src_path))
dest_dir = mkdtemp(prefix='pyglass')
cmd = [QLMANAGE, '-p', src_path, '-o', dest_dir]
assert(check_call(cmd) == 0)
src_filename = basename(src_path)
dest_list = glob(join(dest_dir, '%s.qlpreview' % (src_filename), '[P|p]review.*'))
assert(dest_list)
dest_path = dest_list[0]
assert(exists(dest_path))
return dest_path
except:
return None
|
Pixelapse/pyglass
|
pyglass/sketch/export.py
|
export_cmd
|
python
|
def export_cmd(cmd, src_path, dest_dir=None, item_id=None, export_format=None, scale=None):
''' Executes a `sketchtool export` command and returns formatted output
:src_path: File to export. :type <str>
:dest_dir: Items are exported at /dest_dir/name@scale.export_format e.g. `~/Desktop/Page 1@2x.png`
:param export_format: 'png', 'pdf' etc. :type <ExportFormat>
:param scale: Specify as 1.0, 2.0 etc. :type <float>
:param item_id: id or name of an Exportable :type <str>
:returns: list of exported item paths
'''
cmd.extend([src_path])
if not dest_dir:
dest_dir = mkdtemp(prefix='pyglass')
cmd.extend(['--output=%s' % dest_dir])
if export_format:
cmd.extend(['--formats=%s' % export_format])
if scale:
cmd.extend(['--scales=%s' % scale])
if item_id:
cmd.extend(['--items=%s' % item_id])
logger.debug(u'Executing cmd: %s' % cmd)
exported_str = execute(cmd)
logger.debug(u'Raw result: %s' % exported_str)
# Raw result is in the form: 'Exported <item-name-1>\nExported <item-name-2>\n'
exported_items = [os.path.join(dirpath, f)
for dirpath, dirnames, files in os.walk(dest_dir)
for f in files if f.endswith('.%s' % export_format)]
return exported_items
|
Executes a `sketchtool export` command and returns formatted output
:src_path: File to export. :type <str>
:dest_dir: Items are exported at /dest_dir/name@scale.export_format e.g. `~/Desktop/Page 1@2x.png`
:param export_format: 'png', 'pdf' etc. :type <ExportFormat>
:param scale: Specify as 1.0, 2.0 etc. :type <float>
:param item_id: id or name of an Exportable :type <str>
:returns: list of exported item paths
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/sketch/export.py#L19-L54
|
[
"def execute(cmd):\n ''' Call cmd and return output. return None if any exception occurs '''\n try:\n return safely_decode(check_output(cmd))\n except Exception as e:\n logger.warn(u'Couldnt execute cmd: %s.\\nReason: %s' % (cmd, e))\n return None\n"
] |
# -*- coding: utf-8 -*-
# Default libs
import logging
import os
from glob import glob
from tempfile import mkdtemp
# Project modules
from ..settings import SKETCHTOOL
from ..utils import execute
logger = logging.getLogger(__name__)
############################################################
# EXPORT COMMANDS - PASSTHROUGH TO SKETCHTOOL EXPORT
############################################################
def export_slices(*args, **kwargs):
cmd = [SKETCHTOOL, 'export', 'slices']
return export_cmd(cmd, *args, **kwargs)
def export_artboards(*args, **kwargs):
cmd = [SKETCHTOOL, 'export', 'artboards']
return export_cmd(cmd, *args, **kwargs)
def export_pages(*args, **kwargs):
''' Exports pages from src_path in dest_dir in given format and scale.
>>> export_pages('~/example.sketch', dest_dir='~/Desktop/',
export_format=ExportFormat.PNG, scale=1.0)
'''
cmd = [SKETCHTOOL, 'export', 'pages']
return export_cmd(cmd, *args, **kwargs)
|
Pixelapse/pyglass
|
pyglass/sketch/api.py
|
list_cmd
|
python
|
def list_cmd(cmd, src_path):
''' Executes a `sketchtool list` command and parse the output
:cmd: A sketchtool list command :type <list>
:src_path: File to export. :type <str>
:returns: A list of pages. Artboards & slices are included in the page hierarchy
'''
cmd.extend([src_path])
logger.debug(u'Executing cmd: %s' % cmd)
result = execute(cmd)
if not result:
return None
logger.debug(u'Raw result: %s' % result)
list_dict = json.loads(result)
pages = parse_pages(src_path, list_dict)
return pages
|
Executes a `sketchtool list` command and parse the output
:cmd: A sketchtool list command :type <list>
:src_path: File to export. :type <str>
:returns: A list of pages. Artboards & slices are included in the page hierarchy
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/sketch/api.py#L24-L40
|
[
"def execute(cmd):\n ''' Call cmd and return output. return None if any exception occurs '''\n try:\n return safely_decode(check_output(cmd))\n except Exception as e:\n logger.warn(u'Couldnt execute cmd: %s.\\nReason: %s' % (cmd, e))\n return None\n",
"def parse_pages(filename, list_dict):\n from .models import Page\n pages = []\n if 'pages' in list_dict:\n for page_dict in list_dict['pages']:\n pages.append(Page(filename, page_dict))\n return pages\n"
] |
# -*- coding: utf-8 -*-
# Default libs
import json
import logging
# Project modules
from ..settings import SKETCHTOOL
from ..utils import execute, extension
from .parse import parse_pages
logger = logging.getLogger(__name__)
def is_sketchfile(src_path):
''' Returns True if src_path is a sketch file '''
if extension(src_path) == u'.sketch':
return True
return False
############################################################
# LIST COMMANDS - PASSTHROUGH TO SKETCHTOOL LIST
############################################################
def list_slices(src_path):
cmd = [SKETCHTOOL, 'list', 'slices']
return list_cmd(cmd, src_path)
def list_artboards(src_path):
cmd = [SKETCHTOOL, 'list', 'artboards']
return list_cmd(cmd, src_path)
def list_pages(src_path):
cmd = [SKETCHTOOL, 'list', 'pages']
return list_cmd(cmd, src_path)
############################################################
# RETURNS PAGES, ARTBOARDS, SLICES WITH EXPORTED PNGS
############################################################
def pages(src_path):
''' Return pages as flat list '''
pages = list_pages(src_path)
return pages
def slices(src_path):
''' Return slices as a flat list '''
pages = list_slices(src_path)
slices = []
for page in pages:
slices.extend(page.slices)
return slices
def artboards(src_path):
''' Return artboards as a flat list '''
pages = list_artboards(src_path)
artboards = []
for page in pages:
artboards.extend(page.artboards)
return artboards
############################################################
# SIMPLE IMAGE PREVIEW OF FILE
############################################################
def preview(src_path):
''' Generates a preview of src_path as PNG.
:returns: A list of preview paths, one for each page.
'''
previews = []
for page in list_artboards(src_path):
previews.append(page.export())
for artboard in page.artboards:
previews.append(artboard.export())
return previews
|
Pixelapse/pyglass
|
pyglass/sketch/api.py
|
slices
|
python
|
def slices(src_path):
''' Return slices as a flat list '''
pages = list_slices(src_path)
slices = []
for page in pages:
slices.extend(page.slices)
return slices
|
Return slices as a flat list
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/sketch/api.py#L67-L73
|
[
"def list_slices(src_path):\n cmd = [SKETCHTOOL, 'list', 'slices']\n return list_cmd(cmd, src_path)\n"
] |
# -*- coding: utf-8 -*-
# Default libs
import json
import logging
# Project modules
from ..settings import SKETCHTOOL
from ..utils import execute, extension
from .parse import parse_pages
logger = logging.getLogger(__name__)
def is_sketchfile(src_path):
''' Returns True if src_path is a sketch file '''
if extension(src_path) == u'.sketch':
return True
return False
############################################################
# LIST COMMANDS - PASSTHROUGH TO SKETCHTOOL LIST
############################################################
def list_cmd(cmd, src_path):
''' Executes a `sketchtool list` command and parse the output
:cmd: A sketchtool list command :type <list>
:src_path: File to export. :type <str>
:returns: A list of pages. Artboards & slices are included in the page hierarchy
'''
cmd.extend([src_path])
logger.debug(u'Executing cmd: %s' % cmd)
result = execute(cmd)
if not result:
return None
logger.debug(u'Raw result: %s' % result)
list_dict = json.loads(result)
pages = parse_pages(src_path, list_dict)
return pages
def list_slices(src_path):
cmd = [SKETCHTOOL, 'list', 'slices']
return list_cmd(cmd, src_path)
def list_artboards(src_path):
cmd = [SKETCHTOOL, 'list', 'artboards']
return list_cmd(cmd, src_path)
def list_pages(src_path):
cmd = [SKETCHTOOL, 'list', 'pages']
return list_cmd(cmd, src_path)
############################################################
# RETURNS PAGES, ARTBOARDS, SLICES WITH EXPORTED PNGS
############################################################
def pages(src_path):
''' Return pages as flat list '''
pages = list_pages(src_path)
return pages
def artboards(src_path):
''' Return artboards as a flat list '''
pages = list_artboards(src_path)
artboards = []
for page in pages:
artboards.extend(page.artboards)
return artboards
############################################################
# SIMPLE IMAGE PREVIEW OF FILE
############################################################
def preview(src_path):
''' Generates a preview of src_path as PNG.
:returns: A list of preview paths, one for each page.
'''
previews = []
for page in list_artboards(src_path):
previews.append(page.export())
for artboard in page.artboards:
previews.append(artboard.export())
return previews
|
Pixelapse/pyglass
|
pyglass/sketch/api.py
|
artboards
|
python
|
def artboards(src_path):
''' Return artboards as a flat list '''
pages = list_artboards(src_path)
artboards = []
for page in pages:
artboards.extend(page.artboards)
return artboards
|
Return artboards as a flat list
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/sketch/api.py#L76-L82
|
[
"def list_artboards(src_path):\n cmd = [SKETCHTOOL, 'list', 'artboards']\n return list_cmd(cmd, src_path)\n"
] |
# -*- coding: utf-8 -*-
# Default libs
import json
import logging
# Project modules
from ..settings import SKETCHTOOL
from ..utils import execute, extension
from .parse import parse_pages
logger = logging.getLogger(__name__)
def is_sketchfile(src_path):
''' Returns True if src_path is a sketch file '''
if extension(src_path) == u'.sketch':
return True
return False
############################################################
# LIST COMMANDS - PASSTHROUGH TO SKETCHTOOL LIST
############################################################
def list_cmd(cmd, src_path):
''' Executes a `sketchtool list` command and parse the output
:cmd: A sketchtool list command :type <list>
:src_path: File to export. :type <str>
:returns: A list of pages. Artboards & slices are included in the page hierarchy
'''
cmd.extend([src_path])
logger.debug(u'Executing cmd: %s' % cmd)
result = execute(cmd)
if not result:
return None
logger.debug(u'Raw result: %s' % result)
list_dict = json.loads(result)
pages = parse_pages(src_path, list_dict)
return pages
def list_slices(src_path):
cmd = [SKETCHTOOL, 'list', 'slices']
return list_cmd(cmd, src_path)
def list_artboards(src_path):
cmd = [SKETCHTOOL, 'list', 'artboards']
return list_cmd(cmd, src_path)
def list_pages(src_path):
cmd = [SKETCHTOOL, 'list', 'pages']
return list_cmd(cmd, src_path)
############################################################
# RETURNS PAGES, ARTBOARDS, SLICES WITH EXPORTED PNGS
############################################################
def pages(src_path):
''' Return pages as flat list '''
pages = list_pages(src_path)
return pages
def slices(src_path):
''' Return slices as a flat list '''
pages = list_slices(src_path)
slices = []
for page in pages:
slices.extend(page.slices)
return slices
############################################################
# SIMPLE IMAGE PREVIEW OF FILE
############################################################
def preview(src_path):
''' Generates a preview of src_path as PNG.
:returns: A list of preview paths, one for each page.
'''
previews = []
for page in list_artboards(src_path):
previews.append(page.export())
for artboard in page.artboards:
previews.append(artboard.export())
return previews
|
Pixelapse/pyglass
|
pyglass/sketch/api.py
|
preview
|
python
|
def preview(src_path):
''' Generates a preview of src_path as PNG.
:returns: A list of preview paths, one for each page.
'''
previews = []
for page in list_artboards(src_path):
previews.append(page.export())
for artboard in page.artboards:
previews.append(artboard.export())
return previews
|
Generates a preview of src_path as PNG.
:returns: A list of preview paths, one for each page.
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/sketch/api.py#L88-L97
|
[
"def list_artboards(src_path):\n cmd = [SKETCHTOOL, 'list', 'artboards']\n return list_cmd(cmd, src_path)\n"
] |
# -*- coding: utf-8 -*-
# Default libs
import json
import logging
# Project modules
from ..settings import SKETCHTOOL
from ..utils import execute, extension
from .parse import parse_pages
logger = logging.getLogger(__name__)
def is_sketchfile(src_path):
''' Returns True if src_path is a sketch file '''
if extension(src_path) == u'.sketch':
return True
return False
############################################################
# LIST COMMANDS - PASSTHROUGH TO SKETCHTOOL LIST
############################################################
def list_cmd(cmd, src_path):
''' Executes a `sketchtool list` command and parse the output
:cmd: A sketchtool list command :type <list>
:src_path: File to export. :type <str>
:returns: A list of pages. Artboards & slices are included in the page hierarchy
'''
cmd.extend([src_path])
logger.debug(u'Executing cmd: %s' % cmd)
result = execute(cmd)
if not result:
return None
logger.debug(u'Raw result: %s' % result)
list_dict = json.loads(result)
pages = parse_pages(src_path, list_dict)
return pages
def list_slices(src_path):
cmd = [SKETCHTOOL, 'list', 'slices']
return list_cmd(cmd, src_path)
def list_artboards(src_path):
cmd = [SKETCHTOOL, 'list', 'artboards']
return list_cmd(cmd, src_path)
def list_pages(src_path):
cmd = [SKETCHTOOL, 'list', 'pages']
return list_cmd(cmd, src_path)
############################################################
# RETURNS PAGES, ARTBOARDS, SLICES WITH EXPORTED PNGS
############################################################
def pages(src_path):
''' Return pages as flat list '''
pages = list_pages(src_path)
return pages
def slices(src_path):
''' Return slices as a flat list '''
pages = list_slices(src_path)
slices = []
for page in pages:
slices.extend(page.slices)
return slices
def artboards(src_path):
''' Return artboards as a flat list '''
pages = list_artboards(src_path)
artboards = []
for page in pages:
artboards.extend(page.artboards)
return artboards
############################################################
# SIMPLE IMAGE PREVIEW OF FILE
############################################################
|
Pixelapse/pyglass
|
pyglass/quicklook/api.py
|
is_valid_preview
|
python
|
def is_valid_preview(preview):
''' Verifies that the preview is a valid filetype '''
if not preview:
return False
if mimetype(preview) not in [ExportMimeType.PNG, ExportMimeType.PDF]:
return False
return True
|
Verifies that the preview is a valid filetype
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/quicklook/api.py#L13-L21
|
[
"def mimetype(path_str):\n ''' Returns the mimetype of the file at path_str. Depends on OS X's `file` util '''\n return execute(['file', '--mime-type', '--brief', path_str]).strip().lower()\n"
] |
# -*- coding: utf-8 -*-
# Default libs
# Installed libs
# Project modules
from ..models import ExportMimeType
from ..pdf import to_pngs
from ..utils import mimetype
from .export import embedded_preview, generator_preview, thumbnail_preview
def preview(src_path):
''' Generates a preview of src_path in the requested format.
:returns: A list of preview paths, one for each page. Blank list if unsupported.
'''
preview = embedded_preview(src_path)
if not is_valid_preview(preview):
preview = generator_preview(src_path)
if not is_valid_preview(preview):
preview = thumbnail_preview(src_path)
# Ensure the preview is returned in the right format
if is_valid_preview(preview):
if mimetype(preview) in [ExportMimeType.PNG]:
return [preview]
if mimetype(preview) in [ExportMimeType.PDF]:
return to_pngs(preview)
return []
|
Pixelapse/pyglass
|
pyglass/quicklook/api.py
|
preview
|
python
|
def preview(src_path):
''' Generates a preview of src_path in the requested format.
:returns: A list of preview paths, one for each page. Blank list if unsupported.
'''
preview = embedded_preview(src_path)
if not is_valid_preview(preview):
preview = generator_preview(src_path)
if not is_valid_preview(preview):
preview = thumbnail_preview(src_path)
# Ensure the preview is returned in the right format
if is_valid_preview(preview):
if mimetype(preview) in [ExportMimeType.PNG]:
return [preview]
if mimetype(preview) in [ExportMimeType.PDF]:
return to_pngs(preview)
return []
|
Generates a preview of src_path in the requested format.
:returns: A list of preview paths, one for each page. Blank list if unsupported.
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/quicklook/api.py#L24-L44
|
[
"def to_pngs(pdf_path):\n ''' Converts a multi-page pdfs to a list of pngs via the `sips` command\n :returns: A list of converted pngs\n '''\n pdf_list = split_pdf(pdf_path)\n pngs = []\n for pdf in pdf_list:\n pngs.append(to_png(pdf))\n os.remove(pdf) # Clean up\n return pngs\n",
"def mimetype(path_str):\n ''' Returns the mimetype of the file at path_str. Depends on OS X's `file` util '''\n return execute(['file', '--mime-type', '--brief', path_str]).strip().lower()\n",
"def embedded_preview(src_path):\n ''' Returns path to temporary copy of embedded QuickLook preview, if it exists '''\n try:\n assert(exists(src_path) and isdir(src_path))\n\n preview_list = glob(join(src_path, '[Q|q]uicklook', '[P|p]review.*'))\n assert(preview_list) # Assert there's at least one preview file\n preview_path = preview_list[0] # Simplistically, assume there's only one\n\n with NamedTemporaryFile(prefix='pyglass', suffix=extension(preview_path), delete=False) as tempfileobj:\n dest_path = tempfileobj.name\n shutil.copy(preview_path, dest_path)\n\n assert(exists(dest_path))\n return dest_path\n except:\n return None\n",
"def generator_preview(src_path):\n ''' Returns path to the preview created by the generator '''\n try:\n assert(exists(src_path))\n\n dest_dir = mkdtemp(prefix='pyglass')\n cmd = [QLMANAGE, '-p', src_path, '-o', dest_dir]\n assert(check_call(cmd) == 0)\n\n src_filename = basename(src_path)\n dest_list = glob(join(dest_dir, '%s.qlpreview' % (src_filename), '[P|p]review.*'))\n assert(dest_list)\n\n dest_path = dest_list[0]\n assert(exists(dest_path))\n\n return dest_path\n except:\n return None\n",
"def thumbnail_preview(src_path):\n ''' Returns the path to small thumbnail preview. '''\n try:\n assert(exists(src_path))\n\n width = '1980'\n\n dest_dir = mkdtemp(prefix='pyglass')\n cmd = [QLMANAGE, '-t', '-s', width, src_path, '-o', dest_dir]\n assert(check_call(cmd) == 0)\n\n src_filename = basename(src_path)\n dest_list = glob(join(dest_dir, '%s.png' % (src_filename)))\n assert(dest_list)\n\n dest_path = dest_list[0]\n assert(exists(dest_path))\n\n return dest_path\n except:\n return None\n",
"def is_valid_preview(preview):\n ''' Verifies that the preview is a valid filetype '''\n if not preview:\n return False\n\n if mimetype(preview) not in [ExportMimeType.PNG, ExportMimeType.PDF]:\n return False\n\n return True\n"
] |
# -*- coding: utf-8 -*-
# Default libs
# Installed libs
# Project modules
from ..models import ExportMimeType
from ..pdf import to_pngs
from ..utils import mimetype
from .export import embedded_preview, generator_preview, thumbnail_preview
def is_valid_preview(preview):
''' Verifies that the preview is a valid filetype '''
if not preview:
return False
if mimetype(preview) not in [ExportMimeType.PNG, ExportMimeType.PDF]:
return False
return True
|
Pixelapse/pyglass
|
pyglass/utils.py
|
execute
|
python
|
def execute(cmd):
''' Call cmd and return output. return None if any exception occurs '''
try:
return safely_decode(check_output(cmd))
except Exception as e:
logger.warn(u'Couldnt execute cmd: %s.\nReason: %s' % (cmd, e))
return None
|
Call cmd and return output. return None if any exception occurs
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/utils.py#L13-L19
| null |
# -*- coding: utf-8 -*-
# Default libs
import logging
import os
# Library modules
from pxprocess import check_output
from pyunicode import safely_decode
logger = logging.getLogger(__name__)
def unicode_or_none(dictionary, key):
if dictionary is None or key is None:
return None
return None if key not in dictionary or dictionary[key] is None else unicode(dictionary[key])
def extension(path_str):
''' Returns lowercased file extension for the path '''
return os.path.splitext(path_str)[1].lower()
def mimetype(path_str):
''' Returns the mimetype of the file at path_str. Depends on OS X's `file` util '''
return execute(['file', '--mime-type', '--brief', path_str]).strip().lower()
|
Pixelapse/pyglass
|
setup.py
|
rm_tempdirs
|
python
|
def rm_tempdirs():
''' Remove temporary build folders '''
tempdirs = [Dir.BUILD, Dir.COCOA_BUILD, Dir.LIB]
for tempdir in tempdirs:
if os.path.exists(tempdir):
shutil.rmtree(tempdir, ignore_errors=True)
|
Remove temporary build folders
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/setup.py#L38-L43
| null |
#!/usr/bin/env python
# System modules
import sys
import os
import shutil
import platform
from os.path import join
# Library modules
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
from distutils.dir_util import copy_tree
# Package modules
if sys.version_info[:2] < (2, 7):
print "Sorry, pyglass requires python version 2.7 or later"
sys.exit(1)
if platform.system() != 'Darwin':
print "Sorry, pyglass only runs on OS X"
sys.exit(1)
class Dir:
BUILD = 'build'
DIST = 'dist'
COCOA = 'cocoa'
COCOA_BUILD = join(COCOA, 'build')
LIB = join('pyglass', 'lib') # Destination directory for vendor/custom libs
VENDOR = 'vendor' # Third-party libraries
def copy_vendor_libs():
''' Copies third party vendor libs into the module '''
copy_tree('%s/' % Dir.VENDOR, '%s/' % Dir.LIB)
def lib_list():
''' Returns the contents of 'pyglass/lib' as a list of 'lib/*' items for package_data '''
lib_list = []
for (root, dirs, files) in os.walk(Dir.LIB):
for filename in files:
root = root.replace('pyglass/', '')
lib_list.append(join(root, filename))
return lib_list
# Compile custom project
rm_tempdirs()
# Copy over libs into Dir.LIB
os.makedirs(Dir.LIB)
copy_vendor_libs()
package_libs = lib_list()
version = '0.1.2'
setup(
name='pyglass',
version=version,
url='http://github.com/Pixelapse/pyglass',
download_url='https://github.com/Pixelapse/pyglass/tarball/v%s' % version,
description='Mac OS X File Preview Generator',
long_description=open('README.md').read(),
author='Shravan Reddy',
author_email='shravan@pixelapse.com',
maintainer='Pixelapse',
maintainer_email='hello@pixelapse.com',
packages=find_packages(),
package_data={'': package_libs},
install_requires=['pxprocess', 'pyunicode', 'PyPDF2'],
include_package_data=True,
zip_safe=False,
license=open('LICENSE').read()
)
rm_tempdirs()
|
Pixelapse/pyglass
|
setup.py
|
lib_list
|
python
|
def lib_list():
''' Returns the contents of 'pyglass/lib' as a list of 'lib/*' items for package_data '''
lib_list = []
for (root, dirs, files) in os.walk(Dir.LIB):
for filename in files:
root = root.replace('pyglass/', '')
lib_list.append(join(root, filename))
return lib_list
|
Returns the contents of 'pyglass/lib' as a list of 'lib/*' items for package_data
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/setup.py#L51-L58
| null |
#!/usr/bin/env python
# System modules
import sys
import os
import shutil
import platform
from os.path import join
# Library modules
try:
from setuptools import setup, find_packages
except ImportError:
from distutils.core import setup
from distutils.dir_util import copy_tree
# Package modules
if sys.version_info[:2] < (2, 7):
print "Sorry, pyglass requires python version 2.7 or later"
sys.exit(1)
if platform.system() != 'Darwin':
print "Sorry, pyglass only runs on OS X"
sys.exit(1)
class Dir:
BUILD = 'build'
DIST = 'dist'
COCOA = 'cocoa'
COCOA_BUILD = join(COCOA, 'build')
LIB = join('pyglass', 'lib') # Destination directory for vendor/custom libs
VENDOR = 'vendor' # Third-party libraries
def rm_tempdirs():
''' Remove temporary build folders '''
tempdirs = [Dir.BUILD, Dir.COCOA_BUILD, Dir.LIB]
for tempdir in tempdirs:
if os.path.exists(tempdir):
shutil.rmtree(tempdir, ignore_errors=True)
def copy_vendor_libs():
''' Copies third party vendor libs into the module '''
copy_tree('%s/' % Dir.VENDOR, '%s/' % Dir.LIB)
# Compile custom project
rm_tempdirs()
# Copy over libs into Dir.LIB
os.makedirs(Dir.LIB)
copy_vendor_libs()
package_libs = lib_list()
version = '0.1.2'
setup(
name='pyglass',
version=version,
url='http://github.com/Pixelapse/pyglass',
download_url='https://github.com/Pixelapse/pyglass/tarball/v%s' % version,
description='Mac OS X File Preview Generator',
long_description=open('README.md').read(),
author='Shravan Reddy',
author_email='shravan@pixelapse.com',
maintainer='Pixelapse',
maintainer_email='hello@pixelapse.com',
packages=find_packages(),
package_data={'': package_libs},
install_requires=['pxprocess', 'pyunicode', 'PyPDF2'],
include_package_data=True,
zip_safe=False,
license=open('LICENSE').read()
)
rm_tempdirs()
|
Pixelapse/pyglass
|
pyglass/quicklook/models.py
|
Page
|
python
|
def Page(QLExportable):
''' For multi-page files, e.g. if pdf preview '''
def __init__(self, filename, page_id):
self.id = page_id
super(Page, self).__init__(filename)
def export(self, export_format=ExportFormat.PNG):
pass
|
For multi-page files, e.g. if pdf preview
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/quicklook/models.py#L17-L24
| null |
# -*- coding: utf-8 -*-
# Project modules
from ..models import Exportable, ExportFormat
class QLExportable(Exportable):
''' Base class for any exportable QuickLook item '''
def __init__(self, filename):
self.filename = filename
super(QLExportable, self).__init__()
def __unicode__(self):
return u'<QLExportable>'
|
Pixelapse/pyglass
|
pyglass/api.py
|
preview
|
python
|
def preview(src_path):
''' Generates a preview of src_path in the requested format.
:returns: A list of preview paths, one for each page.
'''
previews = []
if sketch.is_sketchfile(src_path):
previews = sketch.preview(src_path)
if not previews:
previews = quicklook.preview(src_path)
previews = [safely_decode(preview) for preview in previews]
return previews
|
Generates a preview of src_path in the requested format.
:returns: A list of preview paths, one for each page.
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/api.py#L9-L23
|
[
"def preview(src_path):\n ''' Generates a preview of src_path in the requested format.\n :returns: A list of preview paths, one for each page. Blank list if unsupported.\n '''\n preview = embedded_preview(src_path)\n\n if not is_valid_preview(preview):\n preview = generator_preview(src_path)\n\n if not is_valid_preview(preview):\n preview = thumbnail_preview(src_path)\n\n # Ensure the preview is returned in the right format\n if is_valid_preview(preview):\n if mimetype(preview) in [ExportMimeType.PNG]:\n return [preview]\n\n if mimetype(preview) in [ExportMimeType.PDF]:\n return to_pngs(preview)\n\n return []\n",
"def is_sketchfile(src_path):\n ''' Returns True if src_path is a sketch file '''\n if extension(src_path) == u'.sketch':\n return True\n return False\n"
] |
# -*- coding: utf-8 -*-
# Library modules
from pyunicode import safely_decode
# Project modules
from . import quicklook, sketch
|
Pixelapse/pyglass
|
pyglass/settings.py
|
make_executable
|
python
|
def make_executable(path_str):
''' Performs the equivalent of `chmod +x` on the file at path_str.
:returns: path_str if success, else None
'''
try:
mode = os.stat(path_str).st_mode
os.chmod(path_str, mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
except Exception as e:
print 'Exception: %s' % e
return None
return path_str
|
Performs the equivalent of `chmod +x` on the file at path_str.
:returns: path_str if success, else None
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/settings.py#L11-L21
| null |
# -*- coding: utf-8 -*-
# Default libs
import os
import stat
from os.path import dirname, join, abspath
curr_dir = dirname(abspath(__file__))
def sketchtool_executable():
make_executable(join(curr_dir, 'lib', 'SketchTool', 'sketchmigrate'))
return make_executable(join(curr_dir, 'lib', 'SketchTool', 'sketchtool'))
SKETCHTOOL = sketchtool_executable()
QLMANAGE = join('/usr', 'bin', 'qlmanage')
|
Pixelapse/pyglass
|
pyglass/pdf/api.py
|
stitch_pdfs
|
python
|
def stitch_pdfs(pdf_list):
''' Merges a series of single page pdfs into one multi-page doc '''
pdf_merger = PdfFileMerger()
for pdf in pdf_list:
pdf_merger.append(pdf)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
dest_path = tempfileobj.name
pdf_merger.write(dest_path)
pdf_merger.close()
return dest_path
|
Merges a series of single page pdfs into one multi-page doc
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/pdf/api.py#L16-L27
| null |
# -*- coding: utf-8 -*-
# Default libs
import os
from tempfile import NamedTemporaryFile
from os.path import exists
# Library modules
from PyPDF2 import PdfFileMerger, PdfFileReader, PdfFileWriter
from pxprocess import check_call
############################################################
# PDF CLASSES
############################################################
def split_pdf(pdf_path):
''' Splits a multi-page pdf into a list of single page pdfs '''
pdf = PdfFileReader(pdf_path)
pdf_list = []
for page_num in range(pdf.numPages):
page = pdf.getPage(page_num)
pdf_writer = PdfFileWriter()
pdf_writer.addPage(page)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
pdf_writer.write(tempfileobj)
page_path = tempfileobj.name
pdf_list.append(page_path)
return pdf_list
def to_png(pdf_path):
''' Converts a single-page pdf to a png image via the `sips` command
:returns: Path to the converted png
'''
try:
with NamedTemporaryFile(prefix='pyglass', suffix='.png', delete=False) as tempfileobj:
png_path = tempfileobj.name
cmd = ['sips', '-s', 'format', 'png', pdf_path, '--out', png_path]
assert(check_call(cmd) == 0)
assert(exists(png_path))
return png_path
except:
return None
def to_pngs(pdf_path):
''' Converts a multi-page pdfs to a list of pngs via the `sips` command
:returns: A list of converted pngs
'''
pdf_list = split_pdf(pdf_path)
pngs = []
for pdf in pdf_list:
pngs.append(to_png(pdf))
os.remove(pdf) # Clean up
return pngs
|
Pixelapse/pyglass
|
pyglass/pdf/api.py
|
split_pdf
|
python
|
def split_pdf(pdf_path):
''' Splits a multi-page pdf into a list of single page pdfs '''
pdf = PdfFileReader(pdf_path)
pdf_list = []
for page_num in range(pdf.numPages):
page = pdf.getPage(page_num)
pdf_writer = PdfFileWriter()
pdf_writer.addPage(page)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
pdf_writer.write(tempfileobj)
page_path = tempfileobj.name
pdf_list.append(page_path)
return pdf_list
|
Splits a multi-page pdf into a list of single page pdfs
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/pdf/api.py#L30-L47
| null |
# -*- coding: utf-8 -*-
# Default libs
import os
from tempfile import NamedTemporaryFile
from os.path import exists
# Library modules
from PyPDF2 import PdfFileMerger, PdfFileReader, PdfFileWriter
from pxprocess import check_call
############################################################
# PDF CLASSES
############################################################
def stitch_pdfs(pdf_list):
''' Merges a series of single page pdfs into one multi-page doc '''
pdf_merger = PdfFileMerger()
for pdf in pdf_list:
pdf_merger.append(pdf)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
dest_path = tempfileobj.name
pdf_merger.write(dest_path)
pdf_merger.close()
return dest_path
def to_png(pdf_path):
''' Converts a single-page pdf to a png image via the `sips` command
:returns: Path to the converted png
'''
try:
with NamedTemporaryFile(prefix='pyglass', suffix='.png', delete=False) as tempfileobj:
png_path = tempfileobj.name
cmd = ['sips', '-s', 'format', 'png', pdf_path, '--out', png_path]
assert(check_call(cmd) == 0)
assert(exists(png_path))
return png_path
except:
return None
def to_pngs(pdf_path):
''' Converts a multi-page pdfs to a list of pngs via the `sips` command
:returns: A list of converted pngs
'''
pdf_list = split_pdf(pdf_path)
pngs = []
for pdf in pdf_list:
pngs.append(to_png(pdf))
os.remove(pdf) # Clean up
return pngs
|
Pixelapse/pyglass
|
pyglass/pdf/api.py
|
to_png
|
python
|
def to_png(pdf_path):
''' Converts a single-page pdf to a png image via the `sips` command
:returns: Path to the converted png
'''
try:
with NamedTemporaryFile(prefix='pyglass', suffix='.png', delete=False) as tempfileobj:
png_path = tempfileobj.name
cmd = ['sips', '-s', 'format', 'png', pdf_path, '--out', png_path]
assert(check_call(cmd) == 0)
assert(exists(png_path))
return png_path
except:
return None
|
Converts a single-page pdf to a png image via the `sips` command
:returns: Path to the converted png
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/pdf/api.py#L50-L64
| null |
# -*- coding: utf-8 -*-
# Default libs
import os
from tempfile import NamedTemporaryFile
from os.path import exists
# Library modules
from PyPDF2 import PdfFileMerger, PdfFileReader, PdfFileWriter
from pxprocess import check_call
############################################################
# PDF CLASSES
############################################################
def stitch_pdfs(pdf_list):
''' Merges a series of single page pdfs into one multi-page doc '''
pdf_merger = PdfFileMerger()
for pdf in pdf_list:
pdf_merger.append(pdf)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
dest_path = tempfileobj.name
pdf_merger.write(dest_path)
pdf_merger.close()
return dest_path
def split_pdf(pdf_path):
''' Splits a multi-page pdf into a list of single page pdfs '''
pdf = PdfFileReader(pdf_path)
pdf_list = []
for page_num in range(pdf.numPages):
page = pdf.getPage(page_num)
pdf_writer = PdfFileWriter()
pdf_writer.addPage(page)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
pdf_writer.write(tempfileobj)
page_path = tempfileobj.name
pdf_list.append(page_path)
return pdf_list
def to_pngs(pdf_path):
''' Converts a multi-page pdfs to a list of pngs via the `sips` command
:returns: A list of converted pngs
'''
pdf_list = split_pdf(pdf_path)
pngs = []
for pdf in pdf_list:
pngs.append(to_png(pdf))
os.remove(pdf) # Clean up
return pngs
|
Pixelapse/pyglass
|
pyglass/pdf/api.py
|
to_pngs
|
python
|
def to_pngs(pdf_path):
''' Converts a multi-page pdfs to a list of pngs via the `sips` command
:returns: A list of converted pngs
'''
pdf_list = split_pdf(pdf_path)
pngs = []
for pdf in pdf_list:
pngs.append(to_png(pdf))
os.remove(pdf) # Clean up
return pngs
|
Converts a multi-page pdfs to a list of pngs via the `sips` command
:returns: A list of converted pngs
|
train
|
https://github.com/Pixelapse/pyglass/blob/83cd0ff2b0b7cdaf4ec6f54559a626e67455cd33/pyglass/pdf/api.py#L67-L76
|
[
"def split_pdf(pdf_path):\n ''' Splits a multi-page pdf into a list of single page pdfs '''\n pdf = PdfFileReader(pdf_path)\n pdf_list = []\n\n for page_num in range(pdf.numPages):\n page = pdf.getPage(page_num)\n\n pdf_writer = PdfFileWriter()\n pdf_writer.addPage(page)\n\n with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:\n pdf_writer.write(tempfileobj)\n page_path = tempfileobj.name\n\n pdf_list.append(page_path)\n\n return pdf_list\n",
"def to_png(pdf_path):\n ''' Converts a single-page pdf to a png image via the `sips` command\n :returns: Path to the converted png\n '''\n try:\n with NamedTemporaryFile(prefix='pyglass', suffix='.png', delete=False) as tempfileobj:\n png_path = tempfileobj.name\n\n cmd = ['sips', '-s', 'format', 'png', pdf_path, '--out', png_path]\n assert(check_call(cmd) == 0)\n\n assert(exists(png_path))\n return png_path\n except:\n return None\n"
] |
# -*- coding: utf-8 -*-
# Default libs
import os
from tempfile import NamedTemporaryFile
from os.path import exists
# Library modules
from PyPDF2 import PdfFileMerger, PdfFileReader, PdfFileWriter
from pxprocess import check_call
############################################################
# PDF CLASSES
############################################################
def stitch_pdfs(pdf_list):
''' Merges a series of single page pdfs into one multi-page doc '''
pdf_merger = PdfFileMerger()
for pdf in pdf_list:
pdf_merger.append(pdf)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
dest_path = tempfileobj.name
pdf_merger.write(dest_path)
pdf_merger.close()
return dest_path
def split_pdf(pdf_path):
''' Splits a multi-page pdf into a list of single page pdfs '''
pdf = PdfFileReader(pdf_path)
pdf_list = []
for page_num in range(pdf.numPages):
page = pdf.getPage(page_num)
pdf_writer = PdfFileWriter()
pdf_writer.addPage(page)
with NamedTemporaryFile(prefix='pyglass', suffix='.pdf', delete=False) as tempfileobj:
pdf_writer.write(tempfileobj)
page_path = tempfileobj.name
pdf_list.append(page_path)
return pdf_list
def to_png(pdf_path):
''' Converts a single-page pdf to a png image via the `sips` command
:returns: Path to the converted png
'''
try:
with NamedTemporaryFile(prefix='pyglass', suffix='.png', delete=False) as tempfileobj:
png_path = tempfileobj.name
cmd = ['sips', '-s', 'format', 'png', pdf_path, '--out', png_path]
assert(check_call(cmd) == 0)
assert(exists(png_path))
return png_path
except:
return None
|
misli/django-cms-articles
|
cms_articles/templatetags/cms_articles.py
|
_get_article_by_untyped_arg
|
python
|
def _get_article_by_untyped_arg(article_lookup, request, site_id):
if article_lookup is None:
return request.current_article
if isinstance(article_lookup, Article):
if hasattr(request, 'current_article') and request.current_article.pk == article_lookup.pk:
return request.current_article
return article_lookup
if isinstance(article_lookup, six.integer_types):
article_lookup = {'pk': article_lookup}
elif not isinstance(article_lookup, dict):
raise TypeError('The article_lookup argument can be either a Dictionary, Integer, or Article.')
article_lookup.update({'site': site_id})
try:
article = Article.objects.all().get(**article_lookup)
if request and use_draft(request):
if article.publisher_is_draft:
return article
else:
return article.publisher_draft
else:
if article.publisher_is_draft:
return article.publisher_public
else:
return article
except Article.DoesNotExist:
site = Site.objects.get_current()
subject = _('Article not found on %(domain)s') % {'domain': site.domain}
body = (
_("A template tag couldn't find the article with lookup arguments `%(article_lookup)s\n`. "
"The URL of the request was: http://%(host)s%(path)s") %
{'article_lookup': repr(article_lookup), 'host': site.domain, 'path': request.path_info}
)
if settings.DEBUG:
raise Article.DoesNotExist(body)
else:
mw = get_middleware()
if getattr(settings, 'SEND_BROKEN_LINK_EMAILS', False):
mail_managers(subject, body, fail_silently=True)
elif 'django.middleware.common.BrokenLinkEmailsMiddleware' in mw:
middle = BrokenLinkEmailsMiddleware()
domain = request.get_host()
path = request.get_full_path()
referer = force_text(request.META.get('HTTP_REFERER', ''), errors='replace')
if not middle.is_ignorable_request(request, path, domain, referer):
mail_managers(subject, body, fail_silently=True)
return None
|
The `article_lookup` argument can be of any of the following types:
- Integer: interpreted as `pk` of the desired article
- `dict`: a dictionary containing keyword arguments to find the desired article
(for instance: `{'pk': 1}`)
- `Article`: you can also pass an Article object directly, in which case there will be no database lookup.
- `None`: the current article will be used
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/templatetags/cms_articles.py#L30-L83
| null |
# -*- coding: utf-8 -*-
from classytags.arguments import Argument, MultiValueArgument
from classytags.core import Options, Tag
from classytags.helpers import AsTag
from cms.exceptions import PlaceholderNotFound
from cms.templatetags.cms_tags import DeclaredPlaceholder, PlaceholderOptions
from cms.toolbar.utils import get_toolbar_from_request
from cms.utils import get_language_from_request, get_site_id
from cms.utils.compat.dj import get_middleware
from cms.utils.moderator import use_draft
from django import template
from django.contrib.sites.models import Site
from django.core.mail import mail_managers
from django.middleware.common import BrokenLinkEmailsMiddleware
from django.utils import six
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.translation import ugettext_lazy as _
from menus.base import NavigationNode
from menus.templatetags.menu_tags import ShowBreadcrumb
from ..conf import settings
from ..models import Article
from ..utils.placeholder import validate_placeholder_name
register = template.Library()
class ArticlePlaceholder(Tag):
"""
This template node is used to output article content and
is also used in the admin to dynamically generate input fields.
eg: {% article_placeholder "placeholder_name" %}
{% article_placeholder "footer" or %}
<a href="/about/">About us</a>
{% endarticle_placeholder %}
Keyword arguments:
name -- the name of the placeholder
or -- optional argument which if given will make the template tag a block
tag whose content is shown if the placeholder is empty
"""
name = 'article_placeholder'
options = PlaceholderOptions(
Argument('name', resolve=False),
MultiValueArgument('extra_bits', required=False, resolve=False),
blocks=[
('endarticle_placeholder', 'nodelist'),
],
)
def render_tag(self, context, name, extra_bits, nodelist=None):
request = context.get('request')
if not request:
return ''
validate_placeholder_name(name)
toolbar = get_toolbar_from_request(request)
renderer = toolbar.get_content_renderer()
inherit = False
try:
content = renderer.render_page_placeholder(
slot=name,
context=context,
inherit=inherit,
page=request.current_article,
nodelist=nodelist,
)
except PlaceholderNotFound:
content = ''
if not content and nodelist:
return nodelist.render(context)
return content
def get_declaration(self):
slot = self.kwargs['name'].var.value.strip('"').strip("'")
return DeclaredPlaceholder(slot=slot, inherit=False)
register.tag('article_placeholder', ArticlePlaceholder)
class ArticleAttribute(AsTag):
"""
This template node is used to output an attribute from a article such
as its title or slug.
Synopsis
{% article_attribute "field-name" %}
{% article_attribute "field-name" as varname %}
{% article_attribute "field-name" article_lookup %}
{% article_attribute "field-name" article_lookup as varname %}
Example
{# Output current article's page_title attribute: #}
{% article_attribute "page_title" %}
{# Output slug attribute of the article with pk 10: #}
{% article_attribute "slug" 10 %}
{# Assign page_title attribute to a variable: #}
{% article_attribute "page_title" as title %}
Keyword arguments:
field-name -- the name of the field to output. Use one of:
- title
- description
- page_title
- slug
- meta_description
- changed_date
- changed_by
- image
article_lookup -- lookup argument for Article, if omitted field-name of current article is returned.
See _get_article_by_untyped_arg() for detailed information on the allowed types and their interpretation
for the article_lookup argument.
varname -- context variable name. Output will be added to template context as this variable.
This argument is required to follow the 'as' keyword.
"""
name = 'article_attribute'
options = Options(
Argument('name', resolve=False),
Argument('article_lookup', required=False, default=None),
'as',
Argument('varname', required=False, resolve=False)
)
valid_attributes = [
"title",
"slug",
"description",
"page_title",
"menu_title",
"meta_description",
"changed_date",
"changed_by",
"image",
]
def get_value(self, context, name, article_lookup):
if 'request' not in context:
return ''
name = name.lower()
request = context['request']
lang = get_language_from_request(request)
article = _get_article_by_untyped_arg(article_lookup, request, get_site_id(None))
if article and name in self.valid_attributes:
func = getattr(article, "get_%s" % name)
ret_val = func(language=lang, fallback=True)
if name not in ("changed_date", "image"):
ret_val = escape(ret_val)
return ret_val
return ''
register.tag('article_attribute', ArticleAttribute)
class ShowArticleBreadcrumb(ShowBreadcrumb):
name = 'show_article_breadcrumb'
def get_context(self, context, start_level, template, only_visible):
context = super(ShowArticleBreadcrumb, self).get_context(context, start_level, template, only_visible)
try:
current_article = context['request'].current_article
except (AttributeError, KeyError):
pass
else:
context['ancestors'].append(NavigationNode(
title=current_article.get_menu_title(),
url=current_article.get_absolute_url(),
id=current_article.pk,
visible=True,
))
return context
register.tag('show_article_breadcrumb', ShowArticleBreadcrumb)
@register.simple_tag(takes_context=True)
def url_page(context, page):
get = context['request'].GET.copy()
get[settings.CMS_ARTICLES_PAGE_FIELD] = page
return '{}?{}'.format(context['request'].path, get.urlencode())
|
misli/django-cms-articles
|
cms_articles/search_indexes.py
|
TitleIndex.get_article_placeholders
|
python
|
def get_article_placeholders(self, article):
    """Return the queryset of *article* placeholders eligible for indexing.

    Honours the optional ``CMS_ARTICLES_PLACEHOLDERS_SEARCH_LIST`` setting,
    a dict with ``include`` / ``exclude`` lists of placeholder slot names.
    """
    search_conf = getattr(settings, 'CMS_ARTICLES_PLACEHOLDERS_SEARCH_LIST', {})
    include_slots = set(search_conf.get('include', []))
    exclude_slots = set(search_conf.get('exclude', []))
    wanted = include_slots - exclude_slots
    if wanted:
        # Explicit whitelist (minus anything also blacklisted).
        return article.placeholders.filter(slot__in=wanted)
    if exclude_slots:
        # No whitelist remains -- fall back to a pure blacklist.
        return article.placeholders.exclude(slot__in=exclude_slots)
    # Neither list configured: index every placeholder.
    return article.placeholders.all()
|
In the project settings set up the variable
CMS_ARTICLES_PLACEHOLDERS_SEARCH_LIST = {
'include': [ 'slot1', 'slot2', etc. ],
'exclude': [ 'slot3', 'slot4', etc. ],
}
or leave it empty
CMS_ARTICLES_PLACEHOLDERS_SEARCH_LIST = {}
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/search_indexes.py#L46-L69
| null |
class TitleIndex(get_index_base()):
    """Haystack search index over article ``Title`` objects."""
    # Index individual titles; only the publish/unpublish admin actions
    # trigger re-indexing (see should_update()).
    index_title = True
    object_actions = ('publish', 'unpublish')
    haystack_use_for_indexing = settings.CMS_ARTICLES_USE_HAYSTACK
    def prepare_pub_date(self, obj):
        # Publication date comes from the owning article, not the title.
        return obj.article.publication_date
    def prepare_login_required(self, obj):
        return obj.article.login_required
    def prepare_site_id(self, obj):
        return obj.article.tree.node.site_id
    def get_language(self, obj):
        return obj.language
    def get_url(self, obj):
        return obj.article.get_absolute_url()
    def get_title(self, obj):
        return obj.title
    def get_description(self, obj):
        # Empty string is normalised to None for the index.
        return obj.meta_description or None
    def get_plugin_queryset(self, language):
        queryset = CMSPlugin.objects.filter(language=language)
        return queryset
    def get_search_data(self, obj, language, request):
        """Concatenate plugin text, meta description and keywords for indexing."""
        current_article = obj.article
        placeholders = self.get_article_placeholders(current_article)
        plugins = self.get_plugin_queryset(language).filter(placeholder__in=placeholders)
        text_bits = []
        for base_plugin in plugins:
            plugin_text_content = self.get_plugin_search_text(base_plugin, request)
            text_bits.append(plugin_text_content)
        article_meta_description = current_article.get_meta_description(fallback=False, language=language)
        if article_meta_description:
            text_bits.append(article_meta_description)
        # get_meta_keywords is optional on the article model.
        article_meta_keywords = getattr(current_article, 'get_meta_keywords', None)
        if callable(article_meta_keywords):
            text_bits.append(article_meta_keywords())
        return clean_join(' ', text_bits)
    def get_plugin_search_text(self, base_plugin, request):
        plugin_content_bits = get_plugin_index_data(base_plugin, request)
        return clean_join(' ', plugin_content_bits)
    def get_model(self):
        return Title
    def get_index_queryset(self, language):
        # Public titles of articles that are currently within their
        # publication date window (open-ended bounds allowed).
        queryset = Title.objects.public().filter(
            Q(article__publication_date__lt=timezone.now()) | Q(article__publication_date__isnull=True),
            Q(article__publication_end_date__gte=timezone.now()) | Q(article__publication_end_date__isnull=True),
            language=language
        ).select_related('article').distinct()
        return queryset
    def should_update(self, instance, **kwargs):
        # We use the action flag to prevent
        # updating the cms article on save.
        return kwargs.get('object_action') in self.object_actions
|
misli/django-cms-articles
|
cms_articles/utils/article.py
|
get_article_from_slug
|
python
|
def get_article_from_slug(tree, slug, preview=False, draft=False):
    """Resolve *slug* to a single article object within *tree*.

    Returns ``None`` when no matching article exists.
    """
    from ..models import Title

    candidates = Title.objects.select_related('article').filter(article__tree=tree)
    published_only = not (draft or preview)
    if draft:
        candidates = candidates.filter(publisher_is_draft=True)
    elif preview:
        candidates = candidates.filter(publisher_is_draft=False)
    else:
        candidates = candidates.filter(published=True, publisher_is_draft=False)

    for title in candidates.filter(slug=slug).iterator():
        # In live mode, skip titles whose article is not actually published.
        if published_only and not _page_is_published(title.article):
            continue
        # Prime the per-language title cache before handing the article back.
        title.article.title_cache = {title.language: title}
        return title.article
    return None
|
Resolves a slug to a single article object.
Returns None if article does not exist
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/utils/article.py#L7-L31
| null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from cms.utils.page import _page_is_published
|
misli/django-cms-articles
|
cms_articles/models/managers.py
|
ArticleManager.search
|
python
|
def search(self, q, language=None, current_site_only=True):
    """
    Simple search function.

    Plugins can define a 'search_fields' tuple similar to ModelAdmin
    classes; those fields are matched with ``icontains`` against *q*.
    """
    from cms.plugin_pool import plugin_pool
    qs = self.get_queryset()
    qs = qs.public()
    if current_site_only:
        site = Site.objects.get_current()
        qs = qs.filter(tree__site=site)
    # Match the query against translated titles.
    qt = Q(title_set__title__icontains=q)
    # find 'searchable' plugins and build query
    qp = Q()
    plugins = plugin_pool.get_all_plugins()
    for plugin in plugins:
        cmsplugin = plugin.model
        if not (
            hasattr(cmsplugin, 'search_fields') and
            hasattr(cmsplugin, 'cmsplugin_ptr')
        ):
            continue
        field = cmsplugin.cmsplugin_ptr.field
        related_query_name = field.related_query_name()
        # A name starting with '+' means the reverse relation is disabled.
        if related_query_name and not related_query_name.startswith('+'):
            for field in cmsplugin.search_fields:
                qp |= Q(**{
                    'placeholders__cmsplugin__{0}__{1}__icontains'.format(
                        related_query_name,
                        field,
                    ): q})
    if language:
        qt &= Q(title_set__language=language)
        qp &= Q(cmsplugin__language=language)
    qs = qs.filter(qt | qp)
    return qs.distinct()
|
Simple search function
Plugins can define a 'search_fields' tuple similar to ModelAdmin classes
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/models/managers.py#L19-L60
| null |
class ArticleManager(PublisherManager):
    """Use draft() and public() methods for accessing the corresponding
    instances.
    """
    def get_queryset(self):
        """Change standard model queryset to our own.

        Returns an ``ArticleQuerySet`` so chained calls gain its extra
        filtering helpers.
        """
        return ArticleQuerySet(self.model)
|
misli/django-cms-articles
|
cms_articles/models/managers.py
|
TitleManager.get_title
|
python
|
def get_title(self, article, language, language_fallback=False):
    """
    Get the latest title (content) for *article* in *language*.

    When ``language_fallback`` is true and no title exists in *language*,
    the configured fallback languages are tried in order and ``None`` is
    returned when none of them matches either.  Without fallback the
    ``DoesNotExist`` exception propagates to the caller.
    """
    try:
        return self.get(language=language, article=article)
    except self.model.DoesNotExist:
        if not language_fallback:
            raise
        # The original wrapped this lookup in a second
        # ``except self.model.DoesNotExist`` handler, but ``filter()`` and
        # queryset iteration never raise DoesNotExist, so that handler was
        # unreachable dead code and has been removed.
        titles = self.filter(article=article)
        for lang in get_fallback_languages(language):
            for title in titles:
                if lang == title.language:
                    return title
        return None
|
Gets the latest content for a particular article and language. Falls back
to another language if wanted.
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/models/managers.py#L64-L86
| null |
class TitleManager(PublisherManager):
    """Manager for Title objects with publisher-aware helpers."""
    # Public (live) titles of published articles; counterpart of drafts().
    # Used by tests and the search index.
    def public(self):
        return self.get_queryset().filter(publisher_is_draft=False, published=True)
    def drafts(self):
        # Draft (editable) titles only.
        return self.get_queryset().filter(publisher_is_draft=True)
    def set_or_create(self, request, article, form, language):
        """
        set or create a title for a particular article and language
        """
        # Language-dependent fields copied from the submitted form.
        base_fields = [
            'slug',
            'title',
            'description',
            'meta_description',
            'page_title',
            'menu_title',
            'image',
        ]
        cleaned_data = form.cleaned_data
        try:
            obj = self.get(article=article, language=language)
        except self.model.DoesNotExist:
            # No title yet for this language: create it from the form data.
            data = {}
            for name in base_fields:
                if name in cleaned_data:
                    data[name] = cleaned_data[name]
            data['article'] = article
            data['language'] = language
            return self.create(**data)
        # Existing title: overwrite the fields the form declares.
        for name in base_fields:
            if name in form.base_fields:
                value = cleaned_data.get(name, None)
                setattr(obj, name, value)
        obj.save()
        return obj
|
misli/django-cms-articles
|
cms_articles/models/managers.py
|
TitleManager.set_or_create
|
python
|
def set_or_create(self, request, article, form, language):
    """
    Set or create a title for a particular article and language.
    """
    base_fields = [
        'slug',
        'title',
        'description',
        'meta_description',
        'page_title',
        'menu_title',
        'image',
    ]
    cleaned_data = form.cleaned_data
    try:
        title = self.get(article=article, language=language)
    except self.model.DoesNotExist:
        # No title for this language yet -- create one from the form data.
        data = {name: cleaned_data[name] for name in base_fields if name in cleaned_data}
        data['article'] = article
        data['language'] = language
        return self.create(**data)
    # A title already exists: overwrite its language-dependent fields.
    for name in base_fields:
        if name in form.base_fields:
            setattr(title, name, cleaned_data.get(name, None))
    title.save()
    return title
|
set or create a title for a particular article and language
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/models/managers.py#L95-L124
| null |
class TitleManager(PublisherManager):
    """Manager for Title objects with language-fallback lookups."""
    def get_title(self, article, language, language_fallback=False):
        """
        Gets the latest content for a particular article and language. Falls back
        to another language if wanted.
        """
        try:
            title = self.get(language=language, article=article)
            return title
        except self.model.DoesNotExist:
            if language_fallback:
                # NOTE(review): this inner try/except looks unreachable --
                # filter() and queryset iteration never raise DoesNotExist;
                # confirm before removing.
                try:
                    titles = self.filter(article=article)
                    fallbacks = get_fallback_languages(language)
                    for lang in fallbacks:
                        for title in titles:
                            if lang == title.language:
                                return title
                    return None
                except self.model.DoesNotExist:
                    pass
            else:
                raise
        return None
    # Public (live) titles of published articles; counterpart of drafts().
    def public(self):
        return self.get_queryset().filter(publisher_is_draft=False, published=True)
    def drafts(self):
        return self.get_queryset().filter(publisher_is_draft=True)
|
misli/django-cms-articles
|
cms_articles/article_rendering.py
|
render_article
|
python
|
def render_article(request, article, current_language, slug):
    """
    Render *article* with its configured template and return the response.

    Also applies the tree's X-Frame-Options policy to the response.
    """
    context = {}
    context['article'] = article
    context['lang'] = current_language
    context['current_article'] = article
    context['has_change_permissions'] = article.has_change_permission(request)
    response = TemplateResponse(request, article.template, context)
    response.add_post_render_callback(set_page_cache)
    # Add headers for X Frame Options - this really should be changed upon moving to class based views
    xframe_options = article.tree.get_xframe_options()
    # xframe_options can be None if there's no xframe information on the page
    # (eg. a top-level page which has xframe options set to "inherit")
    if xframe_options == Page.X_FRAME_OPTIONS_INHERIT or xframe_options is None:
        # This is when we defer to django's own clickjacking handling
        return response
    # We want to prevent django setting this in their middleware
    response.xframe_options_exempt = True
    if xframe_options == Page.X_FRAME_OPTIONS_ALLOW:
        # Do nothing, allowed is no header.
        return response
    elif xframe_options == Page.X_FRAME_OPTIONS_SAMEORIGIN:
        response['X-Frame-Options'] = 'SAMEORIGIN'
    elif xframe_options == Page.X_FRAME_OPTIONS_DENY:
        response['X-Frame-Options'] = 'DENY'
    return response
|
Renders an article
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/article_rendering.py#L7-L38
| null |
# -*- coding: utf-8 -*-
from cms.cache.page import set_page_cache
from cms.models import Page
from django.template.response import TemplateResponse
|
misli/django-cms-articles
|
cms_articles/views.py
|
article
|
python
|
def article(request, slug):
    """
    The main view of django-cms-articles: takes a request and a slug and
    renders the matching article, handling draft/preview modes, language
    fallbacks, redirects, and login requirements along the way.
    """
    # Get current CMS Page as article tree
    tree = request.current_page.get_public_object()
    # Check whether it really is a tree.
    # It could also be one of its sub-pages.
    if tree.application_urls != 'CMSArticlesApp':
        # In such case show regular CMS Page
        return page(request, slug)
    # Get an Article object from the request
    draft = use_draft(request) and request.user.has_perm('cms_articles.change_article')
    preview = 'preview' in request.GET and request.user.has_perm('cms_articles.change_article')
    site = tree.node.site
    article = get_article_from_slug(tree, slug, preview, draft)
    if not article:
        # raise 404
        # NOTE(review): assumes _handle_no_page() raises Http404 itself;
        # if it only returns a response this branch falls through -- confirm.
        _handle_no_page(request)
    request.current_article = article
    # Staff may see all configured languages; everyone else only public ones.
    if hasattr(request, 'user') and request.user.is_staff:
        user_languages = get_language_list(site_id=site.pk)
    else:
        user_languages = get_public_languages(site_id=site.pk)
    request_language = get_language_from_request(request, check_path=True)
    # get_published_languages will return all languages in draft mode
    # and published only in live mode.
    # These languages are then filtered out by the user allowed languages
    available_languages = [
        language for language in user_languages
        if language in list(article.get_published_languages())
    ]
    # URL variants that count as "this page" (used to avoid self-redirects).
    own_urls = [
        request.build_absolute_uri(request.path),
        '/%s' % request.path,
        request.path,
    ]
    try:
        redirect_on_fallback = get_redirect_on_fallback(request_language, site_id=site.pk)
    except LanguageError:
        redirect_on_fallback = False
    if request_language not in user_languages:
        # Language is not allowed
        # Use the default site language
        default_language = get_default_language_for_site(site.pk)
        fallbacks = get_fallback_languages(default_language, site_id=site.pk)
        fallbacks = [default_language] + fallbacks
    else:
        fallbacks = get_fallback_languages(request_language, site_id=site.pk)
    # Only fallback to languages the user is allowed to see
    fallback_languages = [
        language for language in fallbacks
        if language != request_language and language in available_languages
    ]
    language_is_unavailable = request_language not in available_languages
    if language_is_unavailable and not fallback_languages:
        # There is no page with the requested language
        # and there's no configured fallbacks
        return _handle_no_page(request)
    elif language_is_unavailable and redirect_on_fallback:
        # There is no page with the requested language and
        # the user has explicitly requested to redirect on fallbacks,
        # so redirect to the first configured / available fallback language
        fallback = fallback_languages[0]
        redirect_url = article.get_absolute_url(fallback, fallback=False)
    else:
        redirect_url = False
    if redirect_url:
        if request.user.is_staff and hasattr(request, 'toolbar') and request.toolbar.edit_mode_active:
            # In edit mode let the toolbar handle the redirect client-side.
            request.toolbar.redirect_url = redirect_url
        elif redirect_url not in own_urls:
            # prevent redirect to self
            return HttpResponseRedirect(redirect_url)
    # permission checks
    # NOTE(review): is_authenticated is *called* here (Django < 2.0 API);
    # on Django >= 2.0 it is a property -- confirm the target Django range.
    if article.login_required and not request.user.is_authenticated():
        return redirect_to_login(urlquote(request.get_full_path()), settings.LOGIN_URL)
    if hasattr(request, 'toolbar'):
        request.toolbar.obj = article
    structure_requested = get_cms_setting('CMS_TOOLBAR_URL__BUILD') in request.GET
    if article.has_change_permission(request) and structure_requested:
        return render_object_structure(request, article)
    return render_article(request, article, current_language=request_language, slug=slug)
|
The main view of the Django-CMS Articles! Takes a request and a slug,
renders the article.
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/views.py#L20-L120
|
[
"def get_article_from_slug(tree, slug, preview=False, draft=False):\n \"\"\"\n Resolves a slug to a single article object.\n Returns None if article does not exist\n \"\"\"\n from ..models import Title\n\n titles = Title.objects.select_related('article').filter(article__tree=tree)\n published_only = (not draft and not preview)\n\n if draft:\n titles = titles.filter(publisher_is_draft=True)\n elif preview:\n titles = titles.filter(publisher_is_draft=False)\n else:\n titles = titles.filter(published=True, publisher_is_draft=False)\n titles = titles.filter(slug=slug)\n\n for title in titles.iterator():\n if published_only and not _page_is_published(title.article):\n continue\n\n title.article.title_cache = {title.language: title}\n return title.article\n return\n"
] |
from cms.exceptions import LanguageError
from cms.page_rendering import _handle_no_page, render_object_structure
from cms.utils.conf import get_cms_setting
from cms.utils.i18n import (
get_default_language_for_site, get_fallback_languages, get_language_list,
get_public_languages, get_redirect_on_fallback,
)
from cms.utils.moderator import use_draft
from cms.views import details as page
from django.conf import settings
from django.contrib.auth.views import redirect_to_login
from django.http import HttpResponseRedirect
from django.utils.http import urlquote
from django.utils.translation import get_language_from_request
from .article_rendering import render_article
from .utils.article import get_article_from_slug
|
misli/django-cms-articles
|
cms_articles/utils/__init__.py
|
is_valid_article_slug
|
python
|
def is_valid_article_slug(article, language, slug):
    """Validates given slug depending on settings."""
    from ..models import Title

    conflicts = Title.objects.filter(slug=slug, language=language)
    if article.pk:
        # Don't count the article itself (nor its public counterpart)
        # as a conflict.
        conflicts = conflicts.exclude(Q(language=language) & Q(article=article))
        conflicts = conflicts.exclude(article__publisher_public=article)
    return not conflicts.count()
|
Validates given slug depending on settings.
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/utils/__init__.py#L4-L18
| null |
from django.db.models import Q
|
misli/django-cms-articles
|
cms_articles/admin/article.py
|
ArticleAdmin.get_urls
|
python
|
def get_urls(self):
    """Get the admin urls"""
    info = '%s_%s' % (self.model._meta.app_label, self.model._meta.model_name)

    # Extra admin views exposed by this ModelAdmin; each is wrapped in
    # admin_view() and named '<app>_<model>_<viewname>'.
    custom_views = (
        (r'^([0-9]+)/delete-translation/$', self.delete_translation),
        (r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_article),
        (r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish),
        (r'^([0-9]+)/([a-z\-]+)/preview/$', self.preview_article),
    )
    url_patterns = [
        url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
        for regex, fn in custom_views
    ]
    return url_patterns + super(ArticleAdmin, self).get_urls()
|
Get the admin urls
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/admin/article.py#L102-L118
|
[
"def pat(regex, fn):\n return url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))\n"
] |
class ArticleAdmin(PlaceholderAdminMixin, admin.ModelAdmin):
    """Admin for Article with per-language change-list columns."""
    change_list_template = 'admin/cms_articles/article_changelist.html'
    search_fields = ('=id', 'title_set__slug', 'title_set__title', 'title_set__description')
    # One extra 'lang_<code>' column per configured language; resolved
    # dynamically through __getattr__ below.
    list_display = ('__str__', 'order_date', 'preview_link') + tuple(
        'lang_{}'.format(lang) for lang in get_language_list()
    )
    list_filter = ['tree', 'attributes', 'categories', 'template', 'changed_by']
    date_hierarchy = 'order_date'
    filter_horizontal = ['attributes', 'categories']
    preview_template = get_template('admin/cms_articles/article/change_list_preview.html')
    def preview_link(self, obj):
        # Render the preview cell; request/language come from thread-locals
        # populated in changelist_view().
        return self.preview_template.render({
            'article': obj,
            'lang': _thread_locals.language,
            'request': _thread_locals.request,
        })
    preview_link.short_description = _('Show')
    preview_link.allow_tags = True
    lang_template = get_template('admin/cms_articles/article/change_list_lang.html')
    def __getattr__(self, name):
        # Synthesise the 'lang_<code>' list_display callables on demand.
        if name.startswith('lang_'):
            lang = name[len('lang_'):]
            def lang_dropdown(obj):
                return self.lang_template.render({
                    'article': obj,
                    'lang': lang,
                    'language': _thread_locals.language,
                    'has_change_permission': obj.has_change_permission(_thread_locals.request),
                    'has_publish_permission': obj.has_publish_permission(_thread_locals.request),
                    'request': _thread_locals.request,
                })
            lang_dropdown.short_description = lang
            lang_dropdown.allow_tags = True
            return lang_dropdown
        raise AttributeError(name)
    def get_fieldsets(self, request, obj=None):
        # 'content' is only offered on creation; existing articles edit
        # their content through placeholders instead.
        language_dependent = ['title', 'slug', 'description', 'content', 'page_title',
                              'menu_title', 'meta_description', 'image']
        if obj:
            language_dependent.remove('content')
        return [
            (None, {'fields': ['tree', 'template']}),
            (_('Language dependent settings'), {'fields': language_dependent}),
            (_('Other settings'), {'fields': ['attributes', 'categories', 'publication_date',
                                              'publication_end_date', 'login_required']}),
        ]
    def get_queryset(self, request):
        # Only drafts of the current site are editable in this admin.
        return super(ArticleAdmin, self).get_queryset(request).filter(
            tree__node__site_id=settings.SITE_ID,
            publisher_is_draft=True,
        )
    def save_model(self, request, obj, form, change):
        # Persist the article, then its language-specific Title, and on
        # first save seed the main placeholder with the submitted content.
        new = obj.id is None
        super(ArticleAdmin, self).save_model(request, obj, form, change)
        Title.objects.set_or_create(request, obj, form, form.cleaned_data['language'])
        if new and form.cleaned_data['content']:
            add_content(
                obj,
                language=form.cleaned_data['language'],
                slot=settings.CMS_ARTICLES_SLOT,
                content=form.cleaned_data['content'],
            )
    # Compiled once at class definition; used by get_form() to extract the
    # editable part of a stored slug.
    SLUG_REGEXP = re.compile(settings.CMS_ARTICLES_SLUG_REGEXP)
    def get_form(self, request, obj=None, **kwargs):
        """
        Get ArticleForm for the Article model and modify its fields depending on
        the request.
        """
        language = get_language_from_request(request)
        form = super(ArticleAdmin, self).get_form(
            request, obj,
            form=(obj and ArticleForm or ArticleCreateForm),
            **kwargs
        )
        # get_form method operates by overriding initial fields value which
        # may persist across invocation. Code below deepcopies fields definition
        # to avoid leaks
        for field in tuple(form.base_fields.keys()):
            form.base_fields[field] = copy.deepcopy(form.base_fields[field])
        if 'language' in form.base_fields:
            form.base_fields['language'].initial = language
        if obj:
            # Pre-fill the language-dependent fields from the current Title.
            title_obj = obj.get_title_obj(language=language, fallback=False, force_reload=True)
            if hasattr(title_obj, 'id'):
                for name in ('title', 'description', 'page_title', 'menu_title', 'meta_description', 'image'):
                    if name in form.base_fields:
                        form.base_fields[name].initial = getattr(title_obj, name)
                try:
                    slug = self.SLUG_REGEXP.search(title_obj.slug).groups()[settings.CMS_ARTICLES_SLUG_GROUP_INDEX]
                except AttributeError:
                    # search() returned None -- stored slug doesn't match the
                    # configured pattern; fall back to the raw value.
                    warnings.warn('Failed to parse slug from CMS_ARTICLES_SLUG_REGEXP. '
                                  'It probably doesn\'t correspond to CMS_ARTICLES_SLUG_FORMAT.')
                    slug = title_obj.slug
                form.base_fields['slug'].initial = slug
        return form
    def get_unihandecode_context(self, language):
        # Resolve the unihandecode decoder and script URLs for *language*.
        if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'):
            uhd_lang = language[:2]
        else:
            uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER')
        uhd_host = get_cms_setting('UNIHANDECODE_HOST')
        uhd_version = get_cms_setting('UNIHANDECODE_VERSION')
        if uhd_lang and uhd_host and uhd_version:
            uhd_urls = [
                '%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version),
                '%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang),
            ]
        else:
            uhd_urls = []
        return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls}
    def changelist_view(self, request, extra_context=None):
        # Stash request/language in thread-locals for the list-column
        # callables (preview_link, lang_* dropdowns).
        _thread_locals.request = request
        _thread_locals.language = get_language_from_request(request)
        return super(ArticleAdmin, self).changelist_view(request, extra_context=extra_context)
    def add_view(self, request, form_url='', extra_context=None):
        extra_context = self.update_language_tab_context(request, context=extra_context)
        extra_context.update(self.get_unihandecode_context(extra_context['language']))
        return super(ArticleAdmin, self).add_view(request, form_url, extra_context=extra_context)
    def change_view(self, request, object_id, form_url='', extra_context=None):
        extra_context = self.update_language_tab_context(request, context=extra_context)
        language = extra_context['language']
        extra_context.update(self.get_unihandecode_context(language))
        response = super(ArticleAdmin, self).change_view(request, object_id,
                                                         form_url=form_url, extra_context=extra_context)
        # Keep the selected language across the post-save redirect.
        # NOTE(review): response._headers is a private Django attribute
        # (removed in Django 3.2) -- confirm the supported Django range.
        if language and response.status_code == 302 and response._headers['location'][1] == request.path_info:
            location = response._headers['location']
            response._headers['location'] = (location[0], '%s?language=%s' % (location[1], language))
        return response
    def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
        # add context variables
        filled_languages = []
        if obj:
            filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')]
        allowed_languages = [lang[0] for lang in get_language_tuple()]
        context.update({
            'filled_languages': [lang for lang in filled_languages if lang in allowed_languages],
        })
        return super(ArticleAdmin, self).render_change_form(request, context, add, change, form_url, obj)
    def update_language_tab_context(self, request, context=None):
        # Ensure the language-tab variables are present in *context*.
        if not context:
            context = {}
        language = get_language_from_request(request)
        languages = get_language_tuple()
        context.update({
            'language': language,
            'languages': languages,
            'language_tabs': languages,
            'show_language_tabs': len(list(languages)) > 1,
        })
        return context
    @require_POST
    @transaction.atomic
    def publish_article(self, request, article_id, language):
        """Publish one language of an article and/or static placeholders,
        then redirect to an appropriate page (POST only, atomic)."""
        try:
            article = Article.objects.get(id=article_id, publisher_is_draft=True)
        except Article.DoesNotExist:
            article = None
        # ensure user has permissions to publish this article
        if article:
            if not self.has_change_permission(request):
                return HttpResponseForbidden(force_text(_('You do not have permission to publish this article')))
            article.publish(language)
        # Static placeholders to publish alongside, passed as ?statics=1,2,3
        statics = request.GET.get('statics', '')
        if not statics and not article:
            raise Http404('No article or stack found for publishing.')
        all_published = True
        if statics:
            static_ids = statics.split(',')
            for pk in static_ids:
                static_placeholder = StaticPlaceholder.objects.get(pk=pk)
                published = static_placeholder.publish(request, language)
                if not published:
                    all_published = False
        if article:
            if all_published:
                messages.info(request, _('The content was successfully published.'))
                LogEntry.objects.log_action(
                    user_id=request.user.id,
                    content_type_id=ContentType.objects.get_for_model(Article).pk,
                    object_id=article_id,
                    object_repr=article.get_title(language),
                    action_flag=CHANGE,
                )
            else:
                messages.warning(request, _('There was a problem publishing your content'))
        # Explicit redirect target wins over everything else.
        if 'redirect' in request.GET:
            return HttpResponseRedirect(request.GET['redirect'])
        referrer = request.META.get('HTTP_REFERER', '')
        path = admin_reverse('cms_articles_article_changelist')
        if request.GET.get('redirect_language'):
            path = '%s?language=%s&article_id=%s' % (
                path,
                request.GET.get('redirect_language'),
                request.GET.get('redirect_article_id')
            )
        # When not coming from the admin, send the user to the live page
        # (with the toolbar's edit mode switched off).
        if admin_reverse('index') not in referrer:
            if all_published:
                if article:
                    if article.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
                        path = article.get_absolute_url(language, fallback=True)
                    else:
                        public_article = Article.objects.get(publisher_public=article.pk)
                        path = '%s?%s' % (
                            public_article.get_absolute_url(language, fallback=True),
                            get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
                        )
                else:
                    path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
            else:
                path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
        return HttpResponseRedirect(path)
    @require_POST
    @transaction.atomic
    def unpublish(self, request, article_id, language):
        """
        Unpublish one language of an article (POST only, atomic), then
        redirect back to the change list.
        """
        article = get_object_or_404(self.model, pk=article_id)
        if not article.has_publish_permission(request):
            return HttpResponseForbidden(force_text(_('You do not have permission to unpublish this article')))
        if not article.publisher_public_id:
            return HttpResponseForbidden(force_text(_('This article was never published')))
        try:
            article.unpublish(language)
            message = _('The %(language)s article "%(article)s" was successfully unpublished') % {
                'language': get_language_object(language)['name'], 'article': article}
            messages.info(request, message)
            LogEntry.objects.log_action(
                user_id=request.user.id,
                content_type_id=ContentType.objects.get_for_model(Article).pk,
                object_id=article_id,
                object_repr=article.get_title(),
                action_flag=CHANGE,
                change_message=message,
            )
        # NOTE(review): exc.message does not exist on Python 3 exceptions;
        # these handlers assume a Python 2 runtime -- confirm.
        except RuntimeError:
            exc = sys.exc_info()[1]
            messages.error(request, exc.message)
        except ValidationError:
            exc = sys.exc_info()[1]
            messages.error(request, exc.message)
        path = admin_reverse('cms_articles_article_changelist')
        if request.GET.get('redirect_language'):
            path = '%s?language=%s&article_id=%s' % (
                path,
                request.GET.get('redirect_language'),
                request.GET.get('redirect_article_id')
            )
        return HttpResponseRedirect(path)
def delete_translation(self, request, object_id, extra_context=None):
if 'language' in request.GET:
language = request.GET['language']
else:
language = get_language_from_request(request)
opts = Article._meta
titleopts = Title._meta
app_label = titleopts.app_label
pluginopts = CMSPlugin._meta
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
if not self.has_delete_permission(request, obj):
return HttpResponseForbidden(force_text(_('You do not have permission to change this article')))
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name),
'key': escape(object_id)
})
if not len(list(obj.get_languages())) > 1:
raise Http404(_('There only exists one translation for this article'))
titleobj = get_object_or_404(Title, article__id=object_id, language=language)
saved_plugins = CMSPlugin.objects.filter(placeholder__article__id=object_id, language=language)
using = router.db_for_read(self.model)
kwargs = {
'admin_site': self.admin_site,
'user': request.user,
'using': using
}
deleted_objects, __, perms_needed = get_deleted_objects(
[titleobj],
titleopts,
**kwargs
)[:3]
to_delete_plugins, __, perms_needed_plugins = get_deleted_objects(
saved_plugins,
pluginopts,
**kwargs
)[:3]
deleted_objects.append(to_delete_plugins)
perms_needed = set(list(perms_needed) + list(perms_needed_plugins))
if request.method == 'POST':
if perms_needed:
raise PermissionDenied
message = _('Title and plugins with language %(language)s was deleted') % {
'language': force_text(get_language_object(language)['name'])
}
self.log_change(request, titleobj, message)
messages.info(request, message)
titleobj.delete()
for p in saved_plugins:
p.delete()
public = obj.publisher_public
if public:
public.save()
if not self.has_change_permission(request, None):
return HttpResponseRedirect(admin_reverse('index'))
return HttpResponseRedirect(admin_reverse('cms_articles_article_changelist'))
context = {
'title': _('Are you sure?'),
'object_name': force_text(titleopts.verbose_name),
'object': titleobj,
'deleted_objects': deleted_objects,
'perms_lacking': perms_needed,
'opts': opts,
'root_path': admin_reverse('index'),
'app_label': app_label,
}
context.update(extra_context or {})
request.current_app = self.admin_site.name
return render(request, self.delete_confirmation_template or [
'admin/%s/%s/delete_confirmation.html' % (app_label, titleopts.object_name.lower()),
'admin/%s/delete_confirmation.html' % app_label,
'admin/delete_confirmation.html'
], context)
def preview_article(self, request, object_id, language):
"""Redirecting preview function based on draft_id
"""
article = get_object_or_404(self.model, id=object_id)
attrs = '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
attrs += '&language=' + language
with force_language(language):
url = article.get_absolute_url(language) + attrs
return HttpResponseRedirect(url)
|
misli/django-cms-articles
|
cms_articles/admin/article.py
|
ArticleAdmin.get_form
|
python
|
def get_form(self, request, obj=None, **kwargs):
language = get_language_from_request(request)
form = super(ArticleAdmin, self).get_form(
request, obj,
form=(obj and ArticleForm or ArticleCreateForm),
**kwargs
)
# get_form method operates by overriding initial fields value which
# may persist across invocation. Code below deepcopies fields definition
# to avoid leaks
for field in tuple(form.base_fields.keys()):
form.base_fields[field] = copy.deepcopy(form.base_fields[field])
if 'language' in form.base_fields:
form.base_fields['language'].initial = language
if obj:
title_obj = obj.get_title_obj(language=language, fallback=False, force_reload=True)
if hasattr(title_obj, 'id'):
for name in ('title', 'description', 'page_title', 'menu_title', 'meta_description', 'image'):
if name in form.base_fields:
form.base_fields[name].initial = getattr(title_obj, name)
try:
slug = self.SLUG_REGEXP.search(title_obj.slug).groups()[settings.CMS_ARTICLES_SLUG_GROUP_INDEX]
except AttributeError:
warnings.warn('Failed to parse slug from CMS_ARTICLES_SLUG_REGEXP. '
'It probably doesn\'t correspond to CMS_ARTICLES_SLUG_FORMAT.')
slug = title_obj.slug
form.base_fields['slug'].initial = slug
return form
|
Get ArticleForm for the Article model and modify its fields depending on
the request.
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/admin/article.py#L140-L175
| null |
class ArticleAdmin(PlaceholderAdminMixin, admin.ModelAdmin):
change_list_template = 'admin/cms_articles/article_changelist.html'
search_fields = ('=id', 'title_set__slug', 'title_set__title', 'title_set__description')
list_display = ('__str__', 'order_date', 'preview_link') + tuple(
'lang_{}'.format(lang) for lang in get_language_list()
)
list_filter = ['tree', 'attributes', 'categories', 'template', 'changed_by']
date_hierarchy = 'order_date'
filter_horizontal = ['attributes', 'categories']
preview_template = get_template('admin/cms_articles/article/change_list_preview.html')
def preview_link(self, obj):
return self.preview_template.render({
'article': obj,
'lang': _thread_locals.language,
'request': _thread_locals.request,
})
preview_link.short_description = _('Show')
preview_link.allow_tags = True
lang_template = get_template('admin/cms_articles/article/change_list_lang.html')
def __getattr__(self, name):
if name.startswith('lang_'):
lang = name[len('lang_'):]
def lang_dropdown(obj):
return self.lang_template.render({
'article': obj,
'lang': lang,
'language': _thread_locals.language,
'has_change_permission': obj.has_change_permission(_thread_locals.request),
'has_publish_permission': obj.has_publish_permission(_thread_locals.request),
'request': _thread_locals.request,
})
lang_dropdown.short_description = lang
lang_dropdown.allow_tags = True
return lang_dropdown
raise AttributeError(name)
def get_fieldsets(self, request, obj=None):
language_dependent = ['title', 'slug', 'description', 'content', 'page_title',
'menu_title', 'meta_description', 'image']
if obj:
language_dependent.remove('content')
return [
(None, {'fields': ['tree', 'template']}),
(_('Language dependent settings'), {'fields': language_dependent}),
(_('Other settings'), {'fields': ['attributes', 'categories', 'publication_date',
'publication_end_date', 'login_required']}),
]
def get_urls(self):
"""Get the admin urls
"""
info = '%s_%s' % (self.model._meta.app_label, self.model._meta.model_name)
def pat(regex, fn):
return url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
url_patterns = [
pat(r'^([0-9]+)/delete-translation/$', self.delete_translation),
pat(r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_article),
pat(r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish),
pat(r'^([0-9]+)/([a-z\-]+)/preview/$', self.preview_article),
]
url_patterns += super(ArticleAdmin, self).get_urls()
return url_patterns
def get_queryset(self, request):
return super(ArticleAdmin, self).get_queryset(request).filter(
tree__node__site_id=settings.SITE_ID,
publisher_is_draft=True,
)
def save_model(self, request, obj, form, change):
new = obj.id is None
super(ArticleAdmin, self).save_model(request, obj, form, change)
Title.objects.set_or_create(request, obj, form, form.cleaned_data['language'])
if new and form.cleaned_data['content']:
add_content(
obj,
language=form.cleaned_data['language'],
slot=settings.CMS_ARTICLES_SLOT,
content=form.cleaned_data['content'],
)
SLUG_REGEXP = re.compile(settings.CMS_ARTICLES_SLUG_REGEXP)
def get_unihandecode_context(self, language):
if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'):
uhd_lang = language[:2]
else:
uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER')
uhd_host = get_cms_setting('UNIHANDECODE_HOST')
uhd_version = get_cms_setting('UNIHANDECODE_VERSION')
if uhd_lang and uhd_host and uhd_version:
uhd_urls = [
'%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version),
'%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang),
]
else:
uhd_urls = []
return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls}
def changelist_view(self, request, extra_context=None):
_thread_locals.request = request
_thread_locals.language = get_language_from_request(request)
return super(ArticleAdmin, self).changelist_view(request, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
extra_context = self.update_language_tab_context(request, context=extra_context)
extra_context.update(self.get_unihandecode_context(extra_context['language']))
return super(ArticleAdmin, self).add_view(request, form_url, extra_context=extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
extra_context = self.update_language_tab_context(request, context=extra_context)
language = extra_context['language']
extra_context.update(self.get_unihandecode_context(language))
response = super(ArticleAdmin, self).change_view(request, object_id,
form_url=form_url, extra_context=extra_context)
if language and response.status_code == 302 and response._headers['location'][1] == request.path_info:
location = response._headers['location']
response._headers['location'] = (location[0], '%s?language=%s' % (location[1], language))
return response
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
# add context variables
filled_languages = []
if obj:
filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')]
allowed_languages = [lang[0] for lang in get_language_tuple()]
context.update({
'filled_languages': [lang for lang in filled_languages if lang in allowed_languages],
})
return super(ArticleAdmin, self).render_change_form(request, context, add, change, form_url, obj)
def update_language_tab_context(self, request, context=None):
if not context:
context = {}
language = get_language_from_request(request)
languages = get_language_tuple()
context.update({
'language': language,
'languages': languages,
'language_tabs': languages,
'show_language_tabs': len(list(languages)) > 1,
})
return context
@require_POST
@transaction.atomic
def publish_article(self, request, article_id, language):
try:
article = Article.objects.get(id=article_id, publisher_is_draft=True)
except Article.DoesNotExist:
article = None
# ensure user has permissions to publish this article
if article:
if not self.has_change_permission(request):
return HttpResponseForbidden(force_text(_('You do not have permission to publish this article')))
article.publish(language)
statics = request.GET.get('statics', '')
if not statics and not article:
raise Http404('No article or stack found for publishing.')
all_published = True
if statics:
static_ids = statics.split(',')
for pk in static_ids:
static_placeholder = StaticPlaceholder.objects.get(pk=pk)
published = static_placeholder.publish(request, language)
if not published:
all_published = False
if article:
if all_published:
messages.info(request, _('The content was successfully published.'))
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Article).pk,
object_id=article_id,
object_repr=article.get_title(language),
action_flag=CHANGE,
)
else:
messages.warning(request, _('There was a problem publishing your content'))
if 'redirect' in request.GET:
return HttpResponseRedirect(request.GET['redirect'])
referrer = request.META.get('HTTP_REFERER', '')
path = admin_reverse('cms_articles_article_changelist')
if request.GET.get('redirect_language'):
path = '%s?language=%s&article_id=%s' % (
path,
request.GET.get('redirect_language'),
request.GET.get('redirect_article_id')
)
if admin_reverse('index') not in referrer:
if all_published:
if article:
if article.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
path = article.get_absolute_url(language, fallback=True)
else:
public_article = Article.objects.get(publisher_public=article.pk)
path = '%s?%s' % (
public_article.get_absolute_url(language, fallback=True),
get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
)
else:
path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
else:
path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
return HttpResponseRedirect(path)
@require_POST
@transaction.atomic
def unpublish(self, request, article_id, language):
"""
Publish or unpublish a language of a article
"""
article = get_object_or_404(self.model, pk=article_id)
if not article.has_publish_permission(request):
return HttpResponseForbidden(force_text(_('You do not have permission to unpublish this article')))
if not article.publisher_public_id:
return HttpResponseForbidden(force_text(_('This article was never published')))
try:
article.unpublish(language)
message = _('The %(language)s article "%(article)s" was successfully unpublished') % {
'language': get_language_object(language)['name'], 'article': article}
messages.info(request, message)
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Article).pk,
object_id=article_id,
object_repr=article.get_title(),
action_flag=CHANGE,
change_message=message,
)
except RuntimeError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
except ValidationError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
path = admin_reverse('cms_articles_article_changelist')
if request.GET.get('redirect_language'):
path = '%s?language=%s&article_id=%s' % (
path,
request.GET.get('redirect_language'),
request.GET.get('redirect_article_id')
)
return HttpResponseRedirect(path)
def delete_translation(self, request, object_id, extra_context=None):
if 'language' in request.GET:
language = request.GET['language']
else:
language = get_language_from_request(request)
opts = Article._meta
titleopts = Title._meta
app_label = titleopts.app_label
pluginopts = CMSPlugin._meta
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
if not self.has_delete_permission(request, obj):
return HttpResponseForbidden(force_text(_('You do not have permission to change this article')))
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name),
'key': escape(object_id)
})
if not len(list(obj.get_languages())) > 1:
raise Http404(_('There only exists one translation for this article'))
titleobj = get_object_or_404(Title, article__id=object_id, language=language)
saved_plugins = CMSPlugin.objects.filter(placeholder__article__id=object_id, language=language)
using = router.db_for_read(self.model)
kwargs = {
'admin_site': self.admin_site,
'user': request.user,
'using': using
}
deleted_objects, __, perms_needed = get_deleted_objects(
[titleobj],
titleopts,
**kwargs
)[:3]
to_delete_plugins, __, perms_needed_plugins = get_deleted_objects(
saved_plugins,
pluginopts,
**kwargs
)[:3]
deleted_objects.append(to_delete_plugins)
perms_needed = set(list(perms_needed) + list(perms_needed_plugins))
if request.method == 'POST':
if perms_needed:
raise PermissionDenied
message = _('Title and plugins with language %(language)s was deleted') % {
'language': force_text(get_language_object(language)['name'])
}
self.log_change(request, titleobj, message)
messages.info(request, message)
titleobj.delete()
for p in saved_plugins:
p.delete()
public = obj.publisher_public
if public:
public.save()
if not self.has_change_permission(request, None):
return HttpResponseRedirect(admin_reverse('index'))
return HttpResponseRedirect(admin_reverse('cms_articles_article_changelist'))
context = {
'title': _('Are you sure?'),
'object_name': force_text(titleopts.verbose_name),
'object': titleobj,
'deleted_objects': deleted_objects,
'perms_lacking': perms_needed,
'opts': opts,
'root_path': admin_reverse('index'),
'app_label': app_label,
}
context.update(extra_context or {})
request.current_app = self.admin_site.name
return render(request, self.delete_confirmation_template or [
'admin/%s/%s/delete_confirmation.html' % (app_label, titleopts.object_name.lower()),
'admin/%s/delete_confirmation.html' % app_label,
'admin/delete_confirmation.html'
], context)
def preview_article(self, request, object_id, language):
"""Redirecting preview function based on draft_id
"""
article = get_object_or_404(self.model, id=object_id)
attrs = '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
attrs += '&language=' + language
with force_language(language):
url = article.get_absolute_url(language) + attrs
return HttpResponseRedirect(url)
|
misli/django-cms-articles
|
cms_articles/admin/article.py
|
ArticleAdmin.unpublish
|
python
|
def unpublish(self, request, article_id, language):
article = get_object_or_404(self.model, pk=article_id)
if not article.has_publish_permission(request):
return HttpResponseForbidden(force_text(_('You do not have permission to unpublish this article')))
if not article.publisher_public_id:
return HttpResponseForbidden(force_text(_('This article was never published')))
try:
article.unpublish(language)
message = _('The %(language)s article "%(article)s" was successfully unpublished') % {
'language': get_language_object(language)['name'], 'article': article}
messages.info(request, message)
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Article).pk,
object_id=article_id,
object_repr=article.get_title(),
action_flag=CHANGE,
change_message=message,
)
except RuntimeError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
except ValidationError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
path = admin_reverse('cms_articles_article_changelist')
if request.GET.get('redirect_language'):
path = '%s?language=%s&article_id=%s' % (
path,
request.GET.get('redirect_language'),
request.GET.get('redirect_article_id')
)
return HttpResponseRedirect(path)
|
Publish or unpublish a language of a article
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/admin/article.py#L306-L341
| null |
class ArticleAdmin(PlaceholderAdminMixin, admin.ModelAdmin):
change_list_template = 'admin/cms_articles/article_changelist.html'
search_fields = ('=id', 'title_set__slug', 'title_set__title', 'title_set__description')
list_display = ('__str__', 'order_date', 'preview_link') + tuple(
'lang_{}'.format(lang) for lang in get_language_list()
)
list_filter = ['tree', 'attributes', 'categories', 'template', 'changed_by']
date_hierarchy = 'order_date'
filter_horizontal = ['attributes', 'categories']
preview_template = get_template('admin/cms_articles/article/change_list_preview.html')
def preview_link(self, obj):
return self.preview_template.render({
'article': obj,
'lang': _thread_locals.language,
'request': _thread_locals.request,
})
preview_link.short_description = _('Show')
preview_link.allow_tags = True
lang_template = get_template('admin/cms_articles/article/change_list_lang.html')
def __getattr__(self, name):
if name.startswith('lang_'):
lang = name[len('lang_'):]
def lang_dropdown(obj):
return self.lang_template.render({
'article': obj,
'lang': lang,
'language': _thread_locals.language,
'has_change_permission': obj.has_change_permission(_thread_locals.request),
'has_publish_permission': obj.has_publish_permission(_thread_locals.request),
'request': _thread_locals.request,
})
lang_dropdown.short_description = lang
lang_dropdown.allow_tags = True
return lang_dropdown
raise AttributeError(name)
def get_fieldsets(self, request, obj=None):
language_dependent = ['title', 'slug', 'description', 'content', 'page_title',
'menu_title', 'meta_description', 'image']
if obj:
language_dependent.remove('content')
return [
(None, {'fields': ['tree', 'template']}),
(_('Language dependent settings'), {'fields': language_dependent}),
(_('Other settings'), {'fields': ['attributes', 'categories', 'publication_date',
'publication_end_date', 'login_required']}),
]
def get_urls(self):
"""Get the admin urls
"""
info = '%s_%s' % (self.model._meta.app_label, self.model._meta.model_name)
def pat(regex, fn):
return url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
url_patterns = [
pat(r'^([0-9]+)/delete-translation/$', self.delete_translation),
pat(r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_article),
pat(r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish),
pat(r'^([0-9]+)/([a-z\-]+)/preview/$', self.preview_article),
]
url_patterns += super(ArticleAdmin, self).get_urls()
return url_patterns
def get_queryset(self, request):
return super(ArticleAdmin, self).get_queryset(request).filter(
tree__node__site_id=settings.SITE_ID,
publisher_is_draft=True,
)
def save_model(self, request, obj, form, change):
new = obj.id is None
super(ArticleAdmin, self).save_model(request, obj, form, change)
Title.objects.set_or_create(request, obj, form, form.cleaned_data['language'])
if new and form.cleaned_data['content']:
add_content(
obj,
language=form.cleaned_data['language'],
slot=settings.CMS_ARTICLES_SLOT,
content=form.cleaned_data['content'],
)
SLUG_REGEXP = re.compile(settings.CMS_ARTICLES_SLUG_REGEXP)
def get_form(self, request, obj=None, **kwargs):
"""
Get ArticleForm for the Article model and modify its fields depending on
the request.
"""
language = get_language_from_request(request)
form = super(ArticleAdmin, self).get_form(
request, obj,
form=(obj and ArticleForm or ArticleCreateForm),
**kwargs
)
# get_form method operates by overriding initial fields value which
# may persist across invocation. Code below deepcopies fields definition
# to avoid leaks
for field in tuple(form.base_fields.keys()):
form.base_fields[field] = copy.deepcopy(form.base_fields[field])
if 'language' in form.base_fields:
form.base_fields['language'].initial = language
if obj:
title_obj = obj.get_title_obj(language=language, fallback=False, force_reload=True)
if hasattr(title_obj, 'id'):
for name in ('title', 'description', 'page_title', 'menu_title', 'meta_description', 'image'):
if name in form.base_fields:
form.base_fields[name].initial = getattr(title_obj, name)
try:
slug = self.SLUG_REGEXP.search(title_obj.slug).groups()[settings.CMS_ARTICLES_SLUG_GROUP_INDEX]
except AttributeError:
warnings.warn('Failed to parse slug from CMS_ARTICLES_SLUG_REGEXP. '
'It probably doesn\'t correspond to CMS_ARTICLES_SLUG_FORMAT.')
slug = title_obj.slug
form.base_fields['slug'].initial = slug
return form
def get_unihandecode_context(self, language):
if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'):
uhd_lang = language[:2]
else:
uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER')
uhd_host = get_cms_setting('UNIHANDECODE_HOST')
uhd_version = get_cms_setting('UNIHANDECODE_VERSION')
if uhd_lang and uhd_host and uhd_version:
uhd_urls = [
'%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version),
'%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang),
]
else:
uhd_urls = []
return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls}
def changelist_view(self, request, extra_context=None):
_thread_locals.request = request
_thread_locals.language = get_language_from_request(request)
return super(ArticleAdmin, self).changelist_view(request, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
extra_context = self.update_language_tab_context(request, context=extra_context)
extra_context.update(self.get_unihandecode_context(extra_context['language']))
return super(ArticleAdmin, self).add_view(request, form_url, extra_context=extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
extra_context = self.update_language_tab_context(request, context=extra_context)
language = extra_context['language']
extra_context.update(self.get_unihandecode_context(language))
response = super(ArticleAdmin, self).change_view(request, object_id,
form_url=form_url, extra_context=extra_context)
if language and response.status_code == 302 and response._headers['location'][1] == request.path_info:
location = response._headers['location']
response._headers['location'] = (location[0], '%s?language=%s' % (location[1], language))
return response
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
# add context variables
filled_languages = []
if obj:
filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')]
allowed_languages = [lang[0] for lang in get_language_tuple()]
context.update({
'filled_languages': [lang for lang in filled_languages if lang in allowed_languages],
})
return super(ArticleAdmin, self).render_change_form(request, context, add, change, form_url, obj)
def update_language_tab_context(self, request, context=None):
if not context:
context = {}
language = get_language_from_request(request)
languages = get_language_tuple()
context.update({
'language': language,
'languages': languages,
'language_tabs': languages,
'show_language_tabs': len(list(languages)) > 1,
})
return context
@require_POST
@transaction.atomic
def publish_article(self, request, article_id, language):
try:
article = Article.objects.get(id=article_id, publisher_is_draft=True)
except Article.DoesNotExist:
article = None
# ensure user has permissions to publish this article
if article:
if not self.has_change_permission(request):
return HttpResponseForbidden(force_text(_('You do not have permission to publish this article')))
article.publish(language)
statics = request.GET.get('statics', '')
if not statics and not article:
raise Http404('No article or stack found for publishing.')
all_published = True
if statics:
static_ids = statics.split(',')
for pk in static_ids:
static_placeholder = StaticPlaceholder.objects.get(pk=pk)
published = static_placeholder.publish(request, language)
if not published:
all_published = False
if article:
if all_published:
messages.info(request, _('The content was successfully published.'))
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Article).pk,
object_id=article_id,
object_repr=article.get_title(language),
action_flag=CHANGE,
)
else:
messages.warning(request, _('There was a problem publishing your content'))
if 'redirect' in request.GET:
return HttpResponseRedirect(request.GET['redirect'])
referrer = request.META.get('HTTP_REFERER', '')
path = admin_reverse('cms_articles_article_changelist')
if request.GET.get('redirect_language'):
path = '%s?language=%s&article_id=%s' % (
path,
request.GET.get('redirect_language'),
request.GET.get('redirect_article_id')
)
if admin_reverse('index') not in referrer:
if all_published:
if article:
if article.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
path = article.get_absolute_url(language, fallback=True)
else:
public_article = Article.objects.get(publisher_public=article.pk)
path = '%s?%s' % (
public_article.get_absolute_url(language, fallback=True),
get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
)
else:
path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
else:
path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
return HttpResponseRedirect(path)
@require_POST
@transaction.atomic
def delete_translation(self, request, object_id, extra_context=None):
if 'language' in request.GET:
language = request.GET['language']
else:
language = get_language_from_request(request)
opts = Article._meta
titleopts = Title._meta
app_label = titleopts.app_label
pluginopts = CMSPlugin._meta
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
if not self.has_delete_permission(request, obj):
return HttpResponseForbidden(force_text(_('You do not have permission to change this article')))
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name),
'key': escape(object_id)
})
if not len(list(obj.get_languages())) > 1:
raise Http404(_('There only exists one translation for this article'))
titleobj = get_object_or_404(Title, article__id=object_id, language=language)
saved_plugins = CMSPlugin.objects.filter(placeholder__article__id=object_id, language=language)
using = router.db_for_read(self.model)
kwargs = {
'admin_site': self.admin_site,
'user': request.user,
'using': using
}
deleted_objects, __, perms_needed = get_deleted_objects(
[titleobj],
titleopts,
**kwargs
)[:3]
to_delete_plugins, __, perms_needed_plugins = get_deleted_objects(
saved_plugins,
pluginopts,
**kwargs
)[:3]
deleted_objects.append(to_delete_plugins)
perms_needed = set(list(perms_needed) + list(perms_needed_plugins))
if request.method == 'POST':
if perms_needed:
raise PermissionDenied
message = _('Title and plugins with language %(language)s was deleted') % {
'language': force_text(get_language_object(language)['name'])
}
self.log_change(request, titleobj, message)
messages.info(request, message)
titleobj.delete()
for p in saved_plugins:
p.delete()
public = obj.publisher_public
if public:
public.save()
if not self.has_change_permission(request, None):
return HttpResponseRedirect(admin_reverse('index'))
return HttpResponseRedirect(admin_reverse('cms_articles_article_changelist'))
context = {
'title': _('Are you sure?'),
'object_name': force_text(titleopts.verbose_name),
'object': titleobj,
'deleted_objects': deleted_objects,
'perms_lacking': perms_needed,
'opts': opts,
'root_path': admin_reverse('index'),
'app_label': app_label,
}
context.update(extra_context or {})
request.current_app = self.admin_site.name
return render(request, self.delete_confirmation_template or [
'admin/%s/%s/delete_confirmation.html' % (app_label, titleopts.object_name.lower()),
'admin/%s/delete_confirmation.html' % app_label,
'admin/delete_confirmation.html'
], context)
def preview_article(self, request, object_id, language):
"""Redirecting preview function based on draft_id
"""
article = get_object_or_404(self.model, id=object_id)
attrs = '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
attrs += '&language=' + language
with force_language(language):
url = article.get_absolute_url(language) + attrs
return HttpResponseRedirect(url)
|
misli/django-cms-articles
|
cms_articles/admin/article.py
|
ArticleAdmin.preview_article
|
python
|
def preview_article(self, request, object_id, language):
article = get_object_or_404(self.model, id=object_id)
attrs = '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON')
attrs += '&language=' + language
with force_language(language):
url = article.get_absolute_url(language) + attrs
return HttpResponseRedirect(url)
|
Redirecting preview function based on draft_id
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/admin/article.py#L439-L447
| null |
class ArticleAdmin(PlaceholderAdminMixin, admin.ModelAdmin):
change_list_template = 'admin/cms_articles/article_changelist.html'
search_fields = ('=id', 'title_set__slug', 'title_set__title', 'title_set__description')
list_display = ('__str__', 'order_date', 'preview_link') + tuple(
'lang_{}'.format(lang) for lang in get_language_list()
)
list_filter = ['tree', 'attributes', 'categories', 'template', 'changed_by']
date_hierarchy = 'order_date'
filter_horizontal = ['attributes', 'categories']
preview_template = get_template('admin/cms_articles/article/change_list_preview.html')
def preview_link(self, obj):
return self.preview_template.render({
'article': obj,
'lang': _thread_locals.language,
'request': _thread_locals.request,
})
preview_link.short_description = _('Show')
preview_link.allow_tags = True
lang_template = get_template('admin/cms_articles/article/change_list_lang.html')
def __getattr__(self, name):
if name.startswith('lang_'):
lang = name[len('lang_'):]
def lang_dropdown(obj):
return self.lang_template.render({
'article': obj,
'lang': lang,
'language': _thread_locals.language,
'has_change_permission': obj.has_change_permission(_thread_locals.request),
'has_publish_permission': obj.has_publish_permission(_thread_locals.request),
'request': _thread_locals.request,
})
lang_dropdown.short_description = lang
lang_dropdown.allow_tags = True
return lang_dropdown
raise AttributeError(name)
def get_fieldsets(self, request, obj=None):
language_dependent = ['title', 'slug', 'description', 'content', 'page_title',
'menu_title', 'meta_description', 'image']
if obj:
language_dependent.remove('content')
return [
(None, {'fields': ['tree', 'template']}),
(_('Language dependent settings'), {'fields': language_dependent}),
(_('Other settings'), {'fields': ['attributes', 'categories', 'publication_date',
'publication_end_date', 'login_required']}),
]
def get_urls(self):
"""Get the admin urls
"""
info = '%s_%s' % (self.model._meta.app_label, self.model._meta.model_name)
def pat(regex, fn):
return url(regex, self.admin_site.admin_view(fn), name='%s_%s' % (info, fn.__name__))
url_patterns = [
pat(r'^([0-9]+)/delete-translation/$', self.delete_translation),
pat(r'^([0-9]+)/([a-z\-]+)/publish/$', self.publish_article),
pat(r'^([0-9]+)/([a-z\-]+)/unpublish/$', self.unpublish),
pat(r'^([0-9]+)/([a-z\-]+)/preview/$', self.preview_article),
]
url_patterns += super(ArticleAdmin, self).get_urls()
return url_patterns
def get_queryset(self, request):
return super(ArticleAdmin, self).get_queryset(request).filter(
tree__node__site_id=settings.SITE_ID,
publisher_is_draft=True,
)
def save_model(self, request, obj, form, change):
new = obj.id is None
super(ArticleAdmin, self).save_model(request, obj, form, change)
Title.objects.set_or_create(request, obj, form, form.cleaned_data['language'])
if new and form.cleaned_data['content']:
add_content(
obj,
language=form.cleaned_data['language'],
slot=settings.CMS_ARTICLES_SLOT,
content=form.cleaned_data['content'],
)
SLUG_REGEXP = re.compile(settings.CMS_ARTICLES_SLUG_REGEXP)
def get_form(self, request, obj=None, **kwargs):
"""
Get ArticleForm for the Article model and modify its fields depending on
the request.
"""
language = get_language_from_request(request)
form = super(ArticleAdmin, self).get_form(
request, obj,
form=(obj and ArticleForm or ArticleCreateForm),
**kwargs
)
# get_form method operates by overriding initial fields value which
# may persist across invocation. Code below deepcopies fields definition
# to avoid leaks
for field in tuple(form.base_fields.keys()):
form.base_fields[field] = copy.deepcopy(form.base_fields[field])
if 'language' in form.base_fields:
form.base_fields['language'].initial = language
if obj:
title_obj = obj.get_title_obj(language=language, fallback=False, force_reload=True)
if hasattr(title_obj, 'id'):
for name in ('title', 'description', 'page_title', 'menu_title', 'meta_description', 'image'):
if name in form.base_fields:
form.base_fields[name].initial = getattr(title_obj, name)
try:
slug = self.SLUG_REGEXP.search(title_obj.slug).groups()[settings.CMS_ARTICLES_SLUG_GROUP_INDEX]
except AttributeError:
warnings.warn('Failed to parse slug from CMS_ARTICLES_SLUG_REGEXP. '
'It probably doesn\'t correspond to CMS_ARTICLES_SLUG_FORMAT.')
slug = title_obj.slug
form.base_fields['slug'].initial = slug
return form
def get_unihandecode_context(self, language):
if language[:2] in get_cms_setting('UNIHANDECODE_DECODERS'):
uhd_lang = language[:2]
else:
uhd_lang = get_cms_setting('UNIHANDECODE_DEFAULT_DECODER')
uhd_host = get_cms_setting('UNIHANDECODE_HOST')
uhd_version = get_cms_setting('UNIHANDECODE_VERSION')
if uhd_lang and uhd_host and uhd_version:
uhd_urls = [
'%sunihandecode-%s.core.min.js' % (uhd_host, uhd_version),
'%sunihandecode-%s.%s.min.js' % (uhd_host, uhd_version, uhd_lang),
]
else:
uhd_urls = []
return {'unihandecode_lang': uhd_lang, 'unihandecode_urls': uhd_urls}
def changelist_view(self, request, extra_context=None):
_thread_locals.request = request
_thread_locals.language = get_language_from_request(request)
return super(ArticleAdmin, self).changelist_view(request, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
extra_context = self.update_language_tab_context(request, context=extra_context)
extra_context.update(self.get_unihandecode_context(extra_context['language']))
return super(ArticleAdmin, self).add_view(request, form_url, extra_context=extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
extra_context = self.update_language_tab_context(request, context=extra_context)
language = extra_context['language']
extra_context.update(self.get_unihandecode_context(language))
response = super(ArticleAdmin, self).change_view(request, object_id,
form_url=form_url, extra_context=extra_context)
if language and response.status_code == 302 and response._headers['location'][1] == request.path_info:
location = response._headers['location']
response._headers['location'] = (location[0], '%s?language=%s' % (location[1], language))
return response
def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
# add context variables
filled_languages = []
if obj:
filled_languages = [t[0] for t in obj.title_set.filter(title__isnull=False).values_list('language')]
allowed_languages = [lang[0] for lang in get_language_tuple()]
context.update({
'filled_languages': [lang for lang in filled_languages if lang in allowed_languages],
})
return super(ArticleAdmin, self).render_change_form(request, context, add, change, form_url, obj)
def update_language_tab_context(self, request, context=None):
if not context:
context = {}
language = get_language_from_request(request)
languages = get_language_tuple()
context.update({
'language': language,
'languages': languages,
'language_tabs': languages,
'show_language_tabs': len(list(languages)) > 1,
})
return context
@require_POST
@transaction.atomic
def publish_article(self, request, article_id, language):
try:
article = Article.objects.get(id=article_id, publisher_is_draft=True)
except Article.DoesNotExist:
article = None
# ensure user has permissions to publish this article
if article:
if not self.has_change_permission(request):
return HttpResponseForbidden(force_text(_('You do not have permission to publish this article')))
article.publish(language)
statics = request.GET.get('statics', '')
if not statics and not article:
raise Http404('No article or stack found for publishing.')
all_published = True
if statics:
static_ids = statics.split(',')
for pk in static_ids:
static_placeholder = StaticPlaceholder.objects.get(pk=pk)
published = static_placeholder.publish(request, language)
if not published:
all_published = False
if article:
if all_published:
messages.info(request, _('The content was successfully published.'))
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Article).pk,
object_id=article_id,
object_repr=article.get_title(language),
action_flag=CHANGE,
)
else:
messages.warning(request, _('There was a problem publishing your content'))
if 'redirect' in request.GET:
return HttpResponseRedirect(request.GET['redirect'])
referrer = request.META.get('HTTP_REFERER', '')
path = admin_reverse('cms_articles_article_changelist')
if request.GET.get('redirect_language'):
path = '%s?language=%s&article_id=%s' % (
path,
request.GET.get('redirect_language'),
request.GET.get('redirect_article_id')
)
if admin_reverse('index') not in referrer:
if all_published:
if article:
if article.get_publisher_state(language) == PUBLISHER_STATE_PENDING:
path = article.get_absolute_url(language, fallback=True)
else:
public_article = Article.objects.get(publisher_public=article.pk)
path = '%s?%s' % (
public_article.get_absolute_url(language, fallback=True),
get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
)
else:
path = '%s?%s' % (referrer, get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF'))
else:
path = '/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_OFF')
return HttpResponseRedirect(path)
@require_POST
@transaction.atomic
def unpublish(self, request, article_id, language):
"""
Publish or unpublish a language of a article
"""
article = get_object_or_404(self.model, pk=article_id)
if not article.has_publish_permission(request):
return HttpResponseForbidden(force_text(_('You do not have permission to unpublish this article')))
if not article.publisher_public_id:
return HttpResponseForbidden(force_text(_('This article was never published')))
try:
article.unpublish(language)
message = _('The %(language)s article "%(article)s" was successfully unpublished') % {
'language': get_language_object(language)['name'], 'article': article}
messages.info(request, message)
LogEntry.objects.log_action(
user_id=request.user.id,
content_type_id=ContentType.objects.get_for_model(Article).pk,
object_id=article_id,
object_repr=article.get_title(),
action_flag=CHANGE,
change_message=message,
)
except RuntimeError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
except ValidationError:
exc = sys.exc_info()[1]
messages.error(request, exc.message)
path = admin_reverse('cms_articles_article_changelist')
if request.GET.get('redirect_language'):
path = '%s?language=%s&article_id=%s' % (
path,
request.GET.get('redirect_language'),
request.GET.get('redirect_article_id')
)
return HttpResponseRedirect(path)
def delete_translation(self, request, object_id, extra_context=None):
if 'language' in request.GET:
language = request.GET['language']
else:
language = get_language_from_request(request)
opts = Article._meta
titleopts = Title._meta
app_label = titleopts.app_label
pluginopts = CMSPlugin._meta
try:
obj = self.get_queryset(request).get(pk=unquote(object_id))
except self.model.DoesNotExist:
# Don't raise Http404 just yet, because we haven't checked
# permissions yet. We don't want an unauthenticated user to be able
# to determine whether a given object exists.
obj = None
if not self.has_delete_permission(request, obj):
return HttpResponseForbidden(force_text(_('You do not have permission to change this article')))
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') % {
'name': force_text(opts.verbose_name),
'key': escape(object_id)
})
if not len(list(obj.get_languages())) > 1:
raise Http404(_('There only exists one translation for this article'))
titleobj = get_object_or_404(Title, article__id=object_id, language=language)
saved_plugins = CMSPlugin.objects.filter(placeholder__article__id=object_id, language=language)
using = router.db_for_read(self.model)
kwargs = {
'admin_site': self.admin_site,
'user': request.user,
'using': using
}
deleted_objects, __, perms_needed = get_deleted_objects(
[titleobj],
titleopts,
**kwargs
)[:3]
to_delete_plugins, __, perms_needed_plugins = get_deleted_objects(
saved_plugins,
pluginopts,
**kwargs
)[:3]
deleted_objects.append(to_delete_plugins)
perms_needed = set(list(perms_needed) + list(perms_needed_plugins))
if request.method == 'POST':
if perms_needed:
raise PermissionDenied
message = _('Title and plugins with language %(language)s was deleted') % {
'language': force_text(get_language_object(language)['name'])
}
self.log_change(request, titleobj, message)
messages.info(request, message)
titleobj.delete()
for p in saved_plugins:
p.delete()
public = obj.publisher_public
if public:
public.save()
if not self.has_change_permission(request, None):
return HttpResponseRedirect(admin_reverse('index'))
return HttpResponseRedirect(admin_reverse('cms_articles_article_changelist'))
context = {
'title': _('Are you sure?'),
'object_name': force_text(titleopts.verbose_name),
'object': titleobj,
'deleted_objects': deleted_objects,
'perms_lacking': perms_needed,
'opts': opts,
'root_path': admin_reverse('index'),
'app_label': app_label,
}
context.update(extra_context or {})
request.current_app = self.admin_site.name
return render(request, self.delete_confirmation_template or [
'admin/%s/%s/delete_confirmation.html' % (app_label, titleopts.object_name.lower()),
'admin/%s/delete_confirmation.html' % app_label,
'admin/delete_confirmation.html'
], context)
|
misli/django-cms-articles
|
cms_articles/signals/title.py
|
pre_save_title
|
python
|
def pre_save_title(instance, **kwargs):
''' Update article.languages
'''
if instance.article.languages:
languages = instance.article.languages.split(',')
else:
languages = []
if instance.language not in languages:
languages.append(instance.language)
instance.article.languages = ','.join(languages)
instance.article._publisher_keep_state = True
instance.article.save(no_signals=True)
|
Update article.languages
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/signals/title.py#L1-L12
| null |
def pre_delete_title(instance, **kwargs):
''' Update article.languages
'''
if instance.article.languages:
languages = instance.article.languages.split(',')
else:
languages = []
if instance.language in languages:
languages.remove(instance.language)
instance.article.languages = ','.join(languages)
instance.article._publisher_keep_state = True
instance.article.save(no_signals=True)
|
misli/django-cms-articles
|
cms_articles/signals/title.py
|
pre_delete_title
|
python
|
def pre_delete_title(instance, **kwargs):
''' Update article.languages
'''
if instance.article.languages:
languages = instance.article.languages.split(',')
else:
languages = []
if instance.language in languages:
languages.remove(instance.language)
instance.article.languages = ','.join(languages)
instance.article._publisher_keep_state = True
instance.article.save(no_signals=True)
|
Update article.languages
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/signals/title.py#L15-L26
| null |
def pre_save_title(instance, **kwargs):
''' Update article.languages
'''
if instance.article.languages:
languages = instance.article.languages.split(',')
else:
languages = []
if instance.language not in languages:
languages.append(instance.language)
instance.article.languages = ','.join(languages)
instance.article._publisher_keep_state = True
instance.article.save(no_signals=True)
|
misli/django-cms-articles
|
cms_articles/api.py
|
create_article
|
python
|
def create_article(tree, template, title, language, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
created_by=None, image=None, publication_date=None, publication_end_date=None,
published=False, login_required=False, creation_date=None, categories=[]):
# validate tree
tree = tree.get_public_object()
assert tree.application_urls == 'CMSArticlesApp'
# validate template
assert template in [tpl[0] for tpl in settings.CMS_ARTICLES_TEMPLATES]
get_template(template)
# validate language:
assert language in get_language_list(tree.node.site_id), settings.CMS_LANGUAGES.get(tree.node.site_id)
# validate publication date
if publication_date:
assert isinstance(publication_date, datetime.date)
# validate publication end date
if publication_end_date:
assert isinstance(publication_end_date, datetime.date)
# validate creation date
if not creation_date:
creation_date = publication_date
if creation_date:
assert isinstance(creation_date, datetime.date)
# get username
if created_by:
try:
username = created_by.get_username()
except Exception:
username = force_text(created_by)
else:
username = 'script'
with current_user(username):
# create article
article = Article.objects.create(
tree=tree,
template=template,
login_required=login_required,
creation_date=creation_date,
publication_date=publication_date,
publication_end_date=publication_end_date,
languages=language,
)
for category in categories:
article.categories.add(category)
# create title
create_title(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
creation_date=creation_date,
image=image,
)
# publish article
if published:
article.publish(language)
return article.reload()
|
Create a CMS Article and it's title for the given language
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/api.py#L26-L100
|
[
"def create_title(article, language, title, slug=None, description=None,\n page_title=None, menu_title=None, meta_description=None,\n creation_date=None, image=None):\n \"\"\"\n Create an article title.\n \"\"\"\n # validate article\n assert isinstance(article, Article)\n\n # validate language:\n assert language in get_language_list(article.tree.node.site_id)\n\n # validate creation date\n if creation_date:\n assert isinstance(creation_date, datetime.date)\n\n # set default slug:\n if not slug:\n slug = settings.CMS_ARTICLES_SLUG_FORMAT.format(\n now=creation_date or now(),\n slug=slugify(title),\n )\n\n # find unused slug:\n base_slug = slug\n qs = Title.objects.filter(language=language)\n used_slugs = list(s for s in qs.values_list('slug', flat=True) if s.startswith(base_slug))\n i = 1\n while slug in used_slugs:\n slug = '%s-%s' % (base_slug, i)\n i += 1\n\n # create title\n title = Title.objects.create(\n article=article,\n language=language,\n title=title,\n slug=slug,\n description=description,\n page_title=page_title,\n menu_title=menu_title,\n meta_description=meta_description,\n image=image,\n )\n\n return title\n"
] |
import datetime
from cms.api import add_plugin
from cms.utils.i18n import get_language_list
from cms.utils.permissions import current_user
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.utils.encoding import force_text
from django.utils.timezone import now
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from .conf import settings
from .models import Article, Title
"""
Public Python API to create CMS articles.
WARNING: None of the functions defined in this module checks for permissions.
You must implement the necessary permission checks in your own code before
calling these methods!
"""
def create_title(article, language, title, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
creation_date=None, image=None):
"""
Create an article title.
"""
# validate article
assert isinstance(article, Article)
# validate language:
assert language in get_language_list(article.tree.node.site_id)
# validate creation date
if creation_date:
assert isinstance(creation_date, datetime.date)
# set default slug:
if not slug:
slug = settings.CMS_ARTICLES_SLUG_FORMAT.format(
now=creation_date or now(),
slug=slugify(title),
)
# find unused slug:
base_slug = slug
qs = Title.objects.filter(language=language)
used_slugs = list(s for s in qs.values_list('slug', flat=True) if s.startswith(base_slug))
i = 1
while slug in used_slugs:
slug = '%s-%s' % (base_slug, i)
i += 1
# create title
title = Title.objects.create(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
image=image,
)
return title
def add_content(obj, language, slot, content):
"""
Adds a TextPlugin with given content to given slot
"""
placeholder = obj.placeholders.get(slot=slot)
add_plugin(placeholder, TextPlugin, language, body=content)
def publish_article(article, language, changed_by=None):
"""
Publish an article. This sets `article.published` to `True`
and calls article.publish() which does the actual publishing.
"""
article = article.reload()
# get username
if changed_by:
username = changed_by.get_username()
else:
username = 'script'
with current_user(username):
article.publish(language)
return article.reload()
|
misli/django-cms-articles
|
cms_articles/api.py
|
create_title
|
python
|
def create_title(article, language, title, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
creation_date=None, image=None):
# validate article
assert isinstance(article, Article)
# validate language:
assert language in get_language_list(article.tree.node.site_id)
# validate creation date
if creation_date:
assert isinstance(creation_date, datetime.date)
# set default slug:
if not slug:
slug = settings.CMS_ARTICLES_SLUG_FORMAT.format(
now=creation_date or now(),
slug=slugify(title),
)
# find unused slug:
base_slug = slug
qs = Title.objects.filter(language=language)
used_slugs = list(s for s in qs.values_list('slug', flat=True) if s.startswith(base_slug))
i = 1
while slug in used_slugs:
slug = '%s-%s' % (base_slug, i)
i += 1
# create title
title = Title.objects.create(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
image=image,
)
return title
|
Create an article title.
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/api.py#L103-L148
| null |
import datetime
from cms.api import add_plugin
from cms.utils.i18n import get_language_list
from cms.utils.permissions import current_user
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.utils.encoding import force_text
from django.utils.timezone import now
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from .conf import settings
from .models import Article, Title
"""
Public Python API to create CMS articles.
WARNING: None of the functions defined in this module checks for permissions.
You must implement the necessary permission checks in your own code before
calling these methods!
"""
def create_article(tree, template, title, language, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
created_by=None, image=None, publication_date=None, publication_end_date=None,
published=False, login_required=False, creation_date=None, categories=[]):
"""
Create a CMS Article and it's title for the given language
"""
# validate tree
tree = tree.get_public_object()
assert tree.application_urls == 'CMSArticlesApp'
# validate template
assert template in [tpl[0] for tpl in settings.CMS_ARTICLES_TEMPLATES]
get_template(template)
# validate language:
assert language in get_language_list(tree.node.site_id), settings.CMS_LANGUAGES.get(tree.node.site_id)
# validate publication date
if publication_date:
assert isinstance(publication_date, datetime.date)
# validate publication end date
if publication_end_date:
assert isinstance(publication_end_date, datetime.date)
# validate creation date
if not creation_date:
creation_date = publication_date
if creation_date:
assert isinstance(creation_date, datetime.date)
# get username
if created_by:
try:
username = created_by.get_username()
except Exception:
username = force_text(created_by)
else:
username = 'script'
with current_user(username):
# create article
article = Article.objects.create(
tree=tree,
template=template,
login_required=login_required,
creation_date=creation_date,
publication_date=publication_date,
publication_end_date=publication_end_date,
languages=language,
)
for category in categories:
article.categories.add(category)
# create title
create_title(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
creation_date=creation_date,
image=image,
)
# publish article
if published:
article.publish(language)
return article.reload()
def add_content(obj, language, slot, content):
"""
Adds a TextPlugin with given content to given slot
"""
placeholder = obj.placeholders.get(slot=slot)
add_plugin(placeholder, TextPlugin, language, body=content)
def publish_article(article, language, changed_by=None):
"""
Publish an article. This sets `article.published` to `True`
and calls article.publish() which does the actual publishing.
"""
article = article.reload()
# get username
if changed_by:
username = changed_by.get_username()
else:
username = 'script'
with current_user(username):
article.publish(language)
return article.reload()
|
misli/django-cms-articles
|
cms_articles/api.py
|
add_content
|
python
|
def add_content(obj, language, slot, content):
placeholder = obj.placeholders.get(slot=slot)
add_plugin(placeholder, TextPlugin, language, body=content)
|
Adds a TextPlugin with given content to given slot
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/api.py#L151-L156
| null |
import datetime
from cms.api import add_plugin
from cms.utils.i18n import get_language_list
from cms.utils.permissions import current_user
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.utils.encoding import force_text
from django.utils.timezone import now
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from .conf import settings
from .models import Article, Title
"""
Public Python API to create CMS articles.
WARNING: None of the functions defined in this module checks for permissions.
You must implement the necessary permission checks in your own code before
calling these methods!
"""
def create_article(tree, template, title, language, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
created_by=None, image=None, publication_date=None, publication_end_date=None,
published=False, login_required=False, creation_date=None, categories=[]):
"""
Create a CMS Article and it's title for the given language
"""
# validate tree
tree = tree.get_public_object()
assert tree.application_urls == 'CMSArticlesApp'
# validate template
assert template in [tpl[0] for tpl in settings.CMS_ARTICLES_TEMPLATES]
get_template(template)
# validate language:
assert language in get_language_list(tree.node.site_id), settings.CMS_LANGUAGES.get(tree.node.site_id)
# validate publication date
if publication_date:
assert isinstance(publication_date, datetime.date)
# validate publication end date
if publication_end_date:
assert isinstance(publication_end_date, datetime.date)
# validate creation date
if not creation_date:
creation_date = publication_date
if creation_date:
assert isinstance(creation_date, datetime.date)
# get username
if created_by:
try:
username = created_by.get_username()
except Exception:
username = force_text(created_by)
else:
username = 'script'
with current_user(username):
# create article
article = Article.objects.create(
tree=tree,
template=template,
login_required=login_required,
creation_date=creation_date,
publication_date=publication_date,
publication_end_date=publication_end_date,
languages=language,
)
for category in categories:
article.categories.add(category)
# create title
create_title(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
creation_date=creation_date,
image=image,
)
# publish article
if published:
article.publish(language)
return article.reload()
def create_title(article, language, title, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
creation_date=None, image=None):
"""
Create an article title.
"""
# validate article
assert isinstance(article, Article)
# validate language:
assert language in get_language_list(article.tree.node.site_id)
# validate creation date
if creation_date:
assert isinstance(creation_date, datetime.date)
# set default slug:
if not slug:
slug = settings.CMS_ARTICLES_SLUG_FORMAT.format(
now=creation_date or now(),
slug=slugify(title),
)
# find unused slug:
base_slug = slug
qs = Title.objects.filter(language=language)
used_slugs = list(s for s in qs.values_list('slug', flat=True) if s.startswith(base_slug))
i = 1
while slug in used_slugs:
slug = '%s-%s' % (base_slug, i)
i += 1
# create title
title = Title.objects.create(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
image=image,
)
return title
def publish_article(article, language, changed_by=None):
"""
Publish an article. This sets `article.published` to `True`
and calls article.publish() which does the actual publishing.
"""
article = article.reload()
# get username
if changed_by:
username = changed_by.get_username()
else:
username = 'script'
with current_user(username):
article.publish(language)
return article.reload()
|
misli/django-cms-articles
|
cms_articles/api.py
|
publish_article
|
python
|
def publish_article(article, language, changed_by=None):
article = article.reload()
# get username
if changed_by:
username = changed_by.get_username()
else:
username = 'script'
with current_user(username):
article.publish(language)
return article.reload()
|
Publish an article. This sets `article.published` to `True`
and calls article.publish() which does the actual publishing.
|
train
|
https://github.com/misli/django-cms-articles/blob/d96ac77e049022deb4c70d268e4eab74d175145c/cms_articles/api.py#L159-L175
| null |
import datetime
from cms.api import add_plugin
from cms.utils.i18n import get_language_list
from cms.utils.permissions import current_user
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.utils.encoding import force_text
from django.utils.timezone import now
from djangocms_text_ckeditor.cms_plugins import TextPlugin
from .conf import settings
from .models import Article, Title
"""
Public Python API to create CMS articles.
WARNING: None of the functions defined in this module checks for permissions.
You must implement the necessary permission checks in your own code before
calling these methods!
"""
def create_article(tree, template, title, language, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
created_by=None, image=None, publication_date=None, publication_end_date=None,
published=False, login_required=False, creation_date=None, categories=[]):
"""
Create a CMS Article and it's title for the given language
"""
# validate tree
tree = tree.get_public_object()
assert tree.application_urls == 'CMSArticlesApp'
# validate template
assert template in [tpl[0] for tpl in settings.CMS_ARTICLES_TEMPLATES]
get_template(template)
# validate language:
assert language in get_language_list(tree.node.site_id), settings.CMS_LANGUAGES.get(tree.node.site_id)
# validate publication date
if publication_date:
assert isinstance(publication_date, datetime.date)
# validate publication end date
if publication_end_date:
assert isinstance(publication_end_date, datetime.date)
# validate creation date
if not creation_date:
creation_date = publication_date
if creation_date:
assert isinstance(creation_date, datetime.date)
# get username
if created_by:
try:
username = created_by.get_username()
except Exception:
username = force_text(created_by)
else:
username = 'script'
with current_user(username):
# create article
article = Article.objects.create(
tree=tree,
template=template,
login_required=login_required,
creation_date=creation_date,
publication_date=publication_date,
publication_end_date=publication_end_date,
languages=language,
)
for category in categories:
article.categories.add(category)
# create title
create_title(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
creation_date=creation_date,
image=image,
)
# publish article
if published:
article.publish(language)
return article.reload()
def create_title(article, language, title, slug=None, description=None,
page_title=None, menu_title=None, meta_description=None,
creation_date=None, image=None):
"""
Create an article title.
"""
# validate article
assert isinstance(article, Article)
# validate language:
assert language in get_language_list(article.tree.node.site_id)
# validate creation date
if creation_date:
assert isinstance(creation_date, datetime.date)
# set default slug:
if not slug:
slug = settings.CMS_ARTICLES_SLUG_FORMAT.format(
now=creation_date or now(),
slug=slugify(title),
)
# find unused slug:
base_slug = slug
qs = Title.objects.filter(language=language)
used_slugs = list(s for s in qs.values_list('slug', flat=True) if s.startswith(base_slug))
i = 1
while slug in used_slugs:
slug = '%s-%s' % (base_slug, i)
i += 1
# create title
title = Title.objects.create(
article=article,
language=language,
title=title,
slug=slug,
description=description,
page_title=page_title,
menu_title=menu_title,
meta_description=meta_description,
image=image,
)
return title
def add_content(obj, language, slot, content):
"""
Adds a TextPlugin with given content to given slot
"""
placeholder = obj.placeholders.get(slot=slot)
add_plugin(placeholder, TextPlugin, language, body=content)
|
epio/mantrid
|
mantrid/socketmeld.py
|
SocketMelder.piper
|
python
|
def piper(self, in_sock, out_sock, out_addr, onkill):
"Worker thread for data reading"
try:
while True:
written = in_sock.recv(32768)
if not written:
try:
out_sock.shutdown(socket.SHUT_WR)
except socket.error:
self.threads[onkill].kill()
break
try:
out_sock.sendall(written)
except socket.error:
pass
self.data_handled += len(written)
except greenlet.GreenletExit:
return
|
Worker thread for data reading
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/socketmeld.py#L16-L33
| null |
class SocketMelder(object):
"""
Takes two sockets and directly connects them together.
"""
def __init__(self, client, server):
self.client = client
self.server = server
self.data_handled = 0
def run(self):
self.threads = {
"ctos": eventlet.spawn(self.piper, self.server, self.client, "client", "stoc"),
"stoc": eventlet.spawn(self.piper, self.client, self.server, "server", "ctos"),
}
try:
self.threads['stoc'].wait()
except (greenlet.GreenletExit, socket.error):
pass
try:
self.threads['ctos'].wait()
except (greenlet.GreenletExit, socket.error):
pass
self.server.close()
self.client.close()
return self.data_handled
|
epio/mantrid
|
mantrid/management.py
|
ManagementApp.handle
|
python
|
def handle(self, environ, start_response):
"Main entry point"
# Pass off to the router
try:
handler = self.route(
environ['PATH_INFO'].lower(),
environ['REQUEST_METHOD'].lower(),
)
if handler is None:
raise HttpNotFound()
# Handle errors
except HttpNotFound:
start_response('404 Not Found', [('Content-Type', 'application/json')])
return [json.dumps({"error": "not_found"})]
except HttpMethodNotAllowed:
start_response('405 Method Not Allowed', [('Content-Type', 'application/json')])
return [json.dumps({"error": "method_not_allowed"})]
# Dispatch to the named method
body = environ['wsgi.input'].read()
if body:
body = json.loads(body)
response = handler(
environ['PATH_INFO'].lower(),
body,
)
# Send the response
start_response('200 OK', [('Content-Type', 'application/json')])
return [json.dumps(response)]
|
Main entry point
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/management.py#L33-L60
|
[
"def route(self, path, method):\n # Simple routing for paths\n if path == \"/\":\n raise HttpMethodNotAllowed()\n elif path == \"/stats/\":\n if method == \"get\":\n return self.get_all_stats\n else:\n raise HttpMethodNotAllowed()\n elif self.stats_host_regex.match(path):\n if method == \"get\":\n return self.get_single_stats\n else:\n raise HttpMethodNotAllowed()\n elif path == \"/hostname/\":\n if method == \"get\":\n return self.get_all\n elif method == \"put\":\n return self.set_all\n else:\n raise HttpMethodNotAllowed()\n elif self.host_regex.match(path):\n if method == \"get\":\n return self.get_single\n elif method == \"put\":\n return self.set_single\n elif method == \"delete\":\n return self.delete_single\n else:\n raise HttpMethodNotAllowed()\n else:\n raise HttpNotFound()\n",
"def get_all_stats(self, path, body):\n return self.balancer.stats\n"
] |
class ManagementApp(object):
"""
Management WSGI app for the Mantrid loadbalancer.
Allows endpoints to be changed via HTTP requests to
the management port.
"""
host_regex = re.compile(r"^/hostname/([^/]+)/?$")
stats_host_regex = re.compile(r"^/stats/([^/]+)/?$")
def __init__(self, balancer):
self.balancer = balancer
def route(self, path, method):
# Simple routing for paths
if path == "/":
raise HttpMethodNotAllowed()
elif path == "/stats/":
if method == "get":
return self.get_all_stats
else:
raise HttpMethodNotAllowed()
elif self.stats_host_regex.match(path):
if method == "get":
return self.get_single_stats
else:
raise HttpMethodNotAllowed()
elif path == "/hostname/":
if method == "get":
return self.get_all
elif method == "put":
return self.set_all
else:
raise HttpMethodNotAllowed()
elif self.host_regex.match(path):
if method == "get":
return self.get_single
elif method == "put":
return self.set_single
elif method == "delete":
return self.delete_single
else:
raise HttpMethodNotAllowed()
else:
raise HttpNotFound()
### Handling methods ###
def host_errors(self, hostname, details):
"""
Validates the format of a host entry
Returns an error string, or None if it is valid.
"""
if not hostname or not isinstance(hostname, basestring):
return "hostname_invalid"
if not isinstance(details, list):
return "host_details_not_list"
if len(details) != 3:
return "host_details_wrong_length"
if details[0] not in self.balancer.action_mapping:
return "host_action_invalid:%s" % details[0]
if not isinstance(details[1], dict):
return "host_kwargs_not_dict"
if not isinstance(details[2], bool):
return "host_match_subdomains_not_bool"
return None
def get_all(self, path, body):
return self.balancer.hosts
def set_all(self, path, body):
"Replaces the hosts list with the provided input"
# Do some error checking
if not isinstance(body, dict):
raise HttpBadRequest("body_not_a_dict")
for hostname, details in body.items():
error = self.host_errors(hostname, details)
if error:
raise HttpBadRequest("%s:%s" % (hostname, error))
# Replace
old_hostnames = set(self.balancer.hosts.keys())
new_hostnames = set(body.keys())
self.balancer.hosts = body
# Clean up stats dict
for hostname in new_hostnames - old_hostnames:
self.balancer.stats[hostname] = {}
for hostname in old_hostnames - new_hostnames:
try:
del self.balancer.stats[hostname]
except KeyError:
pass
return {"ok": True}
def get_single(self, path, body):
host = self.host_regex.match(path).group(1)
if host in self.balancer.hosts:
return self.balancer.hosts[host]
else:
return None
def set_single(self, path, body):
host = self.host_regex.match(path).group(1)
error = self.host_errors(host, body)
if error:
raise HttpBadRequest("%s:%s" % (host, error))
self.balancer.hosts[host] = body
self.balancer.stats[host] = {}
return {"ok": True}
def delete_single(self, path, body):
host = self.host_regex.match(path).group(1)
try:
del self.balancer.hosts[host]
except KeyError:
pass
try:
del self.balancer.stats[host]
except KeyError:
pass
return {"ok": True}
def get_all_stats(self, path, body):
return self.balancer.stats
def get_single_stats(self, path, body):
host = self.stats_host_regex.match(path).group(1)
return self.balancer.stats.get(host, {})
|
epio/mantrid
|
mantrid/management.py
|
ManagementApp.host_errors
|
python
|
def host_errors(self, hostname, details):
if not hostname or not isinstance(hostname, basestring):
return "hostname_invalid"
if not isinstance(details, list):
return "host_details_not_list"
if len(details) != 3:
return "host_details_wrong_length"
if details[0] not in self.balancer.action_mapping:
return "host_action_invalid:%s" % details[0]
if not isinstance(details[1], dict):
return "host_kwargs_not_dict"
if not isinstance(details[2], bool):
return "host_match_subdomains_not_bool"
return None
|
Validates the format of a host entry
Returns an error string, or None if it is valid.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/management.py#L97-L114
| null |
class ManagementApp(object):
"""
Management WSGI app for the Mantrid loadbalancer.
Allows endpoints to be changed via HTTP requests to
the management port.
"""
host_regex = re.compile(r"^/hostname/([^/]+)/?$")
stats_host_regex = re.compile(r"^/stats/([^/]+)/?$")
def __init__(self, balancer):
self.balancer = balancer
def handle(self, environ, start_response):
"Main entry point"
# Pass off to the router
try:
handler = self.route(
environ['PATH_INFO'].lower(),
environ['REQUEST_METHOD'].lower(),
)
if handler is None:
raise HttpNotFound()
# Handle errors
except HttpNotFound:
start_response('404 Not Found', [('Content-Type', 'application/json')])
return [json.dumps({"error": "not_found"})]
except HttpMethodNotAllowed:
start_response('405 Method Not Allowed', [('Content-Type', 'application/json')])
return [json.dumps({"error": "method_not_allowed"})]
# Dispatch to the named method
body = environ['wsgi.input'].read()
if body:
body = json.loads(body)
response = handler(
environ['PATH_INFO'].lower(),
body,
)
# Send the response
start_response('200 OK', [('Content-Type', 'application/json')])
return [json.dumps(response)]
def route(self, path, method):
# Simple routing for paths
if path == "/":
raise HttpMethodNotAllowed()
elif path == "/stats/":
if method == "get":
return self.get_all_stats
else:
raise HttpMethodNotAllowed()
elif self.stats_host_regex.match(path):
if method == "get":
return self.get_single_stats
else:
raise HttpMethodNotAllowed()
elif path == "/hostname/":
if method == "get":
return self.get_all
elif method == "put":
return self.set_all
else:
raise HttpMethodNotAllowed()
elif self.host_regex.match(path):
if method == "get":
return self.get_single
elif method == "put":
return self.set_single
elif method == "delete":
return self.delete_single
else:
raise HttpMethodNotAllowed()
else:
raise HttpNotFound()
### Handling methods ###
def get_all(self, path, body):
return self.balancer.hosts
def set_all(self, path, body):
"Replaces the hosts list with the provided input"
# Do some error checking
if not isinstance(body, dict):
raise HttpBadRequest("body_not_a_dict")
for hostname, details in body.items():
error = self.host_errors(hostname, details)
if error:
raise HttpBadRequest("%s:%s" % (hostname, error))
# Replace
old_hostnames = set(self.balancer.hosts.keys())
new_hostnames = set(body.keys())
self.balancer.hosts = body
# Clean up stats dict
for hostname in new_hostnames - old_hostnames:
self.balancer.stats[hostname] = {}
for hostname in old_hostnames - new_hostnames:
try:
del self.balancer.stats[hostname]
except KeyError:
pass
return {"ok": True}
def get_single(self, path, body):
host = self.host_regex.match(path).group(1)
if host in self.balancer.hosts:
return self.balancer.hosts[host]
else:
return None
def set_single(self, path, body):
host = self.host_regex.match(path).group(1)
error = self.host_errors(host, body)
if error:
raise HttpBadRequest("%s:%s" % (host, error))
self.balancer.hosts[host] = body
self.balancer.stats[host] = {}
return {"ok": True}
def delete_single(self, path, body):
host = self.host_regex.match(path).group(1)
try:
del self.balancer.hosts[host]
except KeyError:
pass
try:
del self.balancer.stats[host]
except KeyError:
pass
return {"ok": True}
def get_all_stats(self, path, body):
return self.balancer.stats
def get_single_stats(self, path, body):
host = self.stats_host_regex.match(path).group(1)
return self.balancer.stats.get(host, {})
|
epio/mantrid
|
mantrid/management.py
|
ManagementApp.set_all
|
python
|
def set_all(self, path, body):
"Replaces the hosts list with the provided input"
# Do some error checking
if not isinstance(body, dict):
raise HttpBadRequest("body_not_a_dict")
for hostname, details in body.items():
error = self.host_errors(hostname, details)
if error:
raise HttpBadRequest("%s:%s" % (hostname, error))
# Replace
old_hostnames = set(self.balancer.hosts.keys())
new_hostnames = set(body.keys())
self.balancer.hosts = body
# Clean up stats dict
for hostname in new_hostnames - old_hostnames:
self.balancer.stats[hostname] = {}
for hostname in old_hostnames - new_hostnames:
try:
del self.balancer.stats[hostname]
except KeyError:
pass
return {"ok": True}
|
Replaces the hosts list with the provided input
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/management.py#L119-L140
|
[
"def host_errors(self, hostname, details):\n \"\"\"\n Validates the format of a host entry\n Returns an error string, or None if it is valid.\n \"\"\"\n if not hostname or not isinstance(hostname, basestring):\n return \"hostname_invalid\"\n if not isinstance(details, list):\n return \"host_details_not_list\"\n if len(details) != 3:\n return \"host_details_wrong_length\"\n if details[0] not in self.balancer.action_mapping:\n return \"host_action_invalid:%s\" % details[0]\n if not isinstance(details[1], dict):\n return \"host_kwargs_not_dict\"\n if not isinstance(details[2], bool):\n return \"host_match_subdomains_not_bool\"\n return None\n"
] |
class ManagementApp(object):
"""
Management WSGI app for the Mantrid loadbalancer.
Allows endpoints to be changed via HTTP requests to
the management port.
"""
host_regex = re.compile(r"^/hostname/([^/]+)/?$")
stats_host_regex = re.compile(r"^/stats/([^/]+)/?$")
def __init__(self, balancer):
self.balancer = balancer
def handle(self, environ, start_response):
"Main entry point"
# Pass off to the router
try:
handler = self.route(
environ['PATH_INFO'].lower(),
environ['REQUEST_METHOD'].lower(),
)
if handler is None:
raise HttpNotFound()
# Handle errors
except HttpNotFound:
start_response('404 Not Found', [('Content-Type', 'application/json')])
return [json.dumps({"error": "not_found"})]
except HttpMethodNotAllowed:
start_response('405 Method Not Allowed', [('Content-Type', 'application/json')])
return [json.dumps({"error": "method_not_allowed"})]
# Dispatch to the named method
body = environ['wsgi.input'].read()
if body:
body = json.loads(body)
response = handler(
environ['PATH_INFO'].lower(),
body,
)
# Send the response
start_response('200 OK', [('Content-Type', 'application/json')])
return [json.dumps(response)]
def route(self, path, method):
# Simple routing for paths
if path == "/":
raise HttpMethodNotAllowed()
elif path == "/stats/":
if method == "get":
return self.get_all_stats
else:
raise HttpMethodNotAllowed()
elif self.stats_host_regex.match(path):
if method == "get":
return self.get_single_stats
else:
raise HttpMethodNotAllowed()
elif path == "/hostname/":
if method == "get":
return self.get_all
elif method == "put":
return self.set_all
else:
raise HttpMethodNotAllowed()
elif self.host_regex.match(path):
if method == "get":
return self.get_single
elif method == "put":
return self.set_single
elif method == "delete":
return self.delete_single
else:
raise HttpMethodNotAllowed()
else:
raise HttpNotFound()
### Handling methods ###
def host_errors(self, hostname, details):
"""
Validates the format of a host entry
Returns an error string, or None if it is valid.
"""
if not hostname or not isinstance(hostname, basestring):
return "hostname_invalid"
if not isinstance(details, list):
return "host_details_not_list"
if len(details) != 3:
return "host_details_wrong_length"
if details[0] not in self.balancer.action_mapping:
return "host_action_invalid:%s" % details[0]
if not isinstance(details[1], dict):
return "host_kwargs_not_dict"
if not isinstance(details[2], bool):
return "host_match_subdomains_not_bool"
return None
def get_all(self, path, body):
return self.balancer.hosts
def get_single(self, path, body):
host = self.host_regex.match(path).group(1)
if host in self.balancer.hosts:
return self.balancer.hosts[host]
else:
return None
def set_single(self, path, body):
host = self.host_regex.match(path).group(1)
error = self.host_errors(host, body)
if error:
raise HttpBadRequest("%s:%s" % (host, error))
self.balancer.hosts[host] = body
self.balancer.stats[host] = {}
return {"ok": True}
def delete_single(self, path, body):
host = self.host_regex.match(path).group(1)
try:
del self.balancer.hosts[host]
except KeyError:
pass
try:
del self.balancer.stats[host]
except KeyError:
pass
return {"ok": True}
def get_all_stats(self, path, body):
return self.balancer.stats
def get_single_stats(self, path, body):
host = self.stats_host_regex.match(path).group(1)
return self.balancer.stats.get(host, {})
|
epio/mantrid
|
mantrid/cli.py
|
MantridCli.action_list
|
python
|
def action_list(self):
"Lists all hosts on the LB"
format = "%-35s %-25s %-8s"
print format % ("HOST", "ACTION", "SUBDOMS")
for host, details in sorted(self.client.get_all().items()):
if details[0] in ("proxy", "mirror"):
action = "%s<%s>" % (
details[0],
",".join(
"%s:%s" % (host, port)
for host, port in details[1]['backends']
)
)
elif details[0] == "static":
action = "%s<%s>" % (
details[0],
details[1]['type'],
)
elif details[0] == "redirect":
action = "%s<%s>" % (
details[0],
details[1]['redirect_to'],
)
elif details[0] == "empty":
action = "%s<%s>" % (
details[0],
details[1]['code'],
)
else:
action = details[0]
print format % (host, action, details[2])
|
Lists all hosts on the LB
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/cli.py#L44-L74
| null |
class MantridCli(object):
"""Command line interface to Mantrid"""
def __init__(self, base_url):
self.client = MantridClient(base_url)
@classmethod
def main(cls):
cli = cls("http://localhost:8042")
cli.run(sys.argv)
@property
def action_names(self):
for method_name in dir(self):
if method_name.startswith("action_") \
and method_name != "action_names":
yield method_name[7:]
def run(self, argv):
# Work out what action we're doing
try:
action = argv[1]
except IndexError:
sys.stderr.write(
"Please provide an action (%s).\n" % (
", ".join(self.action_names),
)
)
sys.exit(1)
if action not in list(self.action_names):
sys.stderr.write(
"Action %s does not exist.\n" % (
action,
)
)
sys.exit(1)
# Run it
getattr(self, "action_%s" % action)(*argv[2:])
def action_set(self, hostname=None, action=None, subdoms=None, *args):
"Adds a hostname to the LB, or alters an existing one"
usage = "set <hostname> <action> <subdoms> [option=value, ...]"
if hostname is None:
sys.stderr.write("You must supply a hostname.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
if action is None:
sys.stderr.write("You must supply an action.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
if subdoms is None or subdoms.lower() not in ("true", "false"):
sys.stderr.write("You must supply True or False for the subdomains flag.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
# Grab options
options = {}
for arg in args:
if "=" not in arg:
sys.stderr.write("%s is not a valid option (no =)\n" % (
arg
))
sys.exit(1)
key, value = arg.split("=", 1)
options[key] = value
# Sanity-check options
if action in ("proxy, mirror") and "backends" not in options:
sys.stderr.write("The %s action requires a backends option.\n" % action)
sys.exit(1)
if action == "static" and "type" not in options:
sys.stderr.write("The %s action requires a type option.\n" % action)
sys.exit(1)
if action == "redirect" and "redirect_to" not in options:
sys.stderr.write("The %s action requires a redirect_to option.\n" % action)
sys.exit(1)
if action == "empty" and "code" not in options:
sys.stderr.write("The %s action requires a code option.\n" % action)
sys.exit(1)
# Expand some options from text to datastructure
if "backends" in options:
options['backends'] = [
(lambda x: (x[0], int(x[1])))(bit.split(":", 1))
for bit in options['backends'].split(",")
]
# Set!
self.client.set(
hostname,
[action, options, subdoms.lower() == "true"]
)
def action_delete(self, hostname):
"Deletes the hostname from the LB."
self.client.delete(
hostname,
)
def action_stats(self, hostname=None):
"Shows stats (possibly limited by hostname)"
format = "%-35s %-11s %-11s %-11s %-11s"
print format % ("HOST", "OPEN", "COMPLETED", "BYTES IN", "BYTES OUT")
for host, details in sorted(self.client.stats(hostname).items()):
print format % (
host,
details.get("open_requests", 0),
details.get("completed_requests", 0),
details.get("bytes_received", 0),
details.get("bytes_sent", 0),
)
|
epio/mantrid
|
mantrid/cli.py
|
MantridCli.action_set
|
python
|
def action_set(self, hostname=None, action=None, subdoms=None, *args):
"Adds a hostname to the LB, or alters an existing one"
usage = "set <hostname> <action> <subdoms> [option=value, ...]"
if hostname is None:
sys.stderr.write("You must supply a hostname.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
if action is None:
sys.stderr.write("You must supply an action.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
if subdoms is None or subdoms.lower() not in ("true", "false"):
sys.stderr.write("You must supply True or False for the subdomains flag.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
# Grab options
options = {}
for arg in args:
if "=" not in arg:
sys.stderr.write("%s is not a valid option (no =)\n" % (
arg
))
sys.exit(1)
key, value = arg.split("=", 1)
options[key] = value
# Sanity-check options
if action in ("proxy, mirror") and "backends" not in options:
sys.stderr.write("The %s action requires a backends option.\n" % action)
sys.exit(1)
if action == "static" and "type" not in options:
sys.stderr.write("The %s action requires a type option.\n" % action)
sys.exit(1)
if action == "redirect" and "redirect_to" not in options:
sys.stderr.write("The %s action requires a redirect_to option.\n" % action)
sys.exit(1)
if action == "empty" and "code" not in options:
sys.stderr.write("The %s action requires a code option.\n" % action)
sys.exit(1)
# Expand some options from text to datastructure
if "backends" in options:
options['backends'] = [
(lambda x: (x[0], int(x[1])))(bit.split(":", 1))
for bit in options['backends'].split(",")
]
# Set!
self.client.set(
hostname,
[action, options, subdoms.lower() == "true"]
)
|
Adds a hostname to the LB, or alters an existing one
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/cli.py#L76-L124
| null |
class MantridCli(object):
"""Command line interface to Mantrid"""
def __init__(self, base_url):
self.client = MantridClient(base_url)
@classmethod
def main(cls):
cli = cls("http://localhost:8042")
cli.run(sys.argv)
@property
def action_names(self):
for method_name in dir(self):
if method_name.startswith("action_") \
and method_name != "action_names":
yield method_name[7:]
def run(self, argv):
# Work out what action we're doing
try:
action = argv[1]
except IndexError:
sys.stderr.write(
"Please provide an action (%s).\n" % (
", ".join(self.action_names),
)
)
sys.exit(1)
if action not in list(self.action_names):
sys.stderr.write(
"Action %s does not exist.\n" % (
action,
)
)
sys.exit(1)
# Run it
getattr(self, "action_%s" % action)(*argv[2:])
def action_list(self):
"Lists all hosts on the LB"
format = "%-35s %-25s %-8s"
print format % ("HOST", "ACTION", "SUBDOMS")
for host, details in sorted(self.client.get_all().items()):
if details[0] in ("proxy", "mirror"):
action = "%s<%s>" % (
details[0],
",".join(
"%s:%s" % (host, port)
for host, port in details[1]['backends']
)
)
elif details[0] == "static":
action = "%s<%s>" % (
details[0],
details[1]['type'],
)
elif details[0] == "redirect":
action = "%s<%s>" % (
details[0],
details[1]['redirect_to'],
)
elif details[0] == "empty":
action = "%s<%s>" % (
details[0],
details[1]['code'],
)
else:
action = details[0]
print format % (host, action, details[2])
def action_delete(self, hostname):
"Deletes the hostname from the LB."
self.client.delete(
hostname,
)
def action_stats(self, hostname=None):
"Shows stats (possibly limited by hostname)"
format = "%-35s %-11s %-11s %-11s %-11s"
print format % ("HOST", "OPEN", "COMPLETED", "BYTES IN", "BYTES OUT")
for host, details in sorted(self.client.stats(hostname).items()):
print format % (
host,
details.get("open_requests", 0),
details.get("completed_requests", 0),
details.get("bytes_received", 0),
details.get("bytes_sent", 0),
)
|
epio/mantrid
|
mantrid/cli.py
|
MantridCli.action_stats
|
python
|
def action_stats(self, hostname=None):
"Shows stats (possibly limited by hostname)"
format = "%-35s %-11s %-11s %-11s %-11s"
print format % ("HOST", "OPEN", "COMPLETED", "BYTES IN", "BYTES OUT")
for host, details in sorted(self.client.stats(hostname).items()):
print format % (
host,
details.get("open_requests", 0),
details.get("completed_requests", 0),
details.get("bytes_received", 0),
details.get("bytes_sent", 0),
)
|
Shows stats (possibly limited by hostname)
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/cli.py#L132-L143
| null |
class MantridCli(object):
"""Command line interface to Mantrid"""
def __init__(self, base_url):
self.client = MantridClient(base_url)
@classmethod
def main(cls):
cli = cls("http://localhost:8042")
cli.run(sys.argv)
@property
def action_names(self):
for method_name in dir(self):
if method_name.startswith("action_") \
and method_name != "action_names":
yield method_name[7:]
def run(self, argv):
# Work out what action we're doing
try:
action = argv[1]
except IndexError:
sys.stderr.write(
"Please provide an action (%s).\n" % (
", ".join(self.action_names),
)
)
sys.exit(1)
if action not in list(self.action_names):
sys.stderr.write(
"Action %s does not exist.\n" % (
action,
)
)
sys.exit(1)
# Run it
getattr(self, "action_%s" % action)(*argv[2:])
def action_list(self):
"Lists all hosts on the LB"
format = "%-35s %-25s %-8s"
print format % ("HOST", "ACTION", "SUBDOMS")
for host, details in sorted(self.client.get_all().items()):
if details[0] in ("proxy", "mirror"):
action = "%s<%s>" % (
details[0],
",".join(
"%s:%s" % (host, port)
for host, port in details[1]['backends']
)
)
elif details[0] == "static":
action = "%s<%s>" % (
details[0],
details[1]['type'],
)
elif details[0] == "redirect":
action = "%s<%s>" % (
details[0],
details[1]['redirect_to'],
)
elif details[0] == "empty":
action = "%s<%s>" % (
details[0],
details[1]['code'],
)
else:
action = details[0]
print format % (host, action, details[2])
def action_set(self, hostname=None, action=None, subdoms=None, *args):
"Adds a hostname to the LB, or alters an existing one"
usage = "set <hostname> <action> <subdoms> [option=value, ...]"
if hostname is None:
sys.stderr.write("You must supply a hostname.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
if action is None:
sys.stderr.write("You must supply an action.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
if subdoms is None or subdoms.lower() not in ("true", "false"):
sys.stderr.write("You must supply True or False for the subdomains flag.\n")
sys.stderr.write("Usage: %s\n" % usage)
sys.exit(1)
# Grab options
options = {}
for arg in args:
if "=" not in arg:
sys.stderr.write("%s is not a valid option (no =)\n" % (
arg
))
sys.exit(1)
key, value = arg.split("=", 1)
options[key] = value
# Sanity-check options
if action in ("proxy, mirror") and "backends" not in options:
sys.stderr.write("The %s action requires a backends option.\n" % action)
sys.exit(1)
if action == "static" and "type" not in options:
sys.stderr.write("The %s action requires a type option.\n" % action)
sys.exit(1)
if action == "redirect" and "redirect_to" not in options:
sys.stderr.write("The %s action requires a redirect_to option.\n" % action)
sys.exit(1)
if action == "empty" and "code" not in options:
sys.stderr.write("The %s action requires a code option.\n" % action)
sys.exit(1)
# Expand some options from text to datastructure
if "backends" in options:
options['backends'] = [
(lambda x: (x[0], int(x[1])))(bit.split(":", 1))
for bit in options['backends'].split(",")
]
# Set!
self.client.set(
hostname,
[action, options, subdoms.lower() == "true"]
)
def action_delete(self, hostname):
"Deletes the hostname from the LB."
self.client.delete(
hostname,
)
|
epio/mantrid
|
mantrid/loadbalancer.py
|
Balancer.load
|
python
|
def load(self):
"Loads the state from the state file"
try:
if os.path.getsize(self.state_file) <= 1:
raise IOError("File is empty.")
with open(self.state_file) as fh:
state = json.load(fh)
assert isinstance(state, dict)
self.hosts = state['hosts']
self.stats = state['stats']
for key in self.stats:
self.stats[key]['open_requests'] = 0
except (IOError, OSError):
# There is no state file; start empty.
self.hosts = {}
self.stats = {}
|
Loads the state from the state file
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/loadbalancer.py#L100-L115
| null |
class Balancer(object):
"""
Main loadbalancer class.
"""
nofile = 102400
save_interval = 10
action_mapping = {
"proxy": Proxy,
"empty": Empty,
"static": Static,
"redirect": Redirect,
"unknown": Unknown,
"spin": Spin,
"no_hosts": NoHosts,
}
def __init__(self, external_addresses, internal_addresses, management_addresses, state_file, uid=None, gid=65535, static_dir="/etc/mantrid/static/"):
"""
Constructor.
Takes one parameter, the dict of ports to listen on.
The key in this dict is the port number, and the value
is if it's an internal endpoint or not.
Internal endpoints do not have X-Forwarded-* stripped;
other ones do, and have X-Forwarded-For added.
"""
self.external_addresses = external_addresses
self.internal_addresses = internal_addresses
self.management_addresses = management_addresses
self.state_file = state_file
self.uid = uid
self.gid = gid
self.static_dir = static_dir
@classmethod
def main(cls):
# Parse command-line args
parser = argparse.ArgumentParser(description='The Mantrid load balancer')
parser.add_argument('--debug', dest='debug', action='store_const', const=True, help='Enable debug logging')
parser.add_argument('-c', '--config', dest='config', default=None, metavar="PATH", help='Path to the configuration file')
args = parser.parse_args()
# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
# Output to stderr, always
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter(
fmt = "%(asctime)s - %(levelname)8s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
))
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)
# Check they have root access
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (cls.nofile, cls.nofile))
except (ValueError, resource.error):
logging.warning("Cannot raise resource limits (run as root/change ulimits)")
# Load settings from the config file
if args.config is None:
if os.path.exists("/etc/mantrid/mantrid.conf"):
args.config = "/etc/mantrid/mantrid.conf"
logging.info("Using configuration file %s" % args.config)
else:
args.config = "/dev/null"
logging.info("No configuration file found - using defaults.")
else:
logging.info("Using configuration file %s" % args.config)
config = SimpleConfig(args.config)
balancer = cls(
config.get_all_addresses("bind", set([(("::", 80), socket.AF_INET6)])),
config.get_all_addresses("bind_internal"),
config.get_all_addresses("bind_management", set([(("127.0.0.1", 8042), socket.AF_INET), (("::1", 8042), socket.AF_INET6)])),
config.get("state_file", "/var/lib/mantrid/state.json"),
config.get_int("uid", 4321),
config.get_int("gid", 4321),
config.get("static_dir", "/etc/mantrid/static/"),
)
balancer.run()
def save(self):
"Saves the state to the state file"
with open(self.state_file, "w") as fh:
json.dump({
"hosts": self.hosts,
"stats": self.stats,
}, fh)
def run(self):
# First, initialise the process
self.load()
self.running = True
# Try to ensure the state file is readable
state_dir = os.path.dirname(self.state_file)
if not os.path.isdir(state_dir):
os.makedirs(state_dir)
if self.uid is not None:
try:
os.chown(state_dir, self.uid, -1)
except OSError:
pass
try:
os.chown(self.state_file, self.uid, -1)
except OSError:
pass
# Then, launch the socket loops
pool = GreenBody(
len(self.external_addresses) +
len(self.internal_addresses) +
len(self.management_addresses) +
1
)
pool.spawn(self.save_loop)
for address, family in self.external_addresses:
pool.spawn(self.listen_loop, address, family, internal=False)
for address, family in self.internal_addresses:
pool.spawn(self.listen_loop, address, family, internal=True)
for address, family in self.management_addresses:
pool.spawn(self.management_loop, address, family)
# Give the other threads a chance to open their listening sockets
eventlet.sleep(0.5)
# Drop to the lesser UID/GIDs, if supplied
if self.gid:
try:
os.setegid(self.gid)
os.setgid(self.gid)
except OSError:
logging.error("Cannot change to GID %i (probably not running as root)" % self.gid)
else:
logging.info("Dropped to GID %i" % self.gid)
if self.uid:
try:
os.seteuid(0)
os.setuid(self.uid)
os.seteuid(self.uid)
except OSError:
logging.error("Cannot change to UID %i (probably not running as root)" % self.uid)
else:
logging.info("Dropped to UID %i" % self.uid)
# Ensure we can save to the state file, or die hard.
try:
open(self.state_file, "a").close()
except (OSError, IOError):
logging.critical("Cannot write to state file %s" % self.state_file)
sys.exit(1)
# Wait for one to exit, or for a clean/forced shutdown
try:
pool.wait()
except (KeyboardInterrupt, StopIteration, SystemExit):
pass
except:
logging.error(traceback.format_exc())
# We're done
self.running = False
logging.info("Exiting")
### Management ###
def save_loop(self):
"""
Saves the state if it has changed.
"""
last_hash = hash(repr(self.hosts))
while self.running:
eventlet.sleep(self.save_interval)
next_hash = hash(repr(self.hosts))
if next_hash != last_hash:
self.save()
last_hash = next_hash
def management_loop(self, address, family):
"""
Accepts management requests.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Actually serve management
logging.info("Listening for management on %s" % (address, ))
management_app = ManagementApp(self)
try:
with open("/dev/null", "w") as log_dest:
wsgi.server(
sock,
management_app.handle,
log = log_dest,
)
finally:
sock.close()
### Client handling ###
def listen_loop(self, address, family, internal=False):
"""
Accepts incoming connections.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
if e.errno == errno.EADDRINUSE:
logging.critical("Cannot listen on (%s, %s): already in use" % (address, family))
raise
elif e.errno == errno.EACCES and address[1] <= 1024:
logging.critical("Cannot listen on (%s, %s) (you might need to launch as root)" % (address, family))
return
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Start serving
logging.info("Listening for requests on %s" % (address, ))
try:
eventlet.serve(
sock,
lambda sock, addr: self.handle(sock, addr, internal),
concurrency = 10000,
)
finally:
sock.close()
def resolve_host(self, host, protocol="http"):
# Special case for empty hosts dict
if not self.hosts:
return NoHosts(self, host, "unknown")
# Check for an exact or any subdomain matches
bits = host.split(".")
for i in range(len(bits)):
for prefix in ["%s://" % protocol, ""]:
subhost = prefix + (".".join(bits[i:]))
if subhost in self.hosts:
action, kwargs, allow_subs = self.hosts[subhost]
if allow_subs or i == 0:
action_class = self.action_mapping[action]
return action_class(
balancer = self,
host = host,
matched_host = subhost,
**kwargs
)
return Unknown(self, host, "unknown")
def handle(self, sock, address, internal=False):
"""
Handles an incoming HTTP connection.
"""
try:
sock = StatsSocket(sock)
rfile = sock.makefile('rb', 4096)
# Read the first line
first = rfile.readline().strip("\r\n")
words = first.split()
# Ensure it looks kind of like HTTP
if not (2 <= len(words) <= 3):
sock.sendall("HTTP/1.0 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
path = words[1]
# Read the headers
headers = mimetools.Message(rfile, 0)
# Work out the host
try:
host = headers['Host']
except KeyError:
host = "unknown"
headers['Connection'] = "close"
if not internal:
headers['X-Forwarded-For'] = address[0]
headers['X-Forwarded-Protocol'] = ""
headers['X-Forwarded-Proto'] = ""
# Make sure they're not using odd encodings
if "Transfer-Encoding" in headers:
sock.sendall("HTTP/1.0 411 Length Required\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
# Match the host to an action
protocol = "http"
if headers.get('X-Forwarded-Protocol', headers.get('X-Forwarded-Proto', "")).lower() in ("ssl", "https"):
protocol = "https"
action = self.resolve_host(host, protocol)
# Record us as an open connection
stats_dict = self.stats.setdefault(action.matched_host, {})
stats_dict['open_requests'] = stats_dict.get('open_requests', 0) + 1
# Run the action
try:
rfile._rbuf.seek(0)
action.handle(
sock = sock,
read_data = first + "\r\n" + str(headers) + "\r\n" + rfile._rbuf.read(),
path = path,
headers = headers,
)
finally:
stats_dict['open_requests'] -= 1
stats_dict['completed_requests'] = stats_dict.get('completed_requests', 0) + 1
stats_dict['bytes_sent'] = stats_dict.get('bytes_sent', 0) + sock.bytes_sent
stats_dict['bytes_received'] = stats_dict.get('bytes_received', 0) + sock.bytes_received
except socket.error, e:
if e.errno not in (errno.EPIPE, errno.ETIMEDOUT, errno.ECONNRESET):
logging.error(traceback.format_exc())
except:
logging.error(traceback.format_exc())
try:
sock.sendall("HTTP/1.0 500 Internal Server Error\r\n\r\nThere has been an internal error in the load balancer.")
except socket.error, e:
if e.errno != errno.EPIPE:
raise
finally:
try:
sock.close()
rfile.close()
except:
logging.error(traceback.format_exc())
|
epio/mantrid
|
mantrid/loadbalancer.py
|
Balancer.save
|
python
|
def save(self):
"Saves the state to the state file"
with open(self.state_file, "w") as fh:
json.dump({
"hosts": self.hosts,
"stats": self.stats,
}, fh)
|
Saves the state to the state file
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/loadbalancer.py#L117-L123
| null |
class Balancer(object):
"""
Main loadbalancer class.
"""
nofile = 102400
save_interval = 10
action_mapping = {
"proxy": Proxy,
"empty": Empty,
"static": Static,
"redirect": Redirect,
"unknown": Unknown,
"spin": Spin,
"no_hosts": NoHosts,
}
def __init__(self, external_addresses, internal_addresses, management_addresses, state_file, uid=None, gid=65535, static_dir="/etc/mantrid/static/"):
"""
Constructor.
Takes one parameter, the dict of ports to listen on.
The key in this dict is the port number, and the value
is if it's an internal endpoint or not.
Internal endpoints do not have X-Forwarded-* stripped;
other ones do, and have X-Forwarded-For added.
"""
self.external_addresses = external_addresses
self.internal_addresses = internal_addresses
self.management_addresses = management_addresses
self.state_file = state_file
self.uid = uid
self.gid = gid
self.static_dir = static_dir
@classmethod
def main(cls):
# Parse command-line args
parser = argparse.ArgumentParser(description='The Mantrid load balancer')
parser.add_argument('--debug', dest='debug', action='store_const', const=True, help='Enable debug logging')
parser.add_argument('-c', '--config', dest='config', default=None, metavar="PATH", help='Path to the configuration file')
args = parser.parse_args()
# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
# Output to stderr, always
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter(
fmt = "%(asctime)s - %(levelname)8s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
))
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)
# Check they have root access
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (cls.nofile, cls.nofile))
except (ValueError, resource.error):
logging.warning("Cannot raise resource limits (run as root/change ulimits)")
# Load settings from the config file
if args.config is None:
if os.path.exists("/etc/mantrid/mantrid.conf"):
args.config = "/etc/mantrid/mantrid.conf"
logging.info("Using configuration file %s" % args.config)
else:
args.config = "/dev/null"
logging.info("No configuration file found - using defaults.")
else:
logging.info("Using configuration file %s" % args.config)
config = SimpleConfig(args.config)
balancer = cls(
config.get_all_addresses("bind", set([(("::", 80), socket.AF_INET6)])),
config.get_all_addresses("bind_internal"),
config.get_all_addresses("bind_management", set([(("127.0.0.1", 8042), socket.AF_INET), (("::1", 8042), socket.AF_INET6)])),
config.get("state_file", "/var/lib/mantrid/state.json"),
config.get_int("uid", 4321),
config.get_int("gid", 4321),
config.get("static_dir", "/etc/mantrid/static/"),
)
balancer.run()
def load(self):
"Loads the state from the state file"
try:
if os.path.getsize(self.state_file) <= 1:
raise IOError("File is empty.")
with open(self.state_file) as fh:
state = json.load(fh)
assert isinstance(state, dict)
self.hosts = state['hosts']
self.stats = state['stats']
for key in self.stats:
self.stats[key]['open_requests'] = 0
except (IOError, OSError):
# There is no state file; start empty.
self.hosts = {}
self.stats = {}
def run(self):
# First, initialise the process
self.load()
self.running = True
# Try to ensure the state file is readable
state_dir = os.path.dirname(self.state_file)
if not os.path.isdir(state_dir):
os.makedirs(state_dir)
if self.uid is not None:
try:
os.chown(state_dir, self.uid, -1)
except OSError:
pass
try:
os.chown(self.state_file, self.uid, -1)
except OSError:
pass
# Then, launch the socket loops
pool = GreenBody(
len(self.external_addresses) +
len(self.internal_addresses) +
len(self.management_addresses) +
1
)
pool.spawn(self.save_loop)
for address, family in self.external_addresses:
pool.spawn(self.listen_loop, address, family, internal=False)
for address, family in self.internal_addresses:
pool.spawn(self.listen_loop, address, family, internal=True)
for address, family in self.management_addresses:
pool.spawn(self.management_loop, address, family)
# Give the other threads a chance to open their listening sockets
eventlet.sleep(0.5)
# Drop to the lesser UID/GIDs, if supplied
if self.gid:
try:
os.setegid(self.gid)
os.setgid(self.gid)
except OSError:
logging.error("Cannot change to GID %i (probably not running as root)" % self.gid)
else:
logging.info("Dropped to GID %i" % self.gid)
if self.uid:
try:
os.seteuid(0)
os.setuid(self.uid)
os.seteuid(self.uid)
except OSError:
logging.error("Cannot change to UID %i (probably not running as root)" % self.uid)
else:
logging.info("Dropped to UID %i" % self.uid)
# Ensure we can save to the state file, or die hard.
try:
open(self.state_file, "a").close()
except (OSError, IOError):
logging.critical("Cannot write to state file %s" % self.state_file)
sys.exit(1)
# Wait for one to exit, or for a clean/forced shutdown
try:
pool.wait()
except (KeyboardInterrupt, StopIteration, SystemExit):
pass
except:
logging.error(traceback.format_exc())
# We're done
self.running = False
logging.info("Exiting")
### Management ###
def save_loop(self):
"""
Saves the state if it has changed.
"""
last_hash = hash(repr(self.hosts))
while self.running:
eventlet.sleep(self.save_interval)
next_hash = hash(repr(self.hosts))
if next_hash != last_hash:
self.save()
last_hash = next_hash
def management_loop(self, address, family):
"""
Accepts management requests.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Actually serve management
logging.info("Listening for management on %s" % (address, ))
management_app = ManagementApp(self)
try:
with open("/dev/null", "w") as log_dest:
wsgi.server(
sock,
management_app.handle,
log = log_dest,
)
finally:
sock.close()
### Client handling ###
def listen_loop(self, address, family, internal=False):
"""
Accepts incoming connections.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
if e.errno == errno.EADDRINUSE:
logging.critical("Cannot listen on (%s, %s): already in use" % (address, family))
raise
elif e.errno == errno.EACCES and address[1] <= 1024:
logging.critical("Cannot listen on (%s, %s) (you might need to launch as root)" % (address, family))
return
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Start serving
logging.info("Listening for requests on %s" % (address, ))
try:
eventlet.serve(
sock,
lambda sock, addr: self.handle(sock, addr, internal),
concurrency = 10000,
)
finally:
sock.close()
def resolve_host(self, host, protocol="http"):
# Special case for empty hosts dict
if not self.hosts:
return NoHosts(self, host, "unknown")
# Check for an exact or any subdomain matches
bits = host.split(".")
for i in range(len(bits)):
for prefix in ["%s://" % protocol, ""]:
subhost = prefix + (".".join(bits[i:]))
if subhost in self.hosts:
action, kwargs, allow_subs = self.hosts[subhost]
if allow_subs or i == 0:
action_class = self.action_mapping[action]
return action_class(
balancer = self,
host = host,
matched_host = subhost,
**kwargs
)
return Unknown(self, host, "unknown")
def handle(self, sock, address, internal=False):
"""
Handles an incoming HTTP connection.
"""
try:
sock = StatsSocket(sock)
rfile = sock.makefile('rb', 4096)
# Read the first line
first = rfile.readline().strip("\r\n")
words = first.split()
# Ensure it looks kind of like HTTP
if not (2 <= len(words) <= 3):
sock.sendall("HTTP/1.0 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
path = words[1]
# Read the headers
headers = mimetools.Message(rfile, 0)
# Work out the host
try:
host = headers['Host']
except KeyError:
host = "unknown"
headers['Connection'] = "close"
if not internal:
headers['X-Forwarded-For'] = address[0]
headers['X-Forwarded-Protocol'] = ""
headers['X-Forwarded-Proto'] = ""
# Make sure they're not using odd encodings
if "Transfer-Encoding" in headers:
sock.sendall("HTTP/1.0 411 Length Required\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
# Match the host to an action
protocol = "http"
if headers.get('X-Forwarded-Protocol', headers.get('X-Forwarded-Proto', "")).lower() in ("ssl", "https"):
protocol = "https"
action = self.resolve_host(host, protocol)
# Record us as an open connection
stats_dict = self.stats.setdefault(action.matched_host, {})
stats_dict['open_requests'] = stats_dict.get('open_requests', 0) + 1
# Run the action
try:
rfile._rbuf.seek(0)
action.handle(
sock = sock,
read_data = first + "\r\n" + str(headers) + "\r\n" + rfile._rbuf.read(),
path = path,
headers = headers,
)
finally:
stats_dict['open_requests'] -= 1
stats_dict['completed_requests'] = stats_dict.get('completed_requests', 0) + 1
stats_dict['bytes_sent'] = stats_dict.get('bytes_sent', 0) + sock.bytes_sent
stats_dict['bytes_received'] = stats_dict.get('bytes_received', 0) + sock.bytes_received
except socket.error, e:
if e.errno not in (errno.EPIPE, errno.ETIMEDOUT, errno.ECONNRESET):
logging.error(traceback.format_exc())
except:
logging.error(traceback.format_exc())
try:
sock.sendall("HTTP/1.0 500 Internal Server Error\r\n\r\nThere has been an internal error in the load balancer.")
except socket.error, e:
if e.errno != errno.EPIPE:
raise
finally:
try:
sock.close()
rfile.close()
except:
logging.error(traceback.format_exc())
|
epio/mantrid
|
mantrid/loadbalancer.py
|
Balancer.save_loop
|
python
|
def save_loop(self):
last_hash = hash(repr(self.hosts))
while self.running:
eventlet.sleep(self.save_interval)
next_hash = hash(repr(self.hosts))
if next_hash != last_hash:
self.save()
last_hash = next_hash
|
Saves the state if it has changed.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/loadbalancer.py#L195-L205
|
[
"def save(self):\n \"Saves the state to the state file\"\n with open(self.state_file, \"w\") as fh:\n json.dump({\n \"hosts\": self.hosts,\n \"stats\": self.stats,\n }, fh)\n"
] |
class Balancer(object):
"""
Main loadbalancer class.
"""
nofile = 102400
save_interval = 10
action_mapping = {
"proxy": Proxy,
"empty": Empty,
"static": Static,
"redirect": Redirect,
"unknown": Unknown,
"spin": Spin,
"no_hosts": NoHosts,
}
def __init__(self, external_addresses, internal_addresses, management_addresses, state_file, uid=None, gid=65535, static_dir="/etc/mantrid/static/"):
"""
Constructor.
Takes one parameter, the dict of ports to listen on.
The key in this dict is the port number, and the value
is if it's an internal endpoint or not.
Internal endpoints do not have X-Forwarded-* stripped;
other ones do, and have X-Forwarded-For added.
"""
self.external_addresses = external_addresses
self.internal_addresses = internal_addresses
self.management_addresses = management_addresses
self.state_file = state_file
self.uid = uid
self.gid = gid
self.static_dir = static_dir
@classmethod
def main(cls):
# Parse command-line args
parser = argparse.ArgumentParser(description='The Mantrid load balancer')
parser.add_argument('--debug', dest='debug', action='store_const', const=True, help='Enable debug logging')
parser.add_argument('-c', '--config', dest='config', default=None, metavar="PATH", help='Path to the configuration file')
args = parser.parse_args()
# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
# Output to stderr, always
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter(
fmt = "%(asctime)s - %(levelname)8s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
))
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)
# Check they have root access
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (cls.nofile, cls.nofile))
except (ValueError, resource.error):
logging.warning("Cannot raise resource limits (run as root/change ulimits)")
# Load settings from the config file
if args.config is None:
if os.path.exists("/etc/mantrid/mantrid.conf"):
args.config = "/etc/mantrid/mantrid.conf"
logging.info("Using configuration file %s" % args.config)
else:
args.config = "/dev/null"
logging.info("No configuration file found - using defaults.")
else:
logging.info("Using configuration file %s" % args.config)
config = SimpleConfig(args.config)
balancer = cls(
config.get_all_addresses("bind", set([(("::", 80), socket.AF_INET6)])),
config.get_all_addresses("bind_internal"),
config.get_all_addresses("bind_management", set([(("127.0.0.1", 8042), socket.AF_INET), (("::1", 8042), socket.AF_INET6)])),
config.get("state_file", "/var/lib/mantrid/state.json"),
config.get_int("uid", 4321),
config.get_int("gid", 4321),
config.get("static_dir", "/etc/mantrid/static/"),
)
balancer.run()
def load(self):
"Loads the state from the state file"
try:
if os.path.getsize(self.state_file) <= 1:
raise IOError("File is empty.")
with open(self.state_file) as fh:
state = json.load(fh)
assert isinstance(state, dict)
self.hosts = state['hosts']
self.stats = state['stats']
for key in self.stats:
self.stats[key]['open_requests'] = 0
except (IOError, OSError):
# There is no state file; start empty.
self.hosts = {}
self.stats = {}
def save(self):
"Saves the state to the state file"
with open(self.state_file, "w") as fh:
json.dump({
"hosts": self.hosts,
"stats": self.stats,
}, fh)
def run(self):
# First, initialise the process
self.load()
self.running = True
# Try to ensure the state file is readable
state_dir = os.path.dirname(self.state_file)
if not os.path.isdir(state_dir):
os.makedirs(state_dir)
if self.uid is not None:
try:
os.chown(state_dir, self.uid, -1)
except OSError:
pass
try:
os.chown(self.state_file, self.uid, -1)
except OSError:
pass
# Then, launch the socket loops
pool = GreenBody(
len(self.external_addresses) +
len(self.internal_addresses) +
len(self.management_addresses) +
1
)
pool.spawn(self.save_loop)
for address, family in self.external_addresses:
pool.spawn(self.listen_loop, address, family, internal=False)
for address, family in self.internal_addresses:
pool.spawn(self.listen_loop, address, family, internal=True)
for address, family in self.management_addresses:
pool.spawn(self.management_loop, address, family)
# Give the other threads a chance to open their listening sockets
eventlet.sleep(0.5)
# Drop to the lesser UID/GIDs, if supplied
if self.gid:
try:
os.setegid(self.gid)
os.setgid(self.gid)
except OSError:
logging.error("Cannot change to GID %i (probably not running as root)" % self.gid)
else:
logging.info("Dropped to GID %i" % self.gid)
if self.uid:
try:
os.seteuid(0)
os.setuid(self.uid)
os.seteuid(self.uid)
except OSError:
logging.error("Cannot change to UID %i (probably not running as root)" % self.uid)
else:
logging.info("Dropped to UID %i" % self.uid)
# Ensure we can save to the state file, or die hard.
try:
open(self.state_file, "a").close()
except (OSError, IOError):
logging.critical("Cannot write to state file %s" % self.state_file)
sys.exit(1)
# Wait for one to exit, or for a clean/forced shutdown
try:
pool.wait()
except (KeyboardInterrupt, StopIteration, SystemExit):
pass
except:
logging.error(traceback.format_exc())
# We're done
self.running = False
logging.info("Exiting")
### Management ###
def management_loop(self, address, family):
"""
Accepts management requests.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Actually serve management
logging.info("Listening for management on %s" % (address, ))
management_app = ManagementApp(self)
try:
with open("/dev/null", "w") as log_dest:
wsgi.server(
sock,
management_app.handle,
log = log_dest,
)
finally:
sock.close()
### Client handling ###
def listen_loop(self, address, family, internal=False):
"""
Accepts incoming connections.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
if e.errno == errno.EADDRINUSE:
logging.critical("Cannot listen on (%s, %s): already in use" % (address, family))
raise
elif e.errno == errno.EACCES and address[1] <= 1024:
logging.critical("Cannot listen on (%s, %s) (you might need to launch as root)" % (address, family))
return
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Start serving
logging.info("Listening for requests on %s" % (address, ))
try:
eventlet.serve(
sock,
lambda sock, addr: self.handle(sock, addr, internal),
concurrency = 10000,
)
finally:
sock.close()
def resolve_host(self, host, protocol="http"):
# Special case for empty hosts dict
if not self.hosts:
return NoHosts(self, host, "unknown")
# Check for an exact or any subdomain matches
bits = host.split(".")
for i in range(len(bits)):
for prefix in ["%s://" % protocol, ""]:
subhost = prefix + (".".join(bits[i:]))
if subhost in self.hosts:
action, kwargs, allow_subs = self.hosts[subhost]
if allow_subs or i == 0:
action_class = self.action_mapping[action]
return action_class(
balancer = self,
host = host,
matched_host = subhost,
**kwargs
)
return Unknown(self, host, "unknown")
def handle(self, sock, address, internal=False):
"""
Handles an incoming HTTP connection.
"""
try:
sock = StatsSocket(sock)
rfile = sock.makefile('rb', 4096)
# Read the first line
first = rfile.readline().strip("\r\n")
words = first.split()
# Ensure it looks kind of like HTTP
if not (2 <= len(words) <= 3):
sock.sendall("HTTP/1.0 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
path = words[1]
# Read the headers
headers = mimetools.Message(rfile, 0)
# Work out the host
try:
host = headers['Host']
except KeyError:
host = "unknown"
headers['Connection'] = "close"
if not internal:
headers['X-Forwarded-For'] = address[0]
headers['X-Forwarded-Protocol'] = ""
headers['X-Forwarded-Proto'] = ""
# Make sure they're not using odd encodings
if "Transfer-Encoding" in headers:
sock.sendall("HTTP/1.0 411 Length Required\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
# Match the host to an action
protocol = "http"
if headers.get('X-Forwarded-Protocol', headers.get('X-Forwarded-Proto', "")).lower() in ("ssl", "https"):
protocol = "https"
action = self.resolve_host(host, protocol)
# Record us as an open connection
stats_dict = self.stats.setdefault(action.matched_host, {})
stats_dict['open_requests'] = stats_dict.get('open_requests', 0) + 1
# Run the action
try:
rfile._rbuf.seek(0)
action.handle(
sock = sock,
read_data = first + "\r\n" + str(headers) + "\r\n" + rfile._rbuf.read(),
path = path,
headers = headers,
)
finally:
stats_dict['open_requests'] -= 1
stats_dict['completed_requests'] = stats_dict.get('completed_requests', 0) + 1
stats_dict['bytes_sent'] = stats_dict.get('bytes_sent', 0) + sock.bytes_sent
stats_dict['bytes_received'] = stats_dict.get('bytes_received', 0) + sock.bytes_received
except socket.error, e:
if e.errno not in (errno.EPIPE, errno.ETIMEDOUT, errno.ECONNRESET):
logging.error(traceback.format_exc())
except:
logging.error(traceback.format_exc())
try:
sock.sendall("HTTP/1.0 500 Internal Server Error\r\n\r\nThere has been an internal error in the load balancer.")
except socket.error, e:
if e.errno != errno.EPIPE:
raise
finally:
try:
sock.close()
rfile.close()
except:
logging.error(traceback.format_exc())
|
epio/mantrid
|
mantrid/loadbalancer.py
|
Balancer.management_loop
|
python
|
def management_loop(self, address, family):
try:
sock = eventlet.listen(address, family)
except socket.error, e:
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Actually serve management
logging.info("Listening for management on %s" % (address, ))
management_app = ManagementApp(self)
try:
with open("/dev/null", "w") as log_dest:
wsgi.server(
sock,
management_app.handle,
log = log_dest,
)
finally:
sock.close()
|
Accepts management requests.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/loadbalancer.py#L207-L229
| null |
class Balancer(object):
"""
Main loadbalancer class.
"""
nofile = 102400
save_interval = 10
action_mapping = {
"proxy": Proxy,
"empty": Empty,
"static": Static,
"redirect": Redirect,
"unknown": Unknown,
"spin": Spin,
"no_hosts": NoHosts,
}
def __init__(self, external_addresses, internal_addresses, management_addresses, state_file, uid=None, gid=65535, static_dir="/etc/mantrid/static/"):
"""
Constructor.
Takes one parameter, the dict of ports to listen on.
The key in this dict is the port number, and the value
is if it's an internal endpoint or not.
Internal endpoints do not have X-Forwarded-* stripped;
other ones do, and have X-Forwarded-For added.
"""
self.external_addresses = external_addresses
self.internal_addresses = internal_addresses
self.management_addresses = management_addresses
self.state_file = state_file
self.uid = uid
self.gid = gid
self.static_dir = static_dir
@classmethod
def main(cls):
# Parse command-line args
parser = argparse.ArgumentParser(description='The Mantrid load balancer')
parser.add_argument('--debug', dest='debug', action='store_const', const=True, help='Enable debug logging')
parser.add_argument('-c', '--config', dest='config', default=None, metavar="PATH", help='Path to the configuration file')
args = parser.parse_args()
# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
# Output to stderr, always
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter(
fmt = "%(asctime)s - %(levelname)8s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
))
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)
# Check they have root access
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (cls.nofile, cls.nofile))
except (ValueError, resource.error):
logging.warning("Cannot raise resource limits (run as root/change ulimits)")
# Load settings from the config file
if args.config is None:
if os.path.exists("/etc/mantrid/mantrid.conf"):
args.config = "/etc/mantrid/mantrid.conf"
logging.info("Using configuration file %s" % args.config)
else:
args.config = "/dev/null"
logging.info("No configuration file found - using defaults.")
else:
logging.info("Using configuration file %s" % args.config)
config = SimpleConfig(args.config)
balancer = cls(
config.get_all_addresses("bind", set([(("::", 80), socket.AF_INET6)])),
config.get_all_addresses("bind_internal"),
config.get_all_addresses("bind_management", set([(("127.0.0.1", 8042), socket.AF_INET), (("::1", 8042), socket.AF_INET6)])),
config.get("state_file", "/var/lib/mantrid/state.json"),
config.get_int("uid", 4321),
config.get_int("gid", 4321),
config.get("static_dir", "/etc/mantrid/static/"),
)
balancer.run()
def load(self):
"Loads the state from the state file"
try:
if os.path.getsize(self.state_file) <= 1:
raise IOError("File is empty.")
with open(self.state_file) as fh:
state = json.load(fh)
assert isinstance(state, dict)
self.hosts = state['hosts']
self.stats = state['stats']
for key in self.stats:
self.stats[key]['open_requests'] = 0
except (IOError, OSError):
# There is no state file; start empty.
self.hosts = {}
self.stats = {}
def save(self):
"Saves the state to the state file"
with open(self.state_file, "w") as fh:
json.dump({
"hosts": self.hosts,
"stats": self.stats,
}, fh)
def run(self):
# First, initialise the process
self.load()
self.running = True
# Try to ensure the state file is readable
state_dir = os.path.dirname(self.state_file)
if not os.path.isdir(state_dir):
os.makedirs(state_dir)
if self.uid is not None:
try:
os.chown(state_dir, self.uid, -1)
except OSError:
pass
try:
os.chown(self.state_file, self.uid, -1)
except OSError:
pass
# Then, launch the socket loops
pool = GreenBody(
len(self.external_addresses) +
len(self.internal_addresses) +
len(self.management_addresses) +
1
)
pool.spawn(self.save_loop)
for address, family in self.external_addresses:
pool.spawn(self.listen_loop, address, family, internal=False)
for address, family in self.internal_addresses:
pool.spawn(self.listen_loop, address, family, internal=True)
for address, family in self.management_addresses:
pool.spawn(self.management_loop, address, family)
# Give the other threads a chance to open their listening sockets
eventlet.sleep(0.5)
# Drop to the lesser UID/GIDs, if supplied
if self.gid:
try:
os.setegid(self.gid)
os.setgid(self.gid)
except OSError:
logging.error("Cannot change to GID %i (probably not running as root)" % self.gid)
else:
logging.info("Dropped to GID %i" % self.gid)
if self.uid:
try:
os.seteuid(0)
os.setuid(self.uid)
os.seteuid(self.uid)
except OSError:
logging.error("Cannot change to UID %i (probably not running as root)" % self.uid)
else:
logging.info("Dropped to UID %i" % self.uid)
# Ensure we can save to the state file, or die hard.
try:
open(self.state_file, "a").close()
except (OSError, IOError):
logging.critical("Cannot write to state file %s" % self.state_file)
sys.exit(1)
# Wait for one to exit, or for a clean/forced shutdown
try:
pool.wait()
except (KeyboardInterrupt, StopIteration, SystemExit):
pass
except:
logging.error(traceback.format_exc())
# We're done
self.running = False
logging.info("Exiting")
### Management ###
def save_loop(self):
"""
Saves the state if it has changed.
"""
last_hash = hash(repr(self.hosts))
while self.running:
eventlet.sleep(self.save_interval)
next_hash = hash(repr(self.hosts))
if next_hash != last_hash:
self.save()
last_hash = next_hash
### Client handling ###
def listen_loop(self, address, family, internal=False):
"""
Accepts incoming connections.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
if e.errno == errno.EADDRINUSE:
logging.critical("Cannot listen on (%s, %s): already in use" % (address, family))
raise
elif e.errno == errno.EACCES and address[1] <= 1024:
logging.critical("Cannot listen on (%s, %s) (you might need to launch as root)" % (address, family))
return
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Start serving
logging.info("Listening for requests on %s" % (address, ))
try:
eventlet.serve(
sock,
lambda sock, addr: self.handle(sock, addr, internal),
concurrency = 10000,
)
finally:
sock.close()
def resolve_host(self, host, protocol="http"):
# Special case for empty hosts dict
if not self.hosts:
return NoHosts(self, host, "unknown")
# Check for an exact or any subdomain matches
bits = host.split(".")
for i in range(len(bits)):
for prefix in ["%s://" % protocol, ""]:
subhost = prefix + (".".join(bits[i:]))
if subhost in self.hosts:
action, kwargs, allow_subs = self.hosts[subhost]
if allow_subs or i == 0:
action_class = self.action_mapping[action]
return action_class(
balancer = self,
host = host,
matched_host = subhost,
**kwargs
)
return Unknown(self, host, "unknown")
def handle(self, sock, address, internal=False):
"""
Handles an incoming HTTP connection.
"""
try:
sock = StatsSocket(sock)
rfile = sock.makefile('rb', 4096)
# Read the first line
first = rfile.readline().strip("\r\n")
words = first.split()
# Ensure it looks kind of like HTTP
if not (2 <= len(words) <= 3):
sock.sendall("HTTP/1.0 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
path = words[1]
# Read the headers
headers = mimetools.Message(rfile, 0)
# Work out the host
try:
host = headers['Host']
except KeyError:
host = "unknown"
headers['Connection'] = "close"
if not internal:
headers['X-Forwarded-For'] = address[0]
headers['X-Forwarded-Protocol'] = ""
headers['X-Forwarded-Proto'] = ""
# Make sure they're not using odd encodings
if "Transfer-Encoding" in headers:
sock.sendall("HTTP/1.0 411 Length Required\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
# Match the host to an action
protocol = "http"
if headers.get('X-Forwarded-Protocol', headers.get('X-Forwarded-Proto', "")).lower() in ("ssl", "https"):
protocol = "https"
action = self.resolve_host(host, protocol)
# Record us as an open connection
stats_dict = self.stats.setdefault(action.matched_host, {})
stats_dict['open_requests'] = stats_dict.get('open_requests', 0) + 1
# Run the action
try:
rfile._rbuf.seek(0)
action.handle(
sock = sock,
read_data = first + "\r\n" + str(headers) + "\r\n" + rfile._rbuf.read(),
path = path,
headers = headers,
)
finally:
stats_dict['open_requests'] -= 1
stats_dict['completed_requests'] = stats_dict.get('completed_requests', 0) + 1
stats_dict['bytes_sent'] = stats_dict.get('bytes_sent', 0) + sock.bytes_sent
stats_dict['bytes_received'] = stats_dict.get('bytes_received', 0) + sock.bytes_received
except socket.error, e:
if e.errno not in (errno.EPIPE, errno.ETIMEDOUT, errno.ECONNRESET):
logging.error(traceback.format_exc())
except:
logging.error(traceback.format_exc())
try:
sock.sendall("HTTP/1.0 500 Internal Server Error\r\n\r\nThere has been an internal error in the load balancer.")
except socket.error, e:
if e.errno != errno.EPIPE:
raise
finally:
try:
sock.close()
rfile.close()
except:
logging.error(traceback.format_exc())
|
epio/mantrid
|
mantrid/loadbalancer.py
|
Balancer.listen_loop
|
python
|
def listen_loop(self, address, family, internal=False):
try:
sock = eventlet.listen(address, family)
except socket.error, e:
if e.errno == errno.EADDRINUSE:
logging.critical("Cannot listen on (%s, %s): already in use" % (address, family))
raise
elif e.errno == errno.EACCES and address[1] <= 1024:
logging.critical("Cannot listen on (%s, %s) (you might need to launch as root)" % (address, family))
return
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Start serving
logging.info("Listening for requests on %s" % (address, ))
try:
eventlet.serve(
sock,
lambda sock, addr: self.handle(sock, addr, internal),
concurrency = 10000,
)
finally:
sock.close()
|
Accepts incoming connections.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/loadbalancer.py#L233-L259
| null |
class Balancer(object):
"""
Main loadbalancer class.
"""
nofile = 102400
save_interval = 10
action_mapping = {
"proxy": Proxy,
"empty": Empty,
"static": Static,
"redirect": Redirect,
"unknown": Unknown,
"spin": Spin,
"no_hosts": NoHosts,
}
def __init__(self, external_addresses, internal_addresses, management_addresses, state_file, uid=None, gid=65535, static_dir="/etc/mantrid/static/"):
"""
Constructor.
Takes one parameter, the dict of ports to listen on.
The key in this dict is the port number, and the value
is if it's an internal endpoint or not.
Internal endpoints do not have X-Forwarded-* stripped;
other ones do, and have X-Forwarded-For added.
"""
self.external_addresses = external_addresses
self.internal_addresses = internal_addresses
self.management_addresses = management_addresses
self.state_file = state_file
self.uid = uid
self.gid = gid
self.static_dir = static_dir
@classmethod
def main(cls):
# Parse command-line args
parser = argparse.ArgumentParser(description='The Mantrid load balancer')
parser.add_argument('--debug', dest='debug', action='store_const', const=True, help='Enable debug logging')
parser.add_argument('-c', '--config', dest='config', default=None, metavar="PATH", help='Path to the configuration file')
args = parser.parse_args()
# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
# Output to stderr, always
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter(
fmt = "%(asctime)s - %(levelname)8s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
))
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)
# Check they have root access
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (cls.nofile, cls.nofile))
except (ValueError, resource.error):
logging.warning("Cannot raise resource limits (run as root/change ulimits)")
# Load settings from the config file
if args.config is None:
if os.path.exists("/etc/mantrid/mantrid.conf"):
args.config = "/etc/mantrid/mantrid.conf"
logging.info("Using configuration file %s" % args.config)
else:
args.config = "/dev/null"
logging.info("No configuration file found - using defaults.")
else:
logging.info("Using configuration file %s" % args.config)
config = SimpleConfig(args.config)
balancer = cls(
config.get_all_addresses("bind", set([(("::", 80), socket.AF_INET6)])),
config.get_all_addresses("bind_internal"),
config.get_all_addresses("bind_management", set([(("127.0.0.1", 8042), socket.AF_INET), (("::1", 8042), socket.AF_INET6)])),
config.get("state_file", "/var/lib/mantrid/state.json"),
config.get_int("uid", 4321),
config.get_int("gid", 4321),
config.get("static_dir", "/etc/mantrid/static/"),
)
balancer.run()
def load(self):
"Loads the state from the state file"
try:
if os.path.getsize(self.state_file) <= 1:
raise IOError("File is empty.")
with open(self.state_file) as fh:
state = json.load(fh)
assert isinstance(state, dict)
self.hosts = state['hosts']
self.stats = state['stats']
for key in self.stats:
self.stats[key]['open_requests'] = 0
except (IOError, OSError):
# There is no state file; start empty.
self.hosts = {}
self.stats = {}
def save(self):
"Saves the state to the state file"
with open(self.state_file, "w") as fh:
json.dump({
"hosts": self.hosts,
"stats": self.stats,
}, fh)
def run(self):
# First, initialise the process
self.load()
self.running = True
# Try to ensure the state file is readable
state_dir = os.path.dirname(self.state_file)
if not os.path.isdir(state_dir):
os.makedirs(state_dir)
if self.uid is not None:
try:
os.chown(state_dir, self.uid, -1)
except OSError:
pass
try:
os.chown(self.state_file, self.uid, -1)
except OSError:
pass
# Then, launch the socket loops
pool = GreenBody(
len(self.external_addresses) +
len(self.internal_addresses) +
len(self.management_addresses) +
1
)
pool.spawn(self.save_loop)
for address, family in self.external_addresses:
pool.spawn(self.listen_loop, address, family, internal=False)
for address, family in self.internal_addresses:
pool.spawn(self.listen_loop, address, family, internal=True)
for address, family in self.management_addresses:
pool.spawn(self.management_loop, address, family)
# Give the other threads a chance to open their listening sockets
eventlet.sleep(0.5)
# Drop to the lesser UID/GIDs, if supplied
if self.gid:
try:
os.setegid(self.gid)
os.setgid(self.gid)
except OSError:
logging.error("Cannot change to GID %i (probably not running as root)" % self.gid)
else:
logging.info("Dropped to GID %i" % self.gid)
if self.uid:
try:
os.seteuid(0)
os.setuid(self.uid)
os.seteuid(self.uid)
except OSError:
logging.error("Cannot change to UID %i (probably not running as root)" % self.uid)
else:
logging.info("Dropped to UID %i" % self.uid)
# Ensure we can save to the state file, or die hard.
try:
open(self.state_file, "a").close()
except (OSError, IOError):
logging.critical("Cannot write to state file %s" % self.state_file)
sys.exit(1)
# Wait for one to exit, or for a clean/forced shutdown
try:
pool.wait()
except (KeyboardInterrupt, StopIteration, SystemExit):
pass
except:
logging.error(traceback.format_exc())
# We're done
self.running = False
logging.info("Exiting")
### Management ###
def save_loop(self):
"""
Saves the state if it has changed.
"""
last_hash = hash(repr(self.hosts))
while self.running:
eventlet.sleep(self.save_interval)
next_hash = hash(repr(self.hosts))
if next_hash != last_hash:
self.save()
last_hash = next_hash
def management_loop(self, address, family):
"""
Accepts management requests.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Actually serve management
logging.info("Listening for management on %s" % (address, ))
management_app = ManagementApp(self)
try:
with open("/dev/null", "w") as log_dest:
wsgi.server(
sock,
management_app.handle,
log = log_dest,
)
finally:
sock.close()
### Client handling ###
def resolve_host(self, host, protocol="http"):
# Special case for empty hosts dict
if not self.hosts:
return NoHosts(self, host, "unknown")
# Check for an exact or any subdomain matches
bits = host.split(".")
for i in range(len(bits)):
for prefix in ["%s://" % protocol, ""]:
subhost = prefix + (".".join(bits[i:]))
if subhost in self.hosts:
action, kwargs, allow_subs = self.hosts[subhost]
if allow_subs or i == 0:
action_class = self.action_mapping[action]
return action_class(
balancer = self,
host = host,
matched_host = subhost,
**kwargs
)
return Unknown(self, host, "unknown")
def handle(self, sock, address, internal=False):
"""
Handles an incoming HTTP connection.
"""
try:
sock = StatsSocket(sock)
rfile = sock.makefile('rb', 4096)
# Read the first line
first = rfile.readline().strip("\r\n")
words = first.split()
# Ensure it looks kind of like HTTP
if not (2 <= len(words) <= 3):
sock.sendall("HTTP/1.0 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
path = words[1]
# Read the headers
headers = mimetools.Message(rfile, 0)
# Work out the host
try:
host = headers['Host']
except KeyError:
host = "unknown"
headers['Connection'] = "close"
if not internal:
headers['X-Forwarded-For'] = address[0]
headers['X-Forwarded-Protocol'] = ""
headers['X-Forwarded-Proto'] = ""
# Make sure they're not using odd encodings
if "Transfer-Encoding" in headers:
sock.sendall("HTTP/1.0 411 Length Required\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
# Match the host to an action
protocol = "http"
if headers.get('X-Forwarded-Protocol', headers.get('X-Forwarded-Proto', "")).lower() in ("ssl", "https"):
protocol = "https"
action = self.resolve_host(host, protocol)
# Record us as an open connection
stats_dict = self.stats.setdefault(action.matched_host, {})
stats_dict['open_requests'] = stats_dict.get('open_requests', 0) + 1
# Run the action
try:
rfile._rbuf.seek(0)
action.handle(
sock = sock,
read_data = first + "\r\n" + str(headers) + "\r\n" + rfile._rbuf.read(),
path = path,
headers = headers,
)
finally:
stats_dict['open_requests'] -= 1
stats_dict['completed_requests'] = stats_dict.get('completed_requests', 0) + 1
stats_dict['bytes_sent'] = stats_dict.get('bytes_sent', 0) + sock.bytes_sent
stats_dict['bytes_received'] = stats_dict.get('bytes_received', 0) + sock.bytes_received
except socket.error, e:
if e.errno not in (errno.EPIPE, errno.ETIMEDOUT, errno.ECONNRESET):
logging.error(traceback.format_exc())
except:
logging.error(traceback.format_exc())
try:
sock.sendall("HTTP/1.0 500 Internal Server Error\r\n\r\nThere has been an internal error in the load balancer.")
except socket.error, e:
if e.errno != errno.EPIPE:
raise
finally:
try:
sock.close()
rfile.close()
except:
logging.error(traceback.format_exc())
|
epio/mantrid
|
mantrid/loadbalancer.py
|
Balancer.handle
|
python
|
def handle(self, sock, address, internal=False):
try:
sock = StatsSocket(sock)
rfile = sock.makefile('rb', 4096)
# Read the first line
first = rfile.readline().strip("\r\n")
words = first.split()
# Ensure it looks kind of like HTTP
if not (2 <= len(words) <= 3):
sock.sendall("HTTP/1.0 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
path = words[1]
# Read the headers
headers = mimetools.Message(rfile, 0)
# Work out the host
try:
host = headers['Host']
except KeyError:
host = "unknown"
headers['Connection'] = "close"
if not internal:
headers['X-Forwarded-For'] = address[0]
headers['X-Forwarded-Protocol'] = ""
headers['X-Forwarded-Proto'] = ""
# Make sure they're not using odd encodings
if "Transfer-Encoding" in headers:
sock.sendall("HTTP/1.0 411 Length Required\r\nConnection: close\r\nContent-length: 0\r\n\r\n")
return
# Match the host to an action
protocol = "http"
if headers.get('X-Forwarded-Protocol', headers.get('X-Forwarded-Proto', "")).lower() in ("ssl", "https"):
protocol = "https"
action = self.resolve_host(host, protocol)
# Record us as an open connection
stats_dict = self.stats.setdefault(action.matched_host, {})
stats_dict['open_requests'] = stats_dict.get('open_requests', 0) + 1
# Run the action
try:
rfile._rbuf.seek(0)
action.handle(
sock = sock,
read_data = first + "\r\n" + str(headers) + "\r\n" + rfile._rbuf.read(),
path = path,
headers = headers,
)
finally:
stats_dict['open_requests'] -= 1
stats_dict['completed_requests'] = stats_dict.get('completed_requests', 0) + 1
stats_dict['bytes_sent'] = stats_dict.get('bytes_sent', 0) + sock.bytes_sent
stats_dict['bytes_received'] = stats_dict.get('bytes_received', 0) + sock.bytes_received
except socket.error, e:
if e.errno not in (errno.EPIPE, errno.ETIMEDOUT, errno.ECONNRESET):
logging.error(traceback.format_exc())
except:
logging.error(traceback.format_exc())
try:
sock.sendall("HTTP/1.0 500 Internal Server Error\r\n\r\nThere has been an internal error in the load balancer.")
except socket.error, e:
if e.errno != errno.EPIPE:
raise
finally:
try:
sock.close()
rfile.close()
except:
logging.error(traceback.format_exc())
|
Handles an incoming HTTP connection.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/loadbalancer.py#L282-L350
|
[
"def resolve_host(self, host, protocol=\"http\"):\n # Special case for empty hosts dict\n if not self.hosts:\n return NoHosts(self, host, \"unknown\")\n # Check for an exact or any subdomain matches\n bits = host.split(\".\")\n for i in range(len(bits)):\n for prefix in [\"%s://\" % protocol, \"\"]:\n subhost = prefix + (\".\".join(bits[i:]))\n if subhost in self.hosts:\n action, kwargs, allow_subs = self.hosts[subhost]\n if allow_subs or i == 0:\n action_class = self.action_mapping[action]\n return action_class(\n balancer = self,\n host = host,\n matched_host = subhost,\n **kwargs\n )\n return Unknown(self, host, \"unknown\")\n",
"def sendall(self, data):\n self.bytes_sent += len(data)\n self.sock.sendall(data)\n",
"def makefile(self, *args, **kwargs):\n fh = self.sock.makefile(*args, **kwargs)\n fh._sock = self\n return fh\n"
] |
class Balancer(object):
"""
Main loadbalancer class.
"""
nofile = 102400
save_interval = 10
action_mapping = {
"proxy": Proxy,
"empty": Empty,
"static": Static,
"redirect": Redirect,
"unknown": Unknown,
"spin": Spin,
"no_hosts": NoHosts,
}
def __init__(self, external_addresses, internal_addresses, management_addresses, state_file, uid=None, gid=65535, static_dir="/etc/mantrid/static/"):
"""
Constructor.
Takes one parameter, the dict of ports to listen on.
The key in this dict is the port number, and the value
is if it's an internal endpoint or not.
Internal endpoints do not have X-Forwarded-* stripped;
other ones do, and have X-Forwarded-For added.
"""
self.external_addresses = external_addresses
self.internal_addresses = internal_addresses
self.management_addresses = management_addresses
self.state_file = state_file
self.uid = uid
self.gid = gid
self.static_dir = static_dir
@classmethod
def main(cls):
# Parse command-line args
parser = argparse.ArgumentParser(description='The Mantrid load balancer')
parser.add_argument('--debug', dest='debug', action='store_const', const=True, help='Enable debug logging')
parser.add_argument('-c', '--config', dest='config', default=None, metavar="PATH", help='Path to the configuration file')
args = parser.parse_args()
# Set up logging
logger = logging.getLogger()
logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
# Output to stderr, always
sh = logging.StreamHandler()
sh.setFormatter(logging.Formatter(
fmt = "%(asctime)s - %(levelname)8s: %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
))
sh.setLevel(logging.DEBUG)
logger.addHandler(sh)
# Check they have root access
try:
resource.setrlimit(resource.RLIMIT_NOFILE, (cls.nofile, cls.nofile))
except (ValueError, resource.error):
logging.warning("Cannot raise resource limits (run as root/change ulimits)")
# Load settings from the config file
if args.config is None:
if os.path.exists("/etc/mantrid/mantrid.conf"):
args.config = "/etc/mantrid/mantrid.conf"
logging.info("Using configuration file %s" % args.config)
else:
args.config = "/dev/null"
logging.info("No configuration file found - using defaults.")
else:
logging.info("Using configuration file %s" % args.config)
config = SimpleConfig(args.config)
balancer = cls(
config.get_all_addresses("bind", set([(("::", 80), socket.AF_INET6)])),
config.get_all_addresses("bind_internal"),
config.get_all_addresses("bind_management", set([(("127.0.0.1", 8042), socket.AF_INET), (("::1", 8042), socket.AF_INET6)])),
config.get("state_file", "/var/lib/mantrid/state.json"),
config.get_int("uid", 4321),
config.get_int("gid", 4321),
config.get("static_dir", "/etc/mantrid/static/"),
)
balancer.run()
def load(self):
"Loads the state from the state file"
try:
if os.path.getsize(self.state_file) <= 1:
raise IOError("File is empty.")
with open(self.state_file) as fh:
state = json.load(fh)
assert isinstance(state, dict)
self.hosts = state['hosts']
self.stats = state['stats']
for key in self.stats:
self.stats[key]['open_requests'] = 0
except (IOError, OSError):
# There is no state file; start empty.
self.hosts = {}
self.stats = {}
def save(self):
"Saves the state to the state file"
with open(self.state_file, "w") as fh:
json.dump({
"hosts": self.hosts,
"stats": self.stats,
}, fh)
def run(self):
# First, initialise the process
self.load()
self.running = True
# Try to ensure the state file is readable
state_dir = os.path.dirname(self.state_file)
if not os.path.isdir(state_dir):
os.makedirs(state_dir)
if self.uid is not None:
try:
os.chown(state_dir, self.uid, -1)
except OSError:
pass
try:
os.chown(self.state_file, self.uid, -1)
except OSError:
pass
# Then, launch the socket loops
pool = GreenBody(
len(self.external_addresses) +
len(self.internal_addresses) +
len(self.management_addresses) +
1
)
pool.spawn(self.save_loop)
for address, family in self.external_addresses:
pool.spawn(self.listen_loop, address, family, internal=False)
for address, family in self.internal_addresses:
pool.spawn(self.listen_loop, address, family, internal=True)
for address, family in self.management_addresses:
pool.spawn(self.management_loop, address, family)
# Give the other threads a chance to open their listening sockets
eventlet.sleep(0.5)
# Drop to the lesser UID/GIDs, if supplied
if self.gid:
try:
os.setegid(self.gid)
os.setgid(self.gid)
except OSError:
logging.error("Cannot change to GID %i (probably not running as root)" % self.gid)
else:
logging.info("Dropped to GID %i" % self.gid)
if self.uid:
try:
os.seteuid(0)
os.setuid(self.uid)
os.seteuid(self.uid)
except OSError:
logging.error("Cannot change to UID %i (probably not running as root)" % self.uid)
else:
logging.info("Dropped to UID %i" % self.uid)
# Ensure we can save to the state file, or die hard.
try:
open(self.state_file, "a").close()
except (OSError, IOError):
logging.critical("Cannot write to state file %s" % self.state_file)
sys.exit(1)
# Wait for one to exit, or for a clean/forced shutdown
try:
pool.wait()
except (KeyboardInterrupt, StopIteration, SystemExit):
pass
except:
logging.error(traceback.format_exc())
# We're done
self.running = False
logging.info("Exiting")
### Management ###
def save_loop(self):
"""
Saves the state if it has changed.
"""
last_hash = hash(repr(self.hosts))
while self.running:
eventlet.sleep(self.save_interval)
next_hash = hash(repr(self.hosts))
if next_hash != last_hash:
self.save()
last_hash = next_hash
def management_loop(self, address, family):
"""
Accepts management requests.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Actually serve management
logging.info("Listening for management on %s" % (address, ))
management_app = ManagementApp(self)
try:
with open("/dev/null", "w") as log_dest:
wsgi.server(
sock,
management_app.handle,
log = log_dest,
)
finally:
sock.close()
### Client handling ###
def listen_loop(self, address, family, internal=False):
"""
Accepts incoming connections.
"""
try:
sock = eventlet.listen(address, family)
except socket.error, e:
if e.errno == errno.EADDRINUSE:
logging.critical("Cannot listen on (%s, %s): already in use" % (address, family))
raise
elif e.errno == errno.EACCES and address[1] <= 1024:
logging.critical("Cannot listen on (%s, %s) (you might need to launch as root)" % (address, family))
return
logging.critical("Cannot listen on (%s, %s): %s" % (address, family, e))
return
# Sleep to ensure we've dropped privileges by the time we start serving
eventlet.sleep(0.5)
# Start serving
logging.info("Listening for requests on %s" % (address, ))
try:
eventlet.serve(
sock,
lambda sock, addr: self.handle(sock, addr, internal),
concurrency = 10000,
)
finally:
sock.close()
def resolve_host(self, host, protocol="http"):
# Special case for empty hosts dict
if not self.hosts:
return NoHosts(self, host, "unknown")
# Check for an exact or any subdomain matches
bits = host.split(".")
for i in range(len(bits)):
for prefix in ["%s://" % protocol, ""]:
subhost = prefix + (".".join(bits[i:]))
if subhost in self.hosts:
action, kwargs, allow_subs = self.hosts[subhost]
if allow_subs or i == 0:
action_class = self.action_mapping[action]
return action_class(
balancer = self,
host = host,
matched_host = subhost,
**kwargs
)
return Unknown(self, host, "unknown")
|
epio/mantrid
|
mantrid/client.py
|
MantridClient._request
|
python
|
def _request(self, path, method, body=None):
"Base request function"
h = httplib2.Http()
resp, content = h.request(
self.base_url + path,
method,
body = json.dumps(body),
)
if resp['status'] == "200":
return json.loads(content)
else:
raise IOError(
"Got %s reponse from server (%s)" % (
resp['status'],
content,
)
)
|
Base request function
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/client.py#L17-L33
| null |
class MantridClient(object):
"""
Class encapsulating Mantrid client operations.
"""
def __init__(self, base_url):
self.base_url = base_url.rstrip("/")
def get_all(self):
"Returns all endpoints"
return self._request("/hostname/", "GET")
def set_all(self, data):
"Sets all endpoints"
return self._request("/hostname/", "PUT", data)
def set(self, hostname, entry):
"Sets endpoint for a single hostname"
return self._request("/hostname/%s/" % hostname, "PUT", entry)
def delete(self, hostname):
"Deletes a single hostname"
return self._request("/hostname/%s/" % hostname, "DELETE")
def stats(self, hostname=None):
if hostname:
return self._request("/stats/%s/" % hostname, "GET")
else:
return self._request("/stats/", "GET")
|
epio/mantrid
|
mantrid/actions.py
|
Empty.handle
|
python
|
def handle(self, sock, read_data, path, headers):
"Sends back a static error page."
try:
sock.sendall("HTTP/1.0 %s %s\r\nConnection: close\r\nContent-length: 0\r\n\r\n" % (self.code, responses.get(self.code, "Unknown")))
except socket.error, e:
if e.errno != errno.EPIPE:
raise
|
Sends back a static error page.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/actions.py#L35-L41
|
[
"def sendall(self, data):\n self.data += data\n"
] |
class Empty(Action):
"Sends a code-only HTTP response"
code = None
def __init__(self, balancer, host, matched_host, code):
super(Empty, self).__init__(balancer, host, matched_host)
self.code = code
|
epio/mantrid
|
mantrid/actions.py
|
Static.handle
|
python
|
def handle(self, sock, read_data, path, headers):
"Sends back a static error page."
assert self.type is not None
try:
# Get the correct file
try:
fh = open(os.path.join(self.balancer.static_dir, "%s.http" % self.type))
except IOError:
fh = open(os.path.join(os.path.dirname(__file__), "static", "%s.http" % self.type))
# Send it, using sendfile if poss. (no fileno() means we're probably using mock sockets)
try:
self._sendfile(sock.fileno(), fh.fileno(), 0, os.fstat(fh.fileno()).st_size)
except (TypeError, AttributeError):
sock.sendall(fh.read())
# Close the file and socket
fh.close()
sock.close()
except socket.error, e:
if e.errno != errno.EPIPE:
raise
|
Sends back a static error page.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/actions.py#L63-L82
|
[
"def sendall(self, data):\n self.data += data\n",
"def close(self):\n pass\n"
] |
class Static(Action):
"Sends a static HTTP response"
type = None
def __init__(self, balancer, host, matched_host, type=None):
super(Static, self).__init__(balancer, host, matched_host)
if type is not None:
self.type = type
# Try to get sendfile() using ctypes; otherwise, fall back
try:
import ctypes
_sendfile = ctypes.CDLL("libc.so.6").sendfile
_sendfile.argtypes = [ctypes.c_int, ctypes.c_int, ctypes.c_long, ctypes.c_size_t]
_sendfile.restype = ctypes.c_ssize_t
except Exception:
_sendfile = None
|
epio/mantrid
|
mantrid/actions.py
|
Redirect.handle
|
python
|
def handle(self, sock, read_data, path, headers):
"Sends back a static error page."
if "://" not in self.redirect_to:
destination = "http%s://%s" % (
"s" if headers.get('X-Forwarded-Protocol', headers.get('X-Forwarded-Proto', "")).lower() in ("https", "ssl") else "",
self.redirect_to
)
else:
destination = self.redirect_to
try:
sock.sendall("HTTP/1.0 302 Found\r\nLocation: %s/%s\r\n\r\n" % (
destination.rstrip("/"),
path.lstrip("/"),
))
except socket.error, e:
if e.errno != errno.EPIPE:
raise
|
Sends back a static error page.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/actions.py#L106-L122
|
[
"def sendall(self, data):\n self.data += data\n"
] |
class Redirect(Action):
"Sends a redirect"
type = None
def __init__(self, balancer, host, matched_host, redirect_to):
super(Redirect, self).__init__(balancer, host, matched_host)
self.redirect_to = redirect_to
|
epio/mantrid
|
mantrid/actions.py
|
Proxy.handle
|
python
|
def handle(self, sock, read_data, path, headers):
"Sends back a static error page."
for i in range(self.attempts):
try:
server_sock = eventlet.connect(
tuple(random.choice(self.backends)),
)
except socket.error:
eventlet.sleep(self.delay)
continue
# Function to help track data usage
def send_onwards(data):
server_sock.sendall(data)
return len(data)
try:
size = send_onwards(read_data)
size += SocketMelder(sock, server_sock).run()
except socket.error, e:
if e.errno != errno.EPIPE:
raise
|
Sends back a static error page.
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/actions.py#L140-L159
|
[
"def run(self):\n self.threads = {\n \"ctos\": eventlet.spawn(self.piper, self.server, self.client, \"client\", \"stoc\"),\n \"stoc\": eventlet.spawn(self.piper, self.client, self.server, \"server\", \"ctos\"),\n }\n try:\n self.threads['stoc'].wait()\n except (greenlet.GreenletExit, socket.error):\n pass\n try:\n self.threads['ctos'].wait()\n except (greenlet.GreenletExit, socket.error):\n pass\n self.server.close()\n self.client.close()\n return self.data_handled\n",
"def send_onwards(data):\n server_sock.sendall(data)\n return len(data)\n"
] |
class Proxy(Action):
"Proxies them through to a server. What loadbalancers do."
attempts = 1
delay = 1
def __init__(self, balancer, host, matched_host, backends, attempts=None, delay=None):
super(Proxy, self).__init__(balancer, host, matched_host)
self.backends = backends
assert self.backends
if attempts is not None:
self.attempts = int(attempts)
if delay is not None:
self.delay = float(delay)
|
epio/mantrid
|
mantrid/actions.py
|
Spin.handle
|
python
|
def handle(self, sock, read_data, path, headers):
"Just waits, and checks for other actions to replace us"
for i in range(self.timeout // self.check_interval):
# Sleep first
eventlet.sleep(self.check_interval)
# Check for another action
action = self.balancer.resolve_host(self.host)
if not isinstance(action, Spin):
return action.handle(sock, read_data, path, headers)
# OK, nothing happened, so give up.
action = Static(self.balancer, self.host, self.matched_host, type="timeout")
return action.handle(sock, read_data, path, headers)
|
Just waits, and checks for other actions to replace us
|
train
|
https://github.com/epio/mantrid/blob/1c699f1a4b33888b533c19cb6d025173f2160576/mantrid/actions.py#L178-L189
|
[
"def handle(self, sock, read_data, path, headers):\n \"Sends back a static error page.\"\n assert self.type is not None\n try:\n # Get the correct file\n try:\n fh = open(os.path.join(self.balancer.static_dir, \"%s.http\" % self.type))\n except IOError:\n fh = open(os.path.join(os.path.dirname(__file__), \"static\", \"%s.http\" % self.type))\n # Send it, using sendfile if poss. (no fileno() means we're probably using mock sockets)\n try:\n self._sendfile(sock.fileno(), fh.fileno(), 0, os.fstat(fh.fileno()).st_size)\n except (TypeError, AttributeError):\n sock.sendall(fh.read())\n # Close the file and socket\n fh.close()\n sock.close()\n except socket.error, e:\n if e.errno != errno.EPIPE:\n raise\n"
] |
class Spin(Action):
"""
Just holds the request open until either the timeout expires, or
another action becomes available.
"""
timeout = 120
check_interval = 1
def __init__(self, balancer, host, matched_host, timeout=None, check_interval=None):
super(Spin, self).__init__(balancer, host, matched_host)
if timeout is not None:
self.timeout = int(timeout)
if check_interval is not None:
self.check_interval = int(check_interval)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.