repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_child | python | def get_child(self, *types):
children = list(self.get_children(*types))
return children[0] if any(children) else None | :param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s). | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L109-L115 | [
"def get_children(self, *types):\n \"\"\" Get all children of the requested types.\n\n :param attribute: requested children types.\n :return: list of all children of the requested types.\n \"\"\"\n pass\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_objects_by_type | python | def get_objects_by_type(self, *types):
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l] | Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L144-L156 | null | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_object_by_type | python | def get_object_by_type(self, *types):
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None | :param types: requested object types.
:return: the child of the specified types. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L158-L164 | [
"def get_objects_by_type(self, *types):\n \"\"\" Returned objects stored in memory (without re-reading them from the TGN).\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n if not types:\n return self.objects.values()\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.obj_type().lower() in types_l]\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_objects_by_type_in_subtree | python | def get_objects_by_type_in_subtree(self, *types):
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects | :param types: requested object types.
:return: all children of the specified types. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L166-L175 | [
"def get_objects_by_type(self, *types):\n \"\"\" Returned objects stored in memory (without re-reading them from the TGN).\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n if not types:\n return self.objects.values()\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.obj_type().lower() in types_l]\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_objects_or_children_by_type | python | def get_objects_or_children_by_type(self, *types):
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types) | Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L177-L187 | [
"def get_objects_by_type(self, *types):\n \"\"\" Returned objects stored in memory (without re-reading them from the TGN).\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n if not types:\n return self.objects.values()\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.obj_type().lower() in types_l]\n",
"def get_children(self, *types):\n \"\"\" Get all children of the requested types.\n\n :param attribute: requested children types.\n :return: list of all children of the requested types.\n \"\"\"\n pass\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_object_or_child_by_type | python | def get_object_or_child_by_type(self, *types):
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None | Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L189-L199 | [
"def get_objects_or_children_by_type(self, *types):\n \"\"\" Get objects if children already been read or get children.\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n objects = self.get_objects_by_type(*types)\n return objects if objects else self.get_children(*types)\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_objects_with_object | python | def get_objects_with_object(self, obj_type, *child_types):
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)] | :param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L201-L209 | [
"def get_objects_by_type(self, *types):\n \"\"\" Returned objects stored in memory (without re-reading them from the TGN).\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n if not types:\n return self.objects.values()\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.obj_type().lower() in types_l]\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_objects_without_object | python | def get_objects_without_object(self, obj_type, *child_types):
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)] | :param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L211-L218 | [
"def get_objects_by_type(self, *types):\n \"\"\" Returned objects stored in memory (without re-reading them from the TGN).\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n if not types:\n return self.objects.values()\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.obj_type().lower() in types_l]\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_objects_with_attribute | python | def get_objects_with_attribute(self, obj_type, attribute, value):
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value] | :param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L220-L227 | [
"def get_objects_by_type(self, *types):\n \"\"\" Returned objects stored in memory (without re-reading them from the TGN).\n\n Use this method for fast access to objects in case of static configurations.\n\n :param types: requested object types.\n :return: all children of the specified types.\n \"\"\"\n\n if not types:\n return self.objects.values()\n types_l = [o.lower() for o in types]\n return [o for o in self.objects.values() if o.obj_type().lower() in types_l]\n"
] | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_ancestor_object_by_type | python | def get_ancestor_object_by_type(self, obj_type):
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type) | :param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L229-L240 | null | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.del_object_from_parent | python | def del_object_from_parent(self):
if self.parent:
self.parent.objects.pop(self.ref) | Delete object from parent object. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L249-L252 | null | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
def get_objects_of_class(cls):
"""
:return: all instances of the requested class.
"""
return list(o for o in gc.get_objects() if isinstance(o, cls))
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_object.py | TgnObject.get_objects_of_class | python | def get_objects_of_class(cls):
return list(o for o in gc.get_objects() if isinstance(o, cls)) | :return: all instances of the requested class. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_object.py#L262-L266 | null | class TgnObject(object):
""" Base class for all TGN classes. """
objects = OrderedDict()
""" Dictionary of child objects <object reference: object name>. """
def __init__(self, **data):
""" Create new TGN object in the API.
If object does not exist on the chassis, create it on the chassis as well.
:param parent: object parent. If == None the api and logger attributes must be set explicitly by the caller.
"""
super(TgnObject, self).__init__()
self._data = {}
self.objects = OrderedDict()
self._set_data(**data)
if self._data['parent']:
self.api = self.obj_parent().api
self.logger = self.obj_parent().logger
if 'objRef' not in self._data:
self._data['objRef'] = self._create()
if 'name' not in self._data:
self._data['name'] = self.obj_ref()
if self._data.get('parent', None):
# todo: make sure each object has parent and test only for None parents (STC project and IXN root)..
self._data['parent'].objects[self.obj_ref()] = self
def __str__(self):
return self.name
def get_child(self, *types):
"""
:param types: list of requested types.
:return: the first (and in most useful cases only) child of specific type(s).
"""
children = list(self.get_children(*types))
return children[0] if any(children) else None
def get_object_by_ref(self, obj_ref):
"""
:param obj_ref: requested object reference.
:return: the first object with the requested object reference in the object branch.
"""
return self._get_object_by_key('objRef', _WA_norm_obj_ref(obj_ref))
def get_object_by_name(self, obj_name):
"""
:param obj_name: requested object name.
:return: the first object with the requested object name in the object branch.
"""
return self._get_object_by_key('name', obj_name)
def _get_object_by_key(self, key, value, *types):
if self._data[key] == value and (types and self.obj_type() in types or not types):
return self
else:
if not types:
children = self.objects.values()
else:
children = self.get_objects_by_type(*types)
for child in children:
obj = child._get_object_by_key(key, value, *types)
if obj is not None:
return obj
def get_objects_by_type(self, *types):
""" Returned objects stored in memory (without re-reading them from the TGN).
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
if not types:
return self.objects.values()
types_l = [o.lower() for o in types]
return [o for o in self.objects.values() if o.obj_type().lower() in types_l]
def get_object_by_type(self, *types):
"""
:param types: requested object types.
:return: the child of the specified types.
"""
children = self.get_objects_by_type(*types)
return children[0] if any(children) else None
def get_objects_by_type_in_subtree(self, *types):
"""
:param types: requested object types.
:return: all children of the specified types.
"""
typed_objects = self.get_objects_by_type(*types)
for child in self.objects.values():
typed_objects += child.get_objects_by_type_in_subtree(*types)
return typed_objects
def get_objects_or_children_by_type(self, *types):
""" Get objects if children already been read or get children.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_by_type(*types)
return objects if objects else self.get_children(*types)
def get_object_or_child_by_type(self, *types):
""" Get object if child already been read or get child.
Use this method for fast access to objects in case of static configurations.
:param types: requested object types.
:return: all children of the specified types.
"""
objects = self.get_objects_or_children_by_type(*types)
return objects[0] if any(objects) else None
def get_objects_with_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: requested child types.
:return: all children of the requested type that have the requested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
o.get_objects_by_type(*child_types)]
def get_objects_without_object(self, obj_type, *child_types):
"""
:param obj_type: requested object type.
:param child_type: unrequested child types.
:return: all children of the requested type that do not have the unrequested child types.
"""
return [o for o in self.get_objects_by_type(obj_type) if
not o.get_objects_by_type(*child_types)]
def get_objects_with_attribute(self, obj_type, attribute, value):
"""
:param obj_type: requested object type.
:param attribute: requested attribute.
:param value: requested attribute value.
:return: all children of the requested type that have the requested attribute == requested value.
"""
return [o for o in self.get_objects_by_type(obj_type) if o.get_attribute(attribute) == value]
def get_ancestor_object_by_type(self, obj_type):
"""
:param obj_type: requested ancestor type.
:return: the ancestor of the object who's type is obj_type if exists else None.
"""
if self.type.lower() == obj_type.lower():
return self
else:
if not self.parent:
return None
return self.parent.get_ancestor_object_by_type(obj_type)
def get_object_from_attribute(self, attribute):
return self.get_objects_from_attribute(attribute)[0] if self.get_objects_from_attribute(attribute) else None
@abstractmethod
def get_objects_from_attribute(self, attribute):
pass
def del_object_from_parent(self):
""" Delete object from parent object. """
if self.parent:
self.parent.objects.pop(self.ref)
def del_objects_by_type(self, type_):
""" Delete all children objects.
:param type_: type of objects to delete.
"""
[o.del_object_from_parent() for o in self.get_objects_by_type(type_)]
@classmethod
#
# Simple utilities to return object _data. Maybe it's not Pythonic (more like Java) but after
# changing the key name couple of times I decided to go for it.
#
def obj_name(self):
"""
:return: object name.
"""
return self._data['name']
name = property(obj_name)
def obj_ref(self):
"""
:return: object reference.
"""
return str(self._data['objRef'])
ref = property(obj_ref)
def obj_type(self):
"""
:return: object type.
"""
return self._data['objType']
type = property(obj_type)
def obj_parent(self):
"""
:return: object parent.
"""
return self._data['parent']
parent = property(obj_parent)
#
# Private methods.
#
def _set_data(self, **data):
self._data.update(data)
def _build_children_objs(self, child_type, children):
children_objs = OrderedDict()
child_obj_type = self.get_obj_class(child_type)
for child in (c for c in children if c is not ''):
child_object = child_obj_type(objRef=child, objType=child_type, parent=self)
child_object._set_data(name=child_object.get_name())
children_objs[child_object.obj_ref()] = child_object
self.objects.update(children_objs)
return children_objs
#
# Abstract API methods.
#
@abstractmethod
def get_attribute(self, attribute):
""" Get single attribute value.
:param attribute: attribute name.
:return: attribute value.
"""
pass
@abstractmethod
def get_children(self, *types):
""" Get all children of the requested types.
:param attribute: requested children types.
:return: list of all children of the requested types.
"""
pass
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_tcl.py | get_args_pairs | python | def get_args_pairs(arguments):
return ' '.join(' '.join(['-' + k, tcl_str(str(v))]) for k, v in arguments.items()) | :param arguments: Python dictionary of TGN API command arguments <key, value>.
:returns: Tcl list of argument pairs <-key, value> to be used in TGN API commands. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_tcl.py#L46-L52 | null | """
Base class and utilities for TGN Python Tcl wrapper.
@author: yoram.shamir
"""
import sys
from os import path
import logging
import time
import re
from threading import Thread
from queue import Queue
from trafficgenerator.tgn_utils import new_log_file
# IxExplorer only uses Tcl utilities (over socket) without Tcl interpreter so it's OK if Tcl is not installed (e.g for
# some Linux installations). If Tcl interpreter is required and not installed it will fail anyway...
try:
if sys.version_info[0] < 3:
from Tkinter import Tcl
else:
from tkinter import Tcl
except Exception as _:
pass
def tcl_str(string=''):
"""
:param string: Python string.
:returns: Tcl string surrounded by {}.
"""
return ' {' + string + '} '
def tcl_file_name(name):
"""
:param names: file name.
:returns: normalized file name with forward slashes.
"""
return tcl_str(path.normpath(name).replace('\\', '/'))
def build_obj_ref_list(objects):
"""
:param objects: Python list of requested objects.
:returns: Tcl list of all requested objects references.
"""
return ' '.join([o.obj_ref() for o in objects])
tcl_interp_g = None
""" Global Tcl interpreter for Tcl based utilities. Does not log its operations. """
def tcl_list_2_py_list(tcl_list, within_tcl_str=False):
""" Convert Tcl list to Python list using Tcl interpreter.
:param tcl_list: string representing the Tcl string.
:param within_tcl_str: True - Tcl list is embedded within Tcl str. False - native Tcl string.
:return: Python list equivalent to the Tcl ist.
:rtye: list
"""
if not within_tcl_str:
tcl_list = tcl_str(tcl_list)
return tcl_interp_g.eval('join ' + tcl_list + ' LiStSeP').split('LiStSeP') if tcl_list else []
def py_list_to_tcl_list(py_list):
""" Convert Python list to Tcl list using Tcl interpreter.
:param py_list: Python list.
:type py_list: list
:return: string representing the Tcl string equivalent to the Python list.
"""
py_list_str = [str(s) for s in py_list]
return tcl_str(tcl_interp_g.eval('split' + tcl_str('\t'.join(py_list_str)) + '\\t'))
class TgnTk(object):
""" Native Python Tk interpreter. """
def __init__(self):
self.tcl = Tcl()
def eval(self, command):
return self.tcl.eval(command)
class TgnTkMultithread(Thread):
""" Native Python Tk interpreter with multithreading. """
_is_running = True
def __init__(self):
super(self.__class__, self).__init__()
self.in_q = Queue()
self.out_q = Queue()
self.tcl = None
def run(self):
if not self.tcl:
self.tcl = Tcl()
while self._is_running:
if not self.in_q.empty():
command = self.in_q.get()
try:
rc = self.tcl.eval(command)
self.out_q.put(rc)
except Exception as e:
self.out_q.put(e)
time.sleep(1)
def stop(self):
self._is_running = False
def eval(self, command):
self.in_q.put(command)
while self.out_q.empty():
time.sleep(1)
rc = self.out_q.get()
if isinstance(rc, Exception):
raise rc
return rc
class TgnTclConsole(object):
""" Tcl interpreter over console.
Current implementation is a sample extracted from actual project where the console is telnet to Windows machine.
"""
def __init__(self, con, tcl_exe):
""" Start Tcl interpreter on console.
:param con: console.
:param tcl_exe: full path to Tcl exe.
"""
super(TgnTclConsole, self).__init__()
self._con = con
self._con.set_prompt_match_expression('% ')
self._con.send_cmd(tcl_exe)
def eval(self, command):
"""
@summary: Evaluate Tcl command.
@param command: command to evaluate.
@return: command output.
"""
# Some operations (like take ownership) may take long time.
con_command_out = self._con.send_cmd(command, timeout=256)
if 'ERROR_SEND_CMD_EXIT_DUE_TO_TIMEOUT' in con_command_out:
raise Exception('{} - command timeout'.format(command))
command = command.replace('\\', '/')
con_command_out = con_command_out.replace('\\', '/')
command = command.replace('(', '\(').replace(')', '\)')
command = command.replace('{', '\{').replace('}', '\}')
m = re.search(command + '(.*)' + '%', con_command_out, re.DOTALL)
command_out = m.group(1).strip()
if 'couldn\'t read file' in command_out or 'RuntimeError' in command_out:
raise Exception(command_out)
return command_out
def disconnect(self):
self._con.set_prompt_match_expression('C:.*>')
self._con.send_cmd('exit')
class TgnTclWrapper(object):
""" Tcl connectivity for TGN projects. """
def __init__(self, logger, tcl_interp=None):
""" Init Python Tk package.
Add logger to log Tcl commands only.
This creates a clean Tcl script that can be used later for debug.
We assume that there might have both multiple Tcl sessions simultaneously so we add suffix to create
multiple distinguished Tcl scripts.
"""
if not logger:
logger = logging.getLogger('dummy')
self.logger = logger
self.tcl_script = new_log_file(self.logger, self.__class__.__name__)
if not tcl_interp:
self.tcl_interp = TgnTk()
else:
self.tcl_interp = tcl_interp
global tcl_interp_g
tcl_interp_g = self.tcl_interp
def eval(self, command):
""" Execute Tcl command.
Write the command to tcl script (.tcl) log file.
Execute the command.
Write the command and the output to general (.txt) log file.
:param command: Command to execute.
:returns: command raw output.
"""
if self.logger.handlers:
self.logger.debug(command.decode('utf-8'))
if self.tcl_script:
self.tcl_script.info(command)
self.rc = self.tcl_interp.eval(command)
if self.logger.handlers:
self.logger.debug('\t' + self.rc.decode('utf-8'))
return self.rc
def source(self, script_file):
self.eval('source ' + tcl_file_name(script_file))
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_tcl.py | tcl_list_2_py_list | python | def tcl_list_2_py_list(tcl_list, within_tcl_str=False):
if not within_tcl_str:
tcl_list = tcl_str(tcl_list)
return tcl_interp_g.eval('join ' + tcl_list + ' LiStSeP').split('LiStSeP') if tcl_list else [] | Convert Tcl list to Python list using Tcl interpreter.
:param tcl_list: string representing the Tcl string.
:param within_tcl_str: True - Tcl list is embedded within Tcl str. False - native Tcl string.
:return: Python list equivalent to the Tcl ist.
:rtye: list | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_tcl.py#L68-L79 | [
"def tcl_str(string=''):\n \"\"\"\n :param string: Python string.\n :returns: Tcl string surrounded by {}.\n \"\"\"\n\n return ' {' + string + '} '\n"
] | """
Base class and utilities for TGN Python Tcl wrapper.
@author: yoram.shamir
"""
import sys
from os import path
import logging
import time
import re
from threading import Thread
from queue import Queue
from trafficgenerator.tgn_utils import new_log_file
# IxExplorer only uses Tcl utilities (over socket) without Tcl interpreter so it's OK if Tcl is not installed (e.g for
# some Linux installations). If Tcl interpreter is required and not installed it will fail anyway...
try:
if sys.version_info[0] < 3:
from Tkinter import Tcl
else:
from tkinter import Tcl
except Exception as _:
pass
def tcl_str(string=''):
"""
:param string: Python string.
:returns: Tcl string surrounded by {}.
"""
return ' {' + string + '} '
def tcl_file_name(name):
"""
:param names: file name.
:returns: normalized file name with forward slashes.
"""
return tcl_str(path.normpath(name).replace('\\', '/'))
def get_args_pairs(arguments):
"""
:param arguments: Python dictionary of TGN API command arguments <key, value>.
:returns: Tcl list of argument pairs <-key, value> to be used in TGN API commands.
"""
return ' '.join(' '.join(['-' + k, tcl_str(str(v))]) for k, v in arguments.items())
def build_obj_ref_list(objects):
"""
:param objects: Python list of requested objects.
:returns: Tcl list of all requested objects references.
"""
return ' '.join([o.obj_ref() for o in objects])
tcl_interp_g = None
""" Global Tcl interpreter for Tcl based utilities. Does not log its operations. """
def py_list_to_tcl_list(py_list):
""" Convert Python list to Tcl list using Tcl interpreter.
:param py_list: Python list.
:type py_list: list
:return: string representing the Tcl string equivalent to the Python list.
"""
py_list_str = [str(s) for s in py_list]
return tcl_str(tcl_interp_g.eval('split' + tcl_str('\t'.join(py_list_str)) + '\\t'))
class TgnTk(object):
""" Native Python Tk interpreter. """
def __init__(self):
self.tcl = Tcl()
def eval(self, command):
return self.tcl.eval(command)
class TgnTkMultithread(Thread):
""" Native Python Tk interpreter with multithreading. """
_is_running = True
def __init__(self):
super(self.__class__, self).__init__()
self.in_q = Queue()
self.out_q = Queue()
self.tcl = None
def run(self):
if not self.tcl:
self.tcl = Tcl()
while self._is_running:
if not self.in_q.empty():
command = self.in_q.get()
try:
rc = self.tcl.eval(command)
self.out_q.put(rc)
except Exception as e:
self.out_q.put(e)
time.sleep(1)
def stop(self):
self._is_running = False
def eval(self, command):
self.in_q.put(command)
while self.out_q.empty():
time.sleep(1)
rc = self.out_q.get()
if isinstance(rc, Exception):
raise rc
return rc
class TgnTclConsole(object):
""" Tcl interpreter over console.
Current implementation is a sample extracted from actual project where the console is telnet to Windows machine.
"""
def __init__(self, con, tcl_exe):
""" Start Tcl interpreter on console.
:param con: console.
:param tcl_exe: full path to Tcl exe.
"""
super(TgnTclConsole, self).__init__()
self._con = con
self._con.set_prompt_match_expression('% ')
self._con.send_cmd(tcl_exe)
def eval(self, command):
"""
@summary: Evaluate Tcl command.
@param command: command to evaluate.
@return: command output.
"""
# Some operations (like take ownership) may take long time.
con_command_out = self._con.send_cmd(command, timeout=256)
if 'ERROR_SEND_CMD_EXIT_DUE_TO_TIMEOUT' in con_command_out:
raise Exception('{} - command timeout'.format(command))
command = command.replace('\\', '/')
con_command_out = con_command_out.replace('\\', '/')
command = command.replace('(', '\(').replace(')', '\)')
command = command.replace('{', '\{').replace('}', '\}')
m = re.search(command + '(.*)' + '%', con_command_out, re.DOTALL)
command_out = m.group(1).strip()
if 'couldn\'t read file' in command_out or 'RuntimeError' in command_out:
raise Exception(command_out)
return command_out
def disconnect(self):
self._con.set_prompt_match_expression('C:.*>')
self._con.send_cmd('exit')
class TgnTclWrapper(object):
""" Tcl connectivity for TGN projects. """
def __init__(self, logger, tcl_interp=None):
""" Init Python Tk package.
Add logger to log Tcl commands only.
This creates a clean Tcl script that can be used later for debug.
We assume that there might have both multiple Tcl sessions simultaneously so we add suffix to create
multiple distinguished Tcl scripts.
"""
if not logger:
logger = logging.getLogger('dummy')
self.logger = logger
self.tcl_script = new_log_file(self.logger, self.__class__.__name__)
if not tcl_interp:
self.tcl_interp = TgnTk()
else:
self.tcl_interp = tcl_interp
global tcl_interp_g
tcl_interp_g = self.tcl_interp
def eval(self, command):
""" Execute Tcl command.
Write the command to tcl script (.tcl) log file.
Execute the command.
Write the command and the output to general (.txt) log file.
:param command: Command to execute.
:returns: command raw output.
"""
if self.logger.handlers:
self.logger.debug(command.decode('utf-8'))
if self.tcl_script:
self.tcl_script.info(command)
self.rc = self.tcl_interp.eval(command)
if self.logger.handlers:
self.logger.debug('\t' + self.rc.decode('utf-8'))
return self.rc
def source(self, script_file):
self.eval('source ' + tcl_file_name(script_file))
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_tcl.py | py_list_to_tcl_list | python | def py_list_to_tcl_list(py_list):
py_list_str = [str(s) for s in py_list]
return tcl_str(tcl_interp_g.eval('split' + tcl_str('\t'.join(py_list_str)) + '\\t')) | Convert Python list to Tcl list using Tcl interpreter.
:param py_list: Python list.
:type py_list: list
:return: string representing the Tcl string equivalent to the Python list. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_tcl.py#L82-L91 | [
"def tcl_str(string=''):\n \"\"\"\n :param string: Python string.\n :returns: Tcl string surrounded by {}.\n \"\"\"\n\n return ' {' + string + '} '\n"
] | """
Base class and utilities for TGN Python Tcl wrapper.
@author: yoram.shamir
"""
import sys
from os import path
import logging
import time
import re
from threading import Thread
from queue import Queue
from trafficgenerator.tgn_utils import new_log_file
# IxExplorer only uses Tcl utilities (over socket) without Tcl interpreter so it's OK if Tcl is not installed (e.g for
# some Linux installations). If Tcl interpreter is required and not installed it will fail anyway...
try:
if sys.version_info[0] < 3:
from Tkinter import Tcl
else:
from tkinter import Tcl
except Exception as _:
pass
def tcl_str(string=''):
"""
:param string: Python string.
:returns: Tcl string surrounded by {}.
"""
return ' {' + string + '} '
def tcl_file_name(name):
"""
:param names: file name.
:returns: normalized file name with forward slashes.
"""
return tcl_str(path.normpath(name).replace('\\', '/'))
def get_args_pairs(arguments):
"""
:param arguments: Python dictionary of TGN API command arguments <key, value>.
:returns: Tcl list of argument pairs <-key, value> to be used in TGN API commands.
"""
return ' '.join(' '.join(['-' + k, tcl_str(str(v))]) for k, v in arguments.items())
def build_obj_ref_list(objects):
"""
:param objects: Python list of requested objects.
:returns: Tcl list of all requested objects references.
"""
return ' '.join([o.obj_ref() for o in objects])
tcl_interp_g = None
""" Global Tcl interpreter for Tcl based utilities. Does not log its operations. """
def tcl_list_2_py_list(tcl_list, within_tcl_str=False):
""" Convert Tcl list to Python list using Tcl interpreter.
:param tcl_list: string representing the Tcl string.
:param within_tcl_str: True - Tcl list is embedded within Tcl str. False - native Tcl string.
:return: Python list equivalent to the Tcl ist.
:rtye: list
"""
if not within_tcl_str:
tcl_list = tcl_str(tcl_list)
return tcl_interp_g.eval('join ' + tcl_list + ' LiStSeP').split('LiStSeP') if tcl_list else []
class TgnTk(object):
""" Native Python Tk interpreter. """
def __init__(self):
self.tcl = Tcl()
def eval(self, command):
return self.tcl.eval(command)
class TgnTkMultithread(Thread):
""" Native Python Tk interpreter with multithreading. """
_is_running = True
def __init__(self):
super(self.__class__, self).__init__()
self.in_q = Queue()
self.out_q = Queue()
self.tcl = None
def run(self):
if not self.tcl:
self.tcl = Tcl()
while self._is_running:
if not self.in_q.empty():
command = self.in_q.get()
try:
rc = self.tcl.eval(command)
self.out_q.put(rc)
except Exception as e:
self.out_q.put(e)
time.sleep(1)
def stop(self):
self._is_running = False
def eval(self, command):
self.in_q.put(command)
while self.out_q.empty():
time.sleep(1)
rc = self.out_q.get()
if isinstance(rc, Exception):
raise rc
return rc
class TgnTclConsole(object):
""" Tcl interpreter over console.
Current implementation is a sample extracted from actual project where the console is telnet to Windows machine.
"""
def __init__(self, con, tcl_exe):
""" Start Tcl interpreter on console.
:param con: console.
:param tcl_exe: full path to Tcl exe.
"""
super(TgnTclConsole, self).__init__()
self._con = con
self._con.set_prompt_match_expression('% ')
self._con.send_cmd(tcl_exe)
def eval(self, command):
"""
@summary: Evaluate Tcl command.
@param command: command to evaluate.
@return: command output.
"""
# Some operations (like take ownership) may take long time.
con_command_out = self._con.send_cmd(command, timeout=256)
if 'ERROR_SEND_CMD_EXIT_DUE_TO_TIMEOUT' in con_command_out:
raise Exception('{} - command timeout'.format(command))
command = command.replace('\\', '/')
con_command_out = con_command_out.replace('\\', '/')
command = command.replace('(', '\(').replace(')', '\)')
command = command.replace('{', '\{').replace('}', '\}')
m = re.search(command + '(.*)' + '%', con_command_out, re.DOTALL)
command_out = m.group(1).strip()
if 'couldn\'t read file' in command_out or 'RuntimeError' in command_out:
raise Exception(command_out)
return command_out
def disconnect(self):
self._con.set_prompt_match_expression('C:.*>')
self._con.send_cmd('exit')
class TgnTclWrapper(object):
""" Tcl connectivity for TGN projects. """
def __init__(self, logger, tcl_interp=None):
""" Init Python Tk package.
Add logger to log Tcl commands only.
This creates a clean Tcl script that can be used later for debug.
We assume that there might have both multiple Tcl sessions simultaneously so we add suffix to create
multiple distinguished Tcl scripts.
"""
if not logger:
logger = logging.getLogger('dummy')
self.logger = logger
self.tcl_script = new_log_file(self.logger, self.__class__.__name__)
if not tcl_interp:
self.tcl_interp = TgnTk()
else:
self.tcl_interp = tcl_interp
global tcl_interp_g
tcl_interp_g = self.tcl_interp
def eval(self, command):
""" Execute Tcl command.
Write the command to tcl script (.tcl) log file.
Execute the command.
Write the command and the output to general (.txt) log file.
:param command: Command to execute.
:returns: command raw output.
"""
if self.logger.handlers:
self.logger.debug(command.decode('utf-8'))
if self.tcl_script:
self.tcl_script.info(command)
self.rc = self.tcl_interp.eval(command)
if self.logger.handlers:
self.logger.debug('\t' + self.rc.decode('utf-8'))
return self.rc
def source(self, script_file):
self.eval('source ' + tcl_file_name(script_file))
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_tcl.py | TgnTclConsole.eval | python | def eval(self, command):
# Some operations (like take ownership) may take long time.
con_command_out = self._con.send_cmd(command, timeout=256)
if 'ERROR_SEND_CMD_EXIT_DUE_TO_TIMEOUT' in con_command_out:
raise Exception('{} - command timeout'.format(command))
command = command.replace('\\', '/')
con_command_out = con_command_out.replace('\\', '/')
command = command.replace('(', '\(').replace(')', '\)')
command = command.replace('{', '\{').replace('}', '\}')
m = re.search(command + '(.*)' + '%', con_command_out, re.DOTALL)
command_out = m.group(1).strip()
if 'couldn\'t read file' in command_out or 'RuntimeError' in command_out:
raise Exception(command_out)
return command_out | @summary: Evaluate Tcl command.
@param command: command to evaluate.
@return: command output. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_tcl.py#L158-L177 | null | class TgnTclConsole(object):
""" Tcl interpreter over console.
Current implementation is a sample extracted from actual project where the console is telnet to Windows machine.
"""
def __init__(self, con, tcl_exe):
""" Start Tcl interpreter on console.
:param con: console.
:param tcl_exe: full path to Tcl exe.
"""
super(TgnTclConsole, self).__init__()
self._con = con
self._con.set_prompt_match_expression('% ')
self._con.send_cmd(tcl_exe)
def disconnect(self):
self._con.set_prompt_match_expression('C:.*>')
self._con.send_cmd('exit')
|
shmir/PyTrafficGenerator | trafficgenerator/tgn_tcl.py | TgnTclWrapper.eval | python | def eval(self, command):
if self.logger.handlers:
self.logger.debug(command.decode('utf-8'))
if self.tcl_script:
self.tcl_script.info(command)
self.rc = self.tcl_interp.eval(command)
if self.logger.handlers:
self.logger.debug('\t' + self.rc.decode('utf-8'))
return self.rc | Execute Tcl command.
Write the command to tcl script (.tcl) log file.
Execute the command.
Write the command and the output to general (.txt) log file.
:param command: Command to execute.
:returns: command raw output. | train | https://github.com/shmir/PyTrafficGenerator/blob/382e5d549c83404af2a6571fe19c9e71df8bac14/trafficgenerator/tgn_tcl.py#L208-L226 | null | class TgnTclWrapper(object):
""" Tcl connectivity for TGN projects. """
def __init__(self, logger, tcl_interp=None):
""" Init Python Tk package.
Add logger to log Tcl commands only.
This creates a clean Tcl script that can be used later for debug.
We assume that there might have both multiple Tcl sessions simultaneously so we add suffix to create
multiple distinguished Tcl scripts.
"""
if not logger:
logger = logging.getLogger('dummy')
self.logger = logger
self.tcl_script = new_log_file(self.logger, self.__class__.__name__)
if not tcl_interp:
self.tcl_interp = TgnTk()
else:
self.tcl_interp = tcl_interp
global tcl_interp_g
tcl_interp_g = self.tcl_interp
def source(self, script_file):
self.eval('source ' + tcl_file_name(script_file))
|
bluedynamics/cone.ugm | src/cone/ugm/browser/autoincrement.py | AutoIncrementForm.prepare | python | def prepare(_next, self):
_next(self)
if not self.autoincrement_support:
return
id_field = self.form['id']
del id_field.attrs['required']
id_field.attrs['disabled'] = 'disabled'
id_field.getter = _('auto_incremented', default='auto incremented') | Hook after prepare and set 'id' disabled. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/autoincrement.py#L52-L61 | null | class AutoIncrementForm(Behavior):
"""Plumbing behavior for setting user id by auto increment logic.
For user add form.
"""
@default
@property
def autoincrement_support(self):
cfg = ugm_general(self.model)
return cfg.attrs['user_id_autoincrement'] == 'True'
@default
@property
def next_principal_id(self):
cfg = ugm_general(self.model)
prefix = cfg.attrs['user_id_autoincrement_prefix']
default = int(cfg.attrs['user_id_autoincrement_start'])
search = u'%s*' % prefix
backend = self.model.parent.backend
backend.invalidate()
result = backend.search(attrlist=['id'], criteria={'id': search})
principlal_ids = [_[1]['id'][0] for _ in result]
matching = list()
for principal_id in principlal_ids:
if prefix:
principal_id = principal_id[len(prefix):]
try:
principal_id = int(principal_id)
except ValueError:
continue
matching.append(principal_id)
if not matching:
principal_id = default
else:
principal_id = sorted(matching)[-1] + 1
if principal_id < default:
principal_id = default
return u'%s%i' % (prefix, principal_id)
@plumb
@plumb
def save(_next, self, widget, data):
if self.autoincrement_support:
data['id'].extracted = self.next_principal_id
_next(self, widget, data)
|
bluedynamics/cone.ugm | src/cone/ugm/browser/actions.py | delete_user_action | python | def delete_user_action(model, request):
try:
users = model.parent.backend
uid = model.model.name
del users[uid]
users()
model.parent.invalidate()
localizer = get_localizer(request)
message = localizer.translate(_(
'delete_user_from_database',
default="Deleted user '${uid}' from database.",
mapping={'uid': uid}
))
return {
'success': True,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
} | Delete user from database. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/actions.py#L67-L90 | null | from cone.ugm.model.group import Group
from cone.ugm.model.user import User
from pyramid.i18n import get_localizer
from pyramid.i18n import TranslationStringFactory
from pyramid.view import view_config
_ = TranslationStringFactory('cone.ugm')
###############################################################################
# local manager manage membership validation
###############################################################################
LM_TARGET_GID_NOT_ALLOWED = 0
LM_TARGET_UID_NOT_ALLOWED = 1
LM_TARGET_GID_IS_DEFAULT = 2
class ManageMembershipError(Exception):
def __init__(self, reason, data):
self.reason = reason
self.data = data
def validate_add_users_to_groups(model, user_ids, group_ids):
if not model.local_manager_consider_for_user:
return
lm_gids = model.local_manager_target_gids
for group_id in group_ids:
if group_id not in lm_gids:
raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)
lm_uids = model.local_manager_target_uids
for user_id in user_ids:
if user_id not in lm_uids:
raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)
def validate_remove_users_from_groups(model, user_ids, group_ids):
if not model.local_manager_consider_for_user:
return
lm_gids = model.local_manager_target_gids
for group_id in group_ids:
if group_id not in lm_gids:
raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)
lm_uids = model.local_manager_target_uids
for user_id in user_ids:
if user_id not in lm_uids:
raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)
adm_gid = model.local_manager_gid
for group_id in group_ids:
if model.local_manager_is_default(adm_gid, group_id):
raise ManageMembershipError(LM_TARGET_GID_IS_DEFAULT, group_id)
###############################################################################
# Actions for User application node
###############################################################################
@view_config(
name='delete_item',
accept='application/json',
renderer='json',
context=User,
permission='delete_user')
@view_config(
name='add_item',
accept='application/json',
renderer='json',
context=User,
permission='manage_membership')
def user_add_to_group_action(model, request):
"""Add user to group.
"""
group_id = request.params.get('id')
if not group_id:
group_ids = request.params.getall('id[]')
else:
group_ids = [group_id]
try:
user = model.model
validate_add_users_to_groups(model, [user.id], group_ids)
groups = user.root.groups
for group_id in group_ids:
groups[group_id].add(user.name)
groups()
model.parent.invalidate(user.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'added_user_to_group',
default="Added user '${uid}' to group '${gid}'.",
mapping={
'uid': user.id,
'gid': ', '.join(group_ids)
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
if e.reason is not LM_TARGET_GID_NOT_ALLOWED:
raise Exception(u"Unknown ManageMembershipError reason.")
localizer = get_localizer(request)
message = localizer.translate(_(
'lm_add_target_gid_not_allowed',
default=(
"Failed adding user '${uid}' to group '${gid}'. "
"Manage membership denied for target group."
),
mapping={
'uid': user.id,
'gid': e.data
}
))
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
@view_config(
name='remove_item',
accept='application/json',
renderer='json',
context=User,
permission='manage_membership')
def user_remove_from_group_action(model, request):
"""Remove user from group.
"""
group_id = request.params.get('id')
if not group_id:
group_ids = request.params.getall('id[]')
else:
group_ids = [group_id]
try:
user = model.model
validate_remove_users_from_groups(model, [user.id], group_ids)
groups = user.root.groups
for group_id in group_ids:
del groups[group_id][user.name]
groups()
model.parent.invalidate(user.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'removed_user_from_group',
default="Removed user '${uid}' from group '${gid}'.",
mapping={
'uid': user.id,
'gid': ', '.join(group_ids)
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
localizer = get_localizer(request)
if e.reason is LM_TARGET_GID_NOT_ALLOWED:
message = localizer.translate(_(
'lm_remove_target_gid_not_allowed',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Manage membership denied for target group."),
mapping={
'uid': user.id,
'gid': e.data
}
))
elif e.reason is LM_TARGET_GID_IS_DEFAULT:
message = localizer.translate(_(
'lm_remove_target_gid_is_default',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Target group is default group of user."
),
mapping={
'uid': user.id,
'gid': e.data
}
))
else:
raise Exception(u"Unknown ManageMembershipError reason.")
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
###############################################################################
# Actions for Group application node
###############################################################################
@view_config(
name='delete_item',
accept='application/json',
renderer='json',
context=Group,
permission='delete_group')
def delete_group_action(model, request):
"""Delete group from database.
"""
try:
groups = model.parent.backend
uid = model.model.name
del groups[uid]
groups()
model.parent.invalidate()
except Exception as e:
return {
'success': False,
'message': str(e)
}
localizer = get_localizer(request)
message = localizer.translate(_(
'deleted_group',
default='Deleted group from database'
))
return {
'success': True,
'message': message
}
@view_config(
name='add_item',
accept='application/json',
renderer='json',
context=Group,
permission='manage_membership')
def group_add_user_action(model, request):
"""Add user to group.
"""
user_id = request.params.get('id')
if not user_id:
user_ids = request.params.getall('id[]')
else:
user_ids = [user_id]
try:
group = model.model
validate_add_users_to_groups(model, user_ids, [group.id])
for user_id in user_ids:
group.add(user_id)
group()
model.parent.invalidate(group.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'added_user_to_group',
default="Added user '${uid}' to group '${gid}'.",
mapping={
'uid': ', '.join(user_ids),
'gid': group.id
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
if e.reason is not LM_TARGET_UID_NOT_ALLOWED:
raise Exception(u"Unknown ManageMembershipError reason.")
localizer = get_localizer(request)
message = localizer.translate(_(
'lm_add_target_uid_not_allowed',
default=(
"Failed adding user '${uid}' to group '${gid}'. "
"Manage membership denied for user."
),
mapping={
'uid': e.data,
'gid': group.id
}
))
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
@view_config(
name='remove_item',
accept='application/json',
renderer='json',
context=Group,
permission='manage_membership')
def group_remove_user_action(model, request):
"""Remove user from group.
"""
user_id = request.params.get('id')
if not user_id:
user_ids = request.params.getall('id[]')
else:
user_ids = [user_id]
try:
group = model.model
validate_remove_users_from_groups(model, user_ids, [group.id])
for user_id in user_ids:
del group[user_id]
group()
model.parent.invalidate(group.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'removed_user_from_group',
default="Removed user '${uid}' from group '${gid}'.",
mapping={
'uid': ', '.join(user_ids),
'gid': group.id
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
localizer = get_localizer(request)
if e.reason is LM_TARGET_UID_NOT_ALLOWED:
message = localizer.translate(_(
'lm_remove_target_uid_not_allowed',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Manage membership denied for user."
),
mapping={
'uid': e.data,
'gid': group.id
}
))
elif e.reason is LM_TARGET_GID_IS_DEFAULT:
message = localizer.translate(_(
'lm_remove_target_gid_is_default',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Target group is default group of user."
),
mapping={
'uid': ', '.join(user_ids),
'gid': e.data
}
))
else:
raise Exception(u"Unknown ManageMembershipError reason.")
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
|
bluedynamics/cone.ugm | src/cone/ugm/browser/actions.py | user_add_to_group_action | python | def user_add_to_group_action(model, request):
group_id = request.params.get('id')
if not group_id:
group_ids = request.params.getall('id[]')
else:
group_ids = [group_id]
try:
user = model.model
validate_add_users_to_groups(model, [user.id], group_ids)
groups = user.root.groups
for group_id in group_ids:
groups[group_id].add(user.name)
groups()
model.parent.invalidate(user.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'added_user_to_group',
default="Added user '${uid}' to group '${gid}'.",
mapping={
'uid': user.id,
'gid': ', '.join(group_ids)
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
if e.reason is not LM_TARGET_GID_NOT_ALLOWED:
raise Exception(u"Unknown ManageMembershipError reason.")
localizer = get_localizer(request)
message = localizer.translate(_(
'lm_add_target_gid_not_allowed',
default=(
"Failed adding user '${uid}' to group '${gid}'. "
"Manage membership denied for target group."
),
mapping={
'uid': user.id,
'gid': e.data
}
))
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
} | Add user to group. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/actions.py#L99-L151 | [
"def validate_add_users_to_groups(model, user_ids, group_ids):\n if not model.local_manager_consider_for_user:\n return\n lm_gids = model.local_manager_target_gids\n for group_id in group_ids:\n if group_id not in lm_gids:\n raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)\n lm_uids = model.local_manager_target_uids\n for user_id in user_ids:\n if user_id not in lm_uids:\n raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)\n"
] | from cone.ugm.model.group import Group
from cone.ugm.model.user import User
from pyramid.i18n import get_localizer
from pyramid.i18n import TranslationStringFactory
from pyramid.view import view_config
_ = TranslationStringFactory('cone.ugm')
###############################################################################
# local manager manage membership validation
###############################################################################
LM_TARGET_GID_NOT_ALLOWED = 0
LM_TARGET_UID_NOT_ALLOWED = 1
LM_TARGET_GID_IS_DEFAULT = 2
class ManageMembershipError(Exception):
def __init__(self, reason, data):
self.reason = reason
self.data = data
def validate_add_users_to_groups(model, user_ids, group_ids):
if not model.local_manager_consider_for_user:
return
lm_gids = model.local_manager_target_gids
for group_id in group_ids:
if group_id not in lm_gids:
raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)
lm_uids = model.local_manager_target_uids
for user_id in user_ids:
if user_id not in lm_uids:
raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)
def validate_remove_users_from_groups(model, user_ids, group_ids):
    """Check that the local manager may remove the given users from groups.

    No-op unless ``model`` is subject to local manager restrictions.
    Raises ``ManageMembershipError`` if a group or user is outside the
    local manager's targets, or if a group is the default group of the
    managed user.
    """
    if not model.local_manager_consider_for_user:
        return
    allowed_gids = model.local_manager_target_gids
    bad_gid = next((gid for gid in group_ids if gid not in allowed_gids), None)
    if bad_gid is not None:
        raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, bad_gid)
    allowed_uids = model.local_manager_target_uids
    bad_uid = next((uid for uid in user_ids if uid not in allowed_uids), None)
    if bad_uid is not None:
        raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, bad_uid)
    adm_gid = model.local_manager_gid
    for gid in group_ids:
        # removing from the user's default group is never allowed
        if model.local_manager_is_default(adm_gid, gid):
            raise ManageMembershipError(LM_TARGET_GID_IS_DEFAULT, gid)
###############################################################################
# Actions for User application node
###############################################################################
@view_config(
    name='delete_item',
    accept='application/json',
    renderer='json',
    context=User,
    permission='delete_user')
def delete_user_action(model, request):
    """Delete user from database.

    Returns a JSON serializable dict with a ``success`` flag and either a
    translated confirmation message or the stringified error.
    """
    try:
        backend = model.parent.backend
        user_id = model.model.name
        # remove the user, persist the backend and drop cached children
        del backend[user_id]
        backend()
        model.parent.invalidate()
        message = get_localizer(request).translate(_(
            'delete_user_from_database',
            default="Deleted user '${uid}' from database.",
            mapping={'uid': user_id}
        ))
        return {'success': True, 'message': message}
    except Exception as e:
        return {'success': False, 'message': str(e)}
@view_config(
name='add_item',
accept='application/json',
renderer='json',
context=User,
permission='manage_membership')
@view_config(
    name='remove_item',
    accept='application/json',
    renderer='json',
    context=User,
    permission='manage_membership')
def user_remove_from_group_action(model, request):
    """Remove user from one or more groups.

    Group ids come either as single ``id`` request parameter or as
    ``id[]`` list parameter. Returns a JSON serializable dict with a
    ``success`` flag and a translated ``message``.
    """
    # single id via 'id', multiple ids via 'id[]'
    group_id = request.params.get('id')
    if not group_id:
        group_ids = request.params.getall('id[]')
    else:
        group_ids = [group_id]
    try:
        user = model.model
        # raises ManageMembershipError when local manager restrictions apply
        validate_remove_users_from_groups(model, [user.id], group_ids)
        groups = user.root.groups
        for group_id in group_ids:
            del groups[group_id][user.name]
        # persist membership changes and drop cached state
        groups()
        model.parent.invalidate(user.name)
        localizer = get_localizer(request)
        message = localizer.translate(_(
            'removed_user_from_group',
            default="Removed user '${uid}' from group '${gid}'.",
            mapping={
                'uid': user.id,
                'gid': ', '.join(group_ids)
            }
        ))
        return {
            'success': True,
            'message': message
        }
    except ManageMembershipError as e:
        # map the denial reason to a translated user-facing message
        # NOTE(review): 'is' comparison on int constants relies on CPython
        # small-int caching; '==' would be safer - confirm intent.
        localizer = get_localizer(request)
        if e.reason is LM_TARGET_GID_NOT_ALLOWED:
            message = localizer.translate(_(
                'lm_remove_target_gid_not_allowed',
                default=(
                    "Failed removing user '${uid}' from group '${gid}'. "
                    "Manage membership denied for target group."),
                mapping={
                    'uid': user.id,
                    'gid': e.data
                }
            ))
        elif e.reason is LM_TARGET_GID_IS_DEFAULT:
            message = localizer.translate(_(
                'lm_remove_target_gid_is_default',
                default=(
                    "Failed removing user '${uid}' from group '${gid}'. "
                    "Target group is default group of user."
                ),
                mapping={
                    'uid': user.id,
                    'gid': e.data
                }
            ))
        else:
            # unexpected reason code; propagates (not caught below since it
            # is raised inside the except clause)
            raise Exception(u"Unknown ManageMembershipError reason.")
        return {
            'success': False,
            'message': message
        }
    except Exception as e:
        # best-effort JSON error reporting for any backend failure
        return {
            'success': False,
            'message': str(e)
        }
###############################################################################
# Actions for Group application node
###############################################################################
@view_config(
    name='delete_item',
    accept='application/json',
    renderer='json',
    context=Group,
    permission='delete_group')
def delete_group_action(model, request):
    """Delete group from database.

    Returns a JSON serializable dict with a ``success`` flag and either a
    translated confirmation message or the stringified error.
    """
    try:
        backend = model.parent.backend
        group_id = model.model.name
        # remove the group, persist the backend and drop cached children
        del backend[group_id]
        backend()
        model.parent.invalidate()
    except Exception as e:
        return {'success': False, 'message': str(e)}
    message = get_localizer(request).translate(_(
        'deleted_group',
        default='Deleted group from database'
    ))
    return {'success': True, 'message': message}
@view_config(
    name='add_item',
    accept='application/json',
    renderer='json',
    context=Group,
    permission='manage_membership')
def group_add_user_action(model, request):
    """Add one or more users to the group.

    User ids come either as single ``id`` request parameter or as ``id[]``
    list parameter. Returns a JSON serializable dict with a ``success``
    flag and a translated ``message``.
    """
    # single id via 'id', multiple ids via 'id[]'
    user_id = request.params.get('id')
    if not user_id:
        user_ids = request.params.getall('id[]')
    else:
        user_ids = [user_id]
    try:
        group = model.model
        # raises ManageMembershipError when local manager restrictions apply
        validate_add_users_to_groups(model, user_ids, [group.id])
        for user_id in user_ids:
            group.add(user_id)
        # persist membership changes and drop cached state
        group()
        model.parent.invalidate(group.name)
        localizer = get_localizer(request)
        message = localizer.translate(_(
            'added_user_to_group',
            default="Added user '${uid}' to group '${gid}'.",
            mapping={
                'uid': ', '.join(user_ids),
                'gid': group.id
            }
        ))
        return {
            'success': True,
            'message': message
        }
    except ManageMembershipError as e:
        # NOTE(review): validate_add_users_to_groups can also raise
        # LM_TARGET_GID_NOT_ALLOWED, which falls into the generic raise
        # below instead of producing a translated message - confirm intent.
        if e.reason is not LM_TARGET_UID_NOT_ALLOWED:
            raise Exception(u"Unknown ManageMembershipError reason.")
        localizer = get_localizer(request)
        message = localizer.translate(_(
            'lm_add_target_uid_not_allowed',
            default=(
                "Failed adding user '${uid}' to group '${gid}'. "
                "Manage membership denied for user."
            ),
            mapping={
                'uid': e.data,
                'gid': group.id
            }
        ))
        return {
            'success': False,
            'message': message
        }
    except Exception as e:
        # best-effort JSON error reporting for any backend failure
        return {
            'success': False,
            'message': str(e)
        }
@view_config(
    name='remove_item',
    accept='application/json',
    renderer='json',
    context=Group,
    permission='manage_membership')
def group_remove_user_action(model, request):
    """Remove one or more users from the group.

    User ids come either as single ``id`` request parameter or as ``id[]``
    list parameter. Returns a JSON serializable dict with a ``success``
    flag and a translated ``message``.
    """
    # single id via 'id', multiple ids via 'id[]'
    user_id = request.params.get('id')
    if not user_id:
        user_ids = request.params.getall('id[]')
    else:
        user_ids = [user_id]
    try:
        group = model.model
        # raises ManageMembershipError when local manager restrictions apply
        validate_remove_users_from_groups(model, user_ids, [group.id])
        for user_id in user_ids:
            del group[user_id]
        # persist membership changes and drop cached state
        group()
        model.parent.invalidate(group.name)
        localizer = get_localizer(request)
        message = localizer.translate(_(
            'removed_user_from_group',
            default="Removed user '${uid}' from group '${gid}'.",
            mapping={
                'uid': ', '.join(user_ids),
                'gid': group.id
            }
        ))
        return {
            'success': True,
            'message': message
        }
    except ManageMembershipError as e:
        # map the denial reason to a translated user-facing message
        # NOTE(review): LM_TARGET_GID_NOT_ALLOWED is also raisable by the
        # validator but falls into the generic raise below - confirm intent.
        localizer = get_localizer(request)
        if e.reason is LM_TARGET_UID_NOT_ALLOWED:
            message = localizer.translate(_(
                'lm_remove_target_uid_not_allowed',
                default=(
                    "Failed removing user '${uid}' from group '${gid}'. "
                    "Manage membership denied for user."
                ),
                mapping={
                    'uid': e.data,
                    'gid': group.id
                }
            ))
        elif e.reason is LM_TARGET_GID_IS_DEFAULT:
            message = localizer.translate(_(
                'lm_remove_target_gid_is_default',
                default=(
                    "Failed removing user '${uid}' from group '${gid}'. "
                    "Target group is default group of user."
                ),
                mapping={
                    'uid': ', '.join(user_ids),
                    'gid': e.data
                }
            ))
        else:
            raise Exception(u"Unknown ManageMembershipError reason.")
        return {
            'success': False,
            'message': message
        }
    except Exception as e:
        # best-effort JSON error reporting for any backend failure
        return {
            'success': False,
            'message': str(e)
        }
|
bluedynamics/cone.ugm | src/cone/ugm/browser/actions.py | delete_group_action | python | def delete_group_action(model, request):
try:
groups = model.parent.backend
uid = model.model.name
del groups[uid]
groups()
model.parent.invalidate()
except Exception as e:
return {
'success': False,
'message': str(e)
}
localizer = get_localizer(request)
message = localizer.translate(_(
'deleted_group',
default='Deleted group from database'
))
return {
'success': True,
'message': message
} | Delete group from database. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/actions.py#L237-L259 | null | from cone.ugm.model.group import Group
from cone.ugm.model.user import User
from pyramid.i18n import get_localizer
from pyramid.i18n import TranslationStringFactory
from pyramid.view import view_config
_ = TranslationStringFactory('cone.ugm')
###############################################################################
# local manager manage membership validation
###############################################################################
LM_TARGET_GID_NOT_ALLOWED = 0
LM_TARGET_UID_NOT_ALLOWED = 1
LM_TARGET_GID_IS_DEFAULT = 2
class ManageMembershipError(Exception):
def __init__(self, reason, data):
self.reason = reason
self.data = data
def validate_add_users_to_groups(model, user_ids, group_ids):
if not model.local_manager_consider_for_user:
return
lm_gids = model.local_manager_target_gids
for group_id in group_ids:
if group_id not in lm_gids:
raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)
lm_uids = model.local_manager_target_uids
for user_id in user_ids:
if user_id not in lm_uids:
raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)
def validate_remove_users_from_groups(model, user_ids, group_ids):
if not model.local_manager_consider_for_user:
return
lm_gids = model.local_manager_target_gids
for group_id in group_ids:
if group_id not in lm_gids:
raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)
lm_uids = model.local_manager_target_uids
for user_id in user_ids:
if user_id not in lm_uids:
raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)
adm_gid = model.local_manager_gid
for group_id in group_ids:
if model.local_manager_is_default(adm_gid, group_id):
raise ManageMembershipError(LM_TARGET_GID_IS_DEFAULT, group_id)
###############################################################################
# Actions for User application node
###############################################################################
@view_config(
name='delete_item',
accept='application/json',
renderer='json',
context=User,
permission='delete_user')
def delete_user_action(model, request):
"""Delete user from database.
"""
try:
users = model.parent.backend
uid = model.model.name
del users[uid]
users()
model.parent.invalidate()
localizer = get_localizer(request)
message = localizer.translate(_(
'delete_user_from_database',
default="Deleted user '${uid}' from database.",
mapping={'uid': uid}
))
return {
'success': True,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
@view_config(
name='add_item',
accept='application/json',
renderer='json',
context=User,
permission='manage_membership')
def user_add_to_group_action(model, request):
"""Add user to group.
"""
group_id = request.params.get('id')
if not group_id:
group_ids = request.params.getall('id[]')
else:
group_ids = [group_id]
try:
user = model.model
validate_add_users_to_groups(model, [user.id], group_ids)
groups = user.root.groups
for group_id in group_ids:
groups[group_id].add(user.name)
groups()
model.parent.invalidate(user.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'added_user_to_group',
default="Added user '${uid}' to group '${gid}'.",
mapping={
'uid': user.id,
'gid': ', '.join(group_ids)
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
if e.reason is not LM_TARGET_GID_NOT_ALLOWED:
raise Exception(u"Unknown ManageMembershipError reason.")
localizer = get_localizer(request)
message = localizer.translate(_(
'lm_add_target_gid_not_allowed',
default=(
"Failed adding user '${uid}' to group '${gid}'. "
"Manage membership denied for target group."
),
mapping={
'uid': user.id,
'gid': e.data
}
))
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
@view_config(
name='remove_item',
accept='application/json',
renderer='json',
context=User,
permission='manage_membership')
def user_remove_from_group_action(model, request):
"""Remove user from group.
"""
group_id = request.params.get('id')
if not group_id:
group_ids = request.params.getall('id[]')
else:
group_ids = [group_id]
try:
user = model.model
validate_remove_users_from_groups(model, [user.id], group_ids)
groups = user.root.groups
for group_id in group_ids:
del groups[group_id][user.name]
groups()
model.parent.invalidate(user.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'removed_user_from_group',
default="Removed user '${uid}' from group '${gid}'.",
mapping={
'uid': user.id,
'gid': ', '.join(group_ids)
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
localizer = get_localizer(request)
if e.reason is LM_TARGET_GID_NOT_ALLOWED:
message = localizer.translate(_(
'lm_remove_target_gid_not_allowed',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Manage membership denied for target group."),
mapping={
'uid': user.id,
'gid': e.data
}
))
elif e.reason is LM_TARGET_GID_IS_DEFAULT:
message = localizer.translate(_(
'lm_remove_target_gid_is_default',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Target group is default group of user."
),
mapping={
'uid': user.id,
'gid': e.data
}
))
else:
raise Exception(u"Unknown ManageMembershipError reason.")
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
###############################################################################
# Actions for Group application node
###############################################################################
@view_config(
name='delete_item',
accept='application/json',
renderer='json',
context=Group,
permission='delete_group')
@view_config(
name='add_item',
accept='application/json',
renderer='json',
context=Group,
permission='manage_membership')
def group_add_user_action(model, request):
"""Add user to group.
"""
user_id = request.params.get('id')
if not user_id:
user_ids = request.params.getall('id[]')
else:
user_ids = [user_id]
try:
group = model.model
validate_add_users_to_groups(model, user_ids, [group.id])
for user_id in user_ids:
group.add(user_id)
group()
model.parent.invalidate(group.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'added_user_to_group',
default="Added user '${uid}' to group '${gid}'.",
mapping={
'uid': ', '.join(user_ids),
'gid': group.id
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
if e.reason is not LM_TARGET_UID_NOT_ALLOWED:
raise Exception(u"Unknown ManageMembershipError reason.")
localizer = get_localizer(request)
message = localizer.translate(_(
'lm_add_target_uid_not_allowed',
default=(
"Failed adding user '${uid}' to group '${gid}'. "
"Manage membership denied for user."
),
mapping={
'uid': e.data,
'gid': group.id
}
))
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
@view_config(
name='remove_item',
accept='application/json',
renderer='json',
context=Group,
permission='manage_membership')
def group_remove_user_action(model, request):
"""Remove user from group.
"""
user_id = request.params.get('id')
if not user_id:
user_ids = request.params.getall('id[]')
else:
user_ids = [user_id]
try:
group = model.model
validate_remove_users_from_groups(model, user_ids, [group.id])
for user_id in user_ids:
del group[user_id]
group()
model.parent.invalidate(group.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'removed_user_from_group',
default="Removed user '${uid}' from group '${gid}'.",
mapping={
'uid': ', '.join(user_ids),
'gid': group.id
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
localizer = get_localizer(request)
if e.reason is LM_TARGET_UID_NOT_ALLOWED:
message = localizer.translate(_(
'lm_remove_target_uid_not_allowed',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Manage membership denied for user."
),
mapping={
'uid': e.data,
'gid': group.id
}
))
elif e.reason is LM_TARGET_GID_IS_DEFAULT:
message = localizer.translate(_(
'lm_remove_target_gid_is_default',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Target group is default group of user."
),
mapping={
'uid': ', '.join(user_ids),
'gid': e.data
}
))
else:
raise Exception(u"Unknown ManageMembershipError reason.")
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
|
bluedynamics/cone.ugm | src/cone/ugm/browser/actions.py | group_add_user_action | python | def group_add_user_action(model, request):
user_id = request.params.get('id')
if not user_id:
user_ids = request.params.getall('id[]')
else:
user_ids = [user_id]
try:
group = model.model
validate_add_users_to_groups(model, user_ids, [group.id])
for user_id in user_ids:
group.add(user_id)
group()
model.parent.invalidate(group.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'added_user_to_group',
default="Added user '${uid}' to group '${gid}'.",
mapping={
'uid': ', '.join(user_ids),
'gid': group.id
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
if e.reason is not LM_TARGET_UID_NOT_ALLOWED:
raise Exception(u"Unknown ManageMembershipError reason.")
localizer = get_localizer(request)
message = localizer.translate(_(
'lm_add_target_uid_not_allowed',
default=(
"Failed adding user '${uid}' to group '${gid}'. "
"Manage membership denied for user."
),
mapping={
'uid': e.data,
'gid': group.id
}
))
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
} | Add user to group. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/actions.py#L268-L319 | [
"def validate_add_users_to_groups(model, user_ids, group_ids):\n if not model.local_manager_consider_for_user:\n return\n lm_gids = model.local_manager_target_gids\n for group_id in group_ids:\n if group_id not in lm_gids:\n raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)\n lm_uids = model.local_manager_target_uids\n for user_id in user_ids:\n if user_id not in lm_uids:\n raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)\n"
] | from cone.ugm.model.group import Group
from cone.ugm.model.user import User
from pyramid.i18n import get_localizer
from pyramid.i18n import TranslationStringFactory
from pyramid.view import view_config
_ = TranslationStringFactory('cone.ugm')
###############################################################################
# local manager manage membership validation
###############################################################################
LM_TARGET_GID_NOT_ALLOWED = 0
LM_TARGET_UID_NOT_ALLOWED = 1
LM_TARGET_GID_IS_DEFAULT = 2
class ManageMembershipError(Exception):
def __init__(self, reason, data):
self.reason = reason
self.data = data
def validate_add_users_to_groups(model, user_ids, group_ids):
if not model.local_manager_consider_for_user:
return
lm_gids = model.local_manager_target_gids
for group_id in group_ids:
if group_id not in lm_gids:
raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)
lm_uids = model.local_manager_target_uids
for user_id in user_ids:
if user_id not in lm_uids:
raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)
def validate_remove_users_from_groups(model, user_ids, group_ids):
if not model.local_manager_consider_for_user:
return
lm_gids = model.local_manager_target_gids
for group_id in group_ids:
if group_id not in lm_gids:
raise ManageMembershipError(LM_TARGET_GID_NOT_ALLOWED, group_id)
lm_uids = model.local_manager_target_uids
for user_id in user_ids:
if user_id not in lm_uids:
raise ManageMembershipError(LM_TARGET_UID_NOT_ALLOWED, user_id)
adm_gid = model.local_manager_gid
for group_id in group_ids:
if model.local_manager_is_default(adm_gid, group_id):
raise ManageMembershipError(LM_TARGET_GID_IS_DEFAULT, group_id)
###############################################################################
# Actions for User application node
###############################################################################
@view_config(
name='delete_item',
accept='application/json',
renderer='json',
context=User,
permission='delete_user')
def delete_user_action(model, request):
"""Delete user from database.
"""
try:
users = model.parent.backend
uid = model.model.name
del users[uid]
users()
model.parent.invalidate()
localizer = get_localizer(request)
message = localizer.translate(_(
'delete_user_from_database',
default="Deleted user '${uid}' from database.",
mapping={'uid': uid}
))
return {
'success': True,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
@view_config(
name='add_item',
accept='application/json',
renderer='json',
context=User,
permission='manage_membership')
def user_add_to_group_action(model, request):
"""Add user to group.
"""
group_id = request.params.get('id')
if not group_id:
group_ids = request.params.getall('id[]')
else:
group_ids = [group_id]
try:
user = model.model
validate_add_users_to_groups(model, [user.id], group_ids)
groups = user.root.groups
for group_id in group_ids:
groups[group_id].add(user.name)
groups()
model.parent.invalidate(user.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'added_user_to_group',
default="Added user '${uid}' to group '${gid}'.",
mapping={
'uid': user.id,
'gid': ', '.join(group_ids)
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
if e.reason is not LM_TARGET_GID_NOT_ALLOWED:
raise Exception(u"Unknown ManageMembershipError reason.")
localizer = get_localizer(request)
message = localizer.translate(_(
'lm_add_target_gid_not_allowed',
default=(
"Failed adding user '${uid}' to group '${gid}'. "
"Manage membership denied for target group."
),
mapping={
'uid': user.id,
'gid': e.data
}
))
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
@view_config(
name='remove_item',
accept='application/json',
renderer='json',
context=User,
permission='manage_membership')
def user_remove_from_group_action(model, request):
"""Remove user from group.
"""
group_id = request.params.get('id')
if not group_id:
group_ids = request.params.getall('id[]')
else:
group_ids = [group_id]
try:
user = model.model
validate_remove_users_from_groups(model, [user.id], group_ids)
groups = user.root.groups
for group_id in group_ids:
del groups[group_id][user.name]
groups()
model.parent.invalidate(user.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'removed_user_from_group',
default="Removed user '${uid}' from group '${gid}'.",
mapping={
'uid': user.id,
'gid': ', '.join(group_ids)
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
localizer = get_localizer(request)
if e.reason is LM_TARGET_GID_NOT_ALLOWED:
message = localizer.translate(_(
'lm_remove_target_gid_not_allowed',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Manage membership denied for target group."),
mapping={
'uid': user.id,
'gid': e.data
}
))
elif e.reason is LM_TARGET_GID_IS_DEFAULT:
message = localizer.translate(_(
'lm_remove_target_gid_is_default',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Target group is default group of user."
),
mapping={
'uid': user.id,
'gid': e.data
}
))
else:
raise Exception(u"Unknown ManageMembershipError reason.")
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
###############################################################################
# Actions for Group application node
###############################################################################
@view_config(
name='delete_item',
accept='application/json',
renderer='json',
context=Group,
permission='delete_group')
def delete_group_action(model, request):
"""Delete group from database.
"""
try:
groups = model.parent.backend
uid = model.model.name
del groups[uid]
groups()
model.parent.invalidate()
except Exception as e:
return {
'success': False,
'message': str(e)
}
localizer = get_localizer(request)
message = localizer.translate(_(
'deleted_group',
default='Deleted group from database'
))
return {
'success': True,
'message': message
}
@view_config(
name='add_item',
accept='application/json',
renderer='json',
context=Group,
permission='manage_membership')
@view_config(
name='remove_item',
accept='application/json',
renderer='json',
context=Group,
permission='manage_membership')
def group_remove_user_action(model, request):
"""Remove user from group.
"""
user_id = request.params.get('id')
if not user_id:
user_ids = request.params.getall('id[]')
else:
user_ids = [user_id]
try:
group = model.model
validate_remove_users_from_groups(model, user_ids, [group.id])
for user_id in user_ids:
del group[user_id]
group()
model.parent.invalidate(group.name)
localizer = get_localizer(request)
message = localizer.translate(_(
'removed_user_from_group',
default="Removed user '${uid}' from group '${gid}'.",
mapping={
'uid': ', '.join(user_ids),
'gid': group.id
}
))
return {
'success': True,
'message': message
}
except ManageMembershipError as e:
localizer = get_localizer(request)
if e.reason is LM_TARGET_UID_NOT_ALLOWED:
message = localizer.translate(_(
'lm_remove_target_uid_not_allowed',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Manage membership denied for user."
),
mapping={
'uid': e.data,
'gid': group.id
}
))
elif e.reason is LM_TARGET_GID_IS_DEFAULT:
message = localizer.translate(_(
'lm_remove_target_gid_is_default',
default=(
"Failed removing user '${uid}' from group '${gid}'. "
"Target group is default group of user."
),
mapping={
'uid': ', '.join(user_ids),
'gid': e.data
}
))
else:
raise Exception(u"Unknown ManageMembershipError reason.")
return {
'success': False,
'message': message
}
except Exception as e:
return {
'success': False,
'message': str(e)
}
|
bluedynamics/cone.ugm | src/cone/ugm/browser/roles.py | PrincipalRolesForm.prepare | python | def prepare(_next, self):
_next(self)
if not self.roles_support:
return
if not self.request.has_permission('manage', self.model.parent):
# XXX: yafowil selection display renderer
return
value = []
if self.action_resource == 'edit':
value = self.model.model.roles
roles_widget = factory(
'field:label:select',
name='principal_roles',
value=value,
props={
'label': _('roles', default='Roles'),
'multivalued': True,
'vocabulary': self.roles_vocab,
'format': 'single',
'listing_tag': 'ul',
'listing_label_position': 'after',
})
save_widget = self.form['save']
self.form.insertbefore(roles_widget, save_widget) | Hook after prepare and set 'principal_roles' as selection to
``self.form``. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/roles.py#L26-L52 | null | class PrincipalRolesForm(Behavior):
@default
@property
def roles_vocab(self):
from cone.app.security import DEFAULT_ROLES
return DEFAULT_ROLES
@default
@property
def roles_support(self):
return ugm_roles(self.model).ldap_roles_container_valid
@plumb
@plumb
def save(_next, self, widget, data):
_next(self, widget, data)
if not self.roles_support:
return
if not self.request.has_permission('manage', self.model.parent):
return
existing_roles = list()
if self.action_resource == 'edit':
principal = self.model.model
existing_roles = principal.roles
else:
uid = data.fetch('%s.id' % self.form_name).extracted
principal = self.model.parent[uid].model
new_roles = data.fetch('%s.principal_roles' % self.form_name).extracted
removed_roles = list()
for role in existing_roles:
if role not in new_roles:
principal.remove_role(role)
removed_roles.append(role)
for role in removed_roles:
existing_roles.remove(role)
for role in new_roles:
if role not in existing_roles:
principal.add_role(role)
principal.parent.parent()
|
bluedynamics/cone.ugm | src/cone/ugm/__init__.py | initialize_ugm | python | def initialize_ugm(config, global_config, local_config):
# custom UGM styles
cfg.merged.css.protected.append((static_resources, 'styles.css'))
# custom UGM javascript
cfg.merged.js.protected.append((static_resources, 'ugm.js'))
# UGM settings
register_config('ugm_general', GeneralSettings)
register_config('ugm_server', ServerSettings)
register_config('ugm_users', UsersSettings)
register_config('ugm_groups', GroupsSettings)
register_config('ugm_roles', RolesSettings)
register_config('ugm_localmanager', LocalManagerSettings)
# Users container
register_entry('users', users_factory)
# Groups container
register_entry('groups', groups_factory)
# register default acl's
# XXX: define permissions referring users, user, groups respective group only
acl_registry.register(ugm_user_acl, User, 'user')
acl_registry.register(ugm_default_acl, Users, 'users')
acl_registry.register(ugm_default_acl, Group, 'group')
acl_registry.register(ugm_default_acl, Groups, 'groups')
# localmanager config file location
lm_config = local_config.get('ugm.localmanager_config', '')
os.environ['LOCAL_MANAGER_CFG_FILE'] = lm_config
# add translation
config.add_translation_dirs('cone.ugm:locale/')
# static resources
config.add_view(static_resources, name='cone.ugm.static')
# scan browser package
config.scan('cone.ugm.browser') | Initialize UGM. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/__init__.py#L62-L103 | null | from cone.app import cfg
from cone.app import get_root
from cone.app import main_hook
from cone.app import register_config
from cone.app import register_entry
from cone.app.security import acl_registry
from cone.app.ugm import ugm_backend
from cone.app.ugm import UGMFactory
from cone.ugm.browser import static_resources
from cone.ugm.model.group import Group
from cone.ugm.model.groups import Groups
from cone.ugm.model.groups import groups_factory
from cone.ugm.model.settings import GeneralSettings
from cone.ugm.model.settings import GroupsSettings
from cone.ugm.model.settings import LocalManagerSettings
from cone.ugm.model.settings import RolesSettings
from cone.ugm.model.settings import ServerSettings
from cone.ugm.model.settings import UsersSettings
from cone.ugm.model.user import User
from cone.ugm.model.users import Users
from cone.ugm.model.users import users_factory
from node.ext.ldap.ugm import Ugm as LdapUgm
from pyramid.security import ALL_PERMISSIONS
from pyramid.security import Allow
from pyramid.security import Deny
from pyramid.security import Everyone
import logging
import os
logger = logging.getLogger('cone.ugm')
# security
management_permissions = [
'add', 'edit', 'delete',
]
user_management_permissions = [
'add_user', 'edit_user', 'delete_user', 'manage_expiration',
]
group_management_permissions = [
'add_group', 'edit_group', 'delete_group',
]
admin_permissions = [
'view', 'manage_membership', 'view_portrait',
] + management_permissions \
+ user_management_permissions \
+ group_management_permissions
ugm_default_acl = [
(Allow, 'role:editor', ['view', 'manage_membership']),
(Allow, 'role:admin', admin_permissions),
(Allow, 'role:manager', admin_permissions + ['manage']),
(Allow, Everyone, ['login']),
(Deny, Everyone, ALL_PERMISSIONS),
]
ugm_user_acl = [
(Allow, 'system.Authenticated', ['view_portrait']),
] + ugm_default_acl
# application startup hooks
@main_hook
###############################################################################
# XXX: move to cone.ldap
###############################################################################
@main_hook
def initialize_ldap(config, global_config, local_config):
    """Initialize cone.ldap.

    Publishes the LDAP configuration file location (``ldap.config`` ini
    setting, empty string when missing) via the ``LDAP_CFG_FILE``
    environment variable - presumably read by ``node.ext.ldap``; confirm
    against that package's documentation.
    """
    os.environ['LDAP_CFG_FILE'] = local_config.get('ldap.config', '')
@ugm_backend('ldap')
class LDAPUGMFactory(UGMFactory):
    """UGM backend factory for the LDAP based UGM implementation.

    NOTE(review): original docstring said 'file based', contradicting the
    LDAP wiring below.
    """

    def __init__(self, settings):
        """Initialize factory.

        ``settings`` is ignored; all configuration is read from the
        application settings nodes at call time.
        """

    def __call__(self):
        """Create and return the ``node.ext.ldap`` ``Ugm`` instance.

        Returns ``None`` (after logging an error) when the LDAP server is
        unreachable or the configured users container is invalid. Invalid
        groups/roles containers only log a warning and disable the
        respective configuration.
        """
        settings = get_root()['settings']
        server_settings = settings['ugm_server']
        if not server_settings.ldap_connectivity:
            logger.error(u"Could not initialize authentication implementation. "
                         u"LDAP Server is not available or invalid credentials.")
            return
        props = server_settings.ldap_props
        users_settings = settings['ugm_users']
        if not users_settings.ldap_users_container_valid:
            logger.error(u"Could not initialize authentication implementation. "
                         u"Configured users container invalid.")
            return
        ucfg = users_settings.ldap_ucfg
        groups_settings = settings['ugm_groups']
        gcfg = None
        if groups_settings.ldap_groups_container_valid:
            gcfg = groups_settings.ldap_gcfg
        else:
            logger.warning(u"Configured groups container invalid.")
        roles_settings = settings['ugm_roles']
        rcfg = None
        if roles_settings.ldap_roles_container_valid:
            rcfg = roles_settings.ldap_rcfg
        else:
            logger.warning(u"Configured roles container invalid.")
        return LdapUgm(
            name='ldap_ugm',
            props=props,
            ucfg=ucfg,
            gcfg=gcfg,
            rcfg=rcfg
        )
|
bluedynamics/cone.ugm | src/cone/ugm/browser/expires.py | expiration_extractor | python | def expiration_extractor(widget, data):
active = int(data.request.get('%s.active' % widget.name, '0'))
if not active:
return 0
expires = data.extracted
if expires:
return time.mktime(expires.utctimetuple())
return UNSET | Extract expiration information.
- If active flag not set, Account is disabled (value 0).
- If active flag set and value is UNSET, account never expires.
- If active flag set and datetime choosen, account expires at given
datetime.
- Timestamp in seconds since epoch is returned. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/expires.py#L23-L38 | null | from cone.ugm.model.utils import ugm_general
from datetime import datetime
from plumber import Behavior
from plumber import plumb
from pyramid.i18n import TranslationStringFactory
from yafowil.base import factory
from yafowil.base import fetch_value
from yafowil.base import UNSET
from yafowil.common import generic_extractor
from yafowil.common import generic_required_extractor
from yafowil.utils import cssid
from yafowil.widget.datetime.widget import datetime_display_renderer
from yafowil.widget.datetime.widget import datetime_extractor
from yafowil.widget.datetime.widget import format_date
from yafowil.widget.datetime.widget import format_time
from yafowil.widget.datetime.widget import render_datetime_input
import time
_ = TranslationStringFactory('cone.ugm')
def expiration_edit_renderer(widget, data):
tag = data.tag
active_attrs = dict()
active_attrs['id'] = cssid(widget, 'checkbox')
active_attrs['type'] = 'checkbox'
active_attrs['name'] = '%s.active' % widget.name
active_attrs['value'] = '1'
value = fetch_value(widget, data)
if value == 8639913600:
value = UNSET
if value != 0:
active_attrs['checked'] = 'checked'
active = tag('input', **active_attrs)
until = tag('label', u'until')
locale = widget.attrs['locale']
if callable(locale):
locale = locale(widget, data)
date = None
time = widget.attrs['time']
if value in [0, UNSET]:
date = ''
else:
date = datetime.fromtimestamp(value)
if time:
time = format_time(date)
date = format_date(date, locale, widget.attrs['delimiter'])
expires = render_datetime_input(widget, data, date, time)
return tag('div', active + until + expires, class_='expiration-widget')
def expiration_display_renderer(widget, data):
tag = data.tag
active_attrs = dict()
active_attrs['id'] = cssid(widget, 'checkbox')
active_attrs['type'] = 'checkbox'
active_attrs['disabled'] = 'disabled'
value = data.value
if value != 0:
active_attrs['checked'] = 'checked'
active = tag('input', **active_attrs)
until = tag('label', u'until')
if value not in [0, UNSET]:
value = datetime.fromtimestamp(value)
expires = datetime_display_renderer(widget, data, value)
if expires:
expires = until + expires
return tag('div', active + expires, class_='expiration-widget')
factory.register(
'expiration',
extractors=[generic_extractor, generic_required_extractor,
datetime_extractor, expiration_extractor],
edit_renderers=[expiration_edit_renderer],
display_renderers=[expiration_display_renderer])
factory.doc['blueprint']['expiration'] = \
"""Add-on blueprint UGM expiration widget. Utilizes yafowil.widget.datetime.
"""
factory.defaults['expiration.class'] = 'expiration form-control'
factory.defaults['expiration.datepicker_class'] = 'datepicker'
factory.defaults['expiration.datepicker'] = True
factory.defaults['expiration.time'] = False
factory.defaults['expiration.tzinfo'] = None
factory.defaults['expiration.delimiter'] = '.'
factory.defaults['expiration.locale'] = 'de'
factory.defaults['expiration.format'] = '%Y.%m.%d'
factory.doc['props']['expiration.format'] = \
"""Pattern accepted by ``datetime.strftime``.
"""
class ExpirationForm(Behavior):
"""Expiration field plumbing behavior for user forms.
"""
@plumb
def prepare(_next, self):
"""Hook after prepare and set expiration widget to
``self.form``.
"""
_next(self)
cfg = ugm_general(self.model)
if cfg.attrs['users_account_expiration'] != 'True':
return
mode = 'edit'
if not self.request.has_permission(
'manage_expiration', self.model.parent):
mode = 'display'
if self.action_resource == 'edit':
attr = cfg.attrs['users_expires_attr']
unit = int(cfg.attrs['users_expires_unit'])
value = int(self.model.attrs.get(attr, 0))
# if format days, convert to seconds
if unit == 0:
value *= 86400
else:
value = UNSET
expires_widget = factory(
'field:label:expiration',
name='active',
value=value,
props={
'label': _('active', default='Active')
},
mode=mode
)
save_widget = self.form['save']
self.form.insertbefore(expires_widget, save_widget)
@plumb
def save(_next, self, widget, data):
if self.request.has_permission(
'manage_expiration', self.model.parent):
cfg = ugm_general(self.model)
if cfg.attrs['users_account_expiration'] == 'True':
attr = cfg.attrs['users_expires_attr']
unit = int(cfg.attrs['users_expires_unit'])
value = data.fetch('userform.active').extracted
if value is UNSET:
if unit == 0:
value = 99999
else:
value = 8639913600
elif value != 0:
if unit == 0:
add = 0
if value % 86400 != 0:
add = 1
value /= 86400
value += add
value = int(value)
self.model.attrs[attr] = str(value)
_next(self, widget, data)
|
bluedynamics/cone.ugm | src/cone/ugm/browser/expires.py | ExpirationForm.prepare | python | def prepare(_next, self):
_next(self)
cfg = ugm_general(self.model)
if cfg.attrs['users_account_expiration'] != 'True':
return
mode = 'edit'
if not self.request.has_permission(
'manage_expiration', self.model.parent):
mode = 'display'
if self.action_resource == 'edit':
attr = cfg.attrs['users_expires_attr']
unit = int(cfg.attrs['users_expires_unit'])
value = int(self.model.attrs.get(attr, 0))
# if format days, convert to seconds
if unit == 0:
value *= 86400
else:
value = UNSET
expires_widget = factory(
'field:label:expiration',
name='active',
value=value,
props={
'label': _('active', default='Active')
},
mode=mode
)
save_widget = self.form['save']
self.form.insertbefore(expires_widget, save_widget) | Hook after prepare and set expiration widget to
``self.form``. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/expires.py#L127-L158 | [
"def ugm_general(model):\n return model.root['settings']['ugm_general']\n"
] | class ExpirationForm(Behavior):
"""Expiration field plumbing behavior for user forms.
"""
@plumb
@plumb
def save(_next, self, widget, data):
if self.request.has_permission(
'manage_expiration', self.model.parent):
cfg = ugm_general(self.model)
if cfg.attrs['users_account_expiration'] == 'True':
attr = cfg.attrs['users_expires_attr']
unit = int(cfg.attrs['users_expires_unit'])
value = data.fetch('userform.active').extracted
if value is UNSET:
if unit == 0:
value = 99999
else:
value = 8639913600
elif value != 0:
if unit == 0:
add = 0
if value % 86400 != 0:
add = 1
value /= 86400
value += add
value = int(value)
self.model.attrs[attr] = str(value)
_next(self, widget, data)
|
bluedynamics/cone.ugm | src/cone/ugm/browser/portrait.py | portrait_image | python | def portrait_image(model, request):
response = Response()
cfg = ugm_general(model)
response.body = model.attrs[cfg.attrs['users_portrait_attr']]
response.headers['Content-Type'] = 'image/jpeg'
response.headers['Cache-Control'] = 'max-age=0'
return response | XXX: needs polishing. Return configured default portrait if not set
on user. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/portrait.py#L22-L31 | [
"def ugm_general(model):\n return model.root['settings']['ugm_general']\n"
] | from cone.app.browser.utils import make_url
from cone.ugm.model.user import User
from cone.ugm.model.utils import ugm_general
from io import BytesIO
from plumber import Behavior
from plumber import default
from plumber import plumb
from pyramid.i18n import TranslationStringFactory
from pyramid.response import Response
from pyramid.view import view_config
from yafowil.base import factory
from yafowil.base import UNSET
_ = TranslationStringFactory('cone.ugm')
@view_config(
name='portrait_image',
context=User,
permission='view_portrait')
class PortraitForm(Behavior):
"""Plumbing behavior for setting user portrait image.
"""
@default
@property
def portrait_support(self):
cfg = ugm_general(self.model)
return cfg.attrs['users_portrait'] == 'True'
@plumb
def prepare(_next, self):
"""Hook after prepare and set 'portrait' as image widget to
``self.form``.
"""
_next(self)
if not self.portrait_support:
return
model = self.model
request = self.request
if request.has_permission('edit_user', model.parent):
mode = 'edit'
else:
mode = 'display'
cfg = ugm_general(model)
image_attr = cfg.attrs['users_portrait_attr']
image_accept = cfg.attrs['users_portrait_accept']
image_width = int(cfg.attrs['users_portrait_width'])
image_height = int(cfg.attrs['users_portrait_height'])
image_data = model.attrs.get(image_attr)
if image_data:
image_value = {
'file': BytesIO(image_data),
'mimetype': 'image/jpeg',
}
image_url = make_url(request, node=model,
resource='portrait_image')
else:
image_value = UNSET
resource = 'cone.ugm.static/images/default_portrait.jpg'
image_url = make_url(request, node=model.root, resource=resource)
portrait_widget = factory(
'field:label:error:image',
name='portrait',
value=image_value,
props={
'label': _('portrait', default='Portrait'),
'src': image_url,
'alt': _('portrait', default='Portrait'),
'accept': image_accept,
'minsize': (image_width, image_height),
'crop': {
'size': (image_width, image_height),
'fitting': True,
}
},
mode=mode)
save_widget = self.form['save']
self.form.insertbefore(portrait_widget, save_widget)
@plumb
def save(_next, self, widget, data):
if not self.portrait_support or \
not self.request.has_permission('edit_user', self.model.parent):
_next(self, widget, data)
return
cfg = ugm_general(self.model)
image_attr = cfg.attrs['users_portrait_attr']
portrait = data.fetch('userform.portrait').extracted
if portrait:
if portrait['action'] in ['new', 'replace']:
cropped = portrait['cropped']
image_data = BytesIO()
cropped.save(image_data, 'jpeg', quality=100)
image_data.seek(0)
self.model.attrs[image_attr] = image_data.read()
if portrait['action'] == 'delete':
del self.model.attrs[image_attr]
_next(self, widget, data)
|
bluedynamics/cone.ugm | src/cone/ugm/browser/portrait.py | PortraitForm.prepare | python | def prepare(_next, self):
_next(self)
if not self.portrait_support:
return
model = self.model
request = self.request
if request.has_permission('edit_user', model.parent):
mode = 'edit'
else:
mode = 'display'
cfg = ugm_general(model)
image_attr = cfg.attrs['users_portrait_attr']
image_accept = cfg.attrs['users_portrait_accept']
image_width = int(cfg.attrs['users_portrait_width'])
image_height = int(cfg.attrs['users_portrait_height'])
image_data = model.attrs.get(image_attr)
if image_data:
image_value = {
'file': BytesIO(image_data),
'mimetype': 'image/jpeg',
}
image_url = make_url(request, node=model,
resource='portrait_image')
else:
image_value = UNSET
resource = 'cone.ugm.static/images/default_portrait.jpg'
image_url = make_url(request, node=model.root, resource=resource)
portrait_widget = factory(
'field:label:error:image',
name='portrait',
value=image_value,
props={
'label': _('portrait', default='Portrait'),
'src': image_url,
'alt': _('portrait', default='Portrait'),
'accept': image_accept,
'minsize': (image_width, image_height),
'crop': {
'size': (image_width, image_height),
'fitting': True,
}
},
mode=mode)
save_widget = self.form['save']
self.form.insertbefore(portrait_widget, save_widget) | Hook after prepare and set 'portrait' as image widget to
``self.form``. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/portrait.py#L45-L92 | [
"def ugm_general(model):\n return model.root['settings']['ugm_general']\n"
] | class PortraitForm(Behavior):
"""Plumbing behavior for setting user portrait image.
"""
@default
@property
def portrait_support(self):
cfg = ugm_general(self.model)
return cfg.attrs['users_portrait'] == 'True'
@plumb
@plumb
def save(_next, self, widget, data):
if not self.portrait_support or \
not self.request.has_permission('edit_user', self.model.parent):
_next(self, widget, data)
return
cfg = ugm_general(self.model)
image_attr = cfg.attrs['users_portrait_attr']
portrait = data.fetch('userform.portrait').extracted
if portrait:
if portrait['action'] in ['new', 'replace']:
cropped = portrait['cropped']
image_data = BytesIO()
cropped.save(image_data, 'jpeg', quality=100)
image_data.seek(0)
self.model.attrs[image_attr] = image_data.read()
if portrait['action'] == 'delete':
del self.model.attrs[image_attr]
_next(self, widget, data)
|
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_consider_for_user | python | def local_manager_consider_for_user(self):
if not self.local_management_enabled:
return False
request = get_current_request()
if authenticated_userid(request) == security.ADMIN_USER:
return False
roles = security.authenticated_user(request).roles
if 'admin' in roles or 'manager' in roles:
return False
return True | Flag whether local manager ACL should be considered for current
authenticated user. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L73-L85 | null | class LocalManager(Behavior):
"""Behavior providing local manager information for authenticated user.
"""
@finalize
@property
def local_management_enabled(self):
"""Flag whether local management is enabled.
"""
general_settings = self.root['settings']['ugm_general']
return general_settings.attrs.users_local_management_enabled == 'True'
@finalize
@property
@finalize
@property
def local_manager_gid(self):
"""Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised.
"""
config = self.root['settings']['ugm_localmanager'].attrs
user = security.authenticated_user(get_current_request())
if not user:
return None
gids = user.group_ids
adm_gids = list()
for gid in gids:
rule = config.get(gid)
if rule:
adm_gids.append(gid)
if len(adm_gids) == 0:
return None
if len(adm_gids) > 1:
msg = (u"Authenticated member defined in local manager "
u"groups %s but only one management group allowed for "
u"each user. Please contact System Administrator in "
u"order to fix this problem.")
exc = msg % ', '.join(["'%s'" % gid for gid in adm_gids])
raise Exception(exc)
return adm_gids[0]
@finalize
@property
def local_manager_rule(self):
"""Return rule for local manager.
"""
adm_gid = self.local_manager_gid
if not adm_gid:
return None
config = self.root['settings']['ugm_localmanager'].attrs
return config[adm_gid]
@finalize
@property
def local_manager_default_gids(self):
"""Return default group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['default']
@finalize
@property
def local_manager_target_gids(self):
"""Target group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['target']
@finalize
@property
def local_manager_target_uids(self):
"""Target uid's for local manager.
"""
groups = self.root['groups'].backend
managed_uids = set()
for gid in self.local_manager_target_gids:
group = groups.get(gid)
if group:
managed_uids.update(group.member_ids)
return list(managed_uids)
@finalize
def local_manager_is_default(self, adm_gid, gid):
"""Check whether gid is default group for local manager group.
"""
config = self.root['settings']['ugm_localmanager'].attrs
rule = config[adm_gid]
if gid not in rule['target']:
raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
return gid in rule['default']
|
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_gid | python | def local_manager_gid(self):
config = self.root['settings']['ugm_localmanager'].attrs
user = security.authenticated_user(get_current_request())
if not user:
return None
gids = user.group_ids
adm_gids = list()
for gid in gids:
rule = config.get(gid)
if rule:
adm_gids.append(gid)
if len(adm_gids) == 0:
return None
if len(adm_gids) > 1:
msg = (u"Authenticated member defined in local manager "
u"groups %s but only one management group allowed for "
u"each user. Please contact System Administrator in "
u"order to fix this problem.")
exc = msg % ', '.join(["'%s'" % gid for gid in adm_gids])
raise Exception(exc)
return adm_gids[0] | Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L89-L114 | null | class LocalManager(Behavior):
"""Behavior providing local manager information for authenticated user.
"""
@finalize
@property
def local_management_enabled(self):
"""Flag whether local management is enabled.
"""
general_settings = self.root['settings']['ugm_general']
return general_settings.attrs.users_local_management_enabled == 'True'
@finalize
@property
def local_manager_consider_for_user(self):
"""Flag whether local manager ACL should be considered for current
authenticated user.
"""
if not self.local_management_enabled:
return False
request = get_current_request()
if authenticated_userid(request) == security.ADMIN_USER:
return False
roles = security.authenticated_user(request).roles
if 'admin' in roles or 'manager' in roles:
return False
return True
@finalize
@property
@finalize
@property
def local_manager_rule(self):
"""Return rule for local manager.
"""
adm_gid = self.local_manager_gid
if not adm_gid:
return None
config = self.root['settings']['ugm_localmanager'].attrs
return config[adm_gid]
@finalize
@property
def local_manager_default_gids(self):
"""Return default group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['default']
@finalize
@property
def local_manager_target_gids(self):
"""Target group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['target']
@finalize
@property
def local_manager_target_uids(self):
"""Target uid's for local manager.
"""
groups = self.root['groups'].backend
managed_uids = set()
for gid in self.local_manager_target_gids:
group = groups.get(gid)
if group:
managed_uids.update(group.member_ids)
return list(managed_uids)
@finalize
def local_manager_is_default(self, adm_gid, gid):
"""Check whether gid is default group for local manager group.
"""
config = self.root['settings']['ugm_localmanager'].attrs
rule = config[adm_gid]
if gid not in rule['target']:
raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
return gid in rule['default']
|
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_rule | python | def local_manager_rule(self):
adm_gid = self.local_manager_gid
if not adm_gid:
return None
config = self.root['settings']['ugm_localmanager'].attrs
return config[adm_gid] | Return rule for local manager. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L118-L125 | null | class LocalManager(Behavior):
"""Behavior providing local manager information for authenticated user.
"""
@finalize
@property
def local_management_enabled(self):
"""Flag whether local management is enabled.
"""
general_settings = self.root['settings']['ugm_general']
return general_settings.attrs.users_local_management_enabled == 'True'
@finalize
@property
def local_manager_consider_for_user(self):
"""Flag whether local manager ACL should be considered for current
authenticated user.
"""
if not self.local_management_enabled:
return False
request = get_current_request()
if authenticated_userid(request) == security.ADMIN_USER:
return False
roles = security.authenticated_user(request).roles
if 'admin' in roles or 'manager' in roles:
return False
return True
@finalize
@property
def local_manager_gid(self):
"""Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised.
"""
config = self.root['settings']['ugm_localmanager'].attrs
user = security.authenticated_user(get_current_request())
if not user:
return None
gids = user.group_ids
adm_gids = list()
for gid in gids:
rule = config.get(gid)
if rule:
adm_gids.append(gid)
if len(adm_gids) == 0:
return None
if len(adm_gids) > 1:
msg = (u"Authenticated member defined in local manager "
u"groups %s but only one management group allowed for "
u"each user. Please contact System Administrator in "
u"order to fix this problem.")
exc = msg % ', '.join(["'%s'" % gid for gid in adm_gids])
raise Exception(exc)
return adm_gids[0]
@finalize
@property
@finalize
@property
def local_manager_default_gids(self):
"""Return default group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['default']
@finalize
@property
def local_manager_target_gids(self):
"""Target group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['target']
@finalize
@property
def local_manager_target_uids(self):
"""Target uid's for local manager.
"""
groups = self.root['groups'].backend
managed_uids = set()
for gid in self.local_manager_target_gids:
group = groups.get(gid)
if group:
managed_uids.update(group.member_ids)
return list(managed_uids)
@finalize
def local_manager_is_default(self, adm_gid, gid):
"""Check whether gid is default group for local manager group.
"""
config = self.root['settings']['ugm_localmanager'].attrs
rule = config[adm_gid]
if gid not in rule['target']:
raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
return gid in rule['default']
|
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_target_uids | python | def local_manager_target_uids(self):
groups = self.root['groups'].backend
managed_uids = set()
for gid in self.local_manager_target_gids:
group = groups.get(gid)
if group:
managed_uids.update(group.member_ids)
return list(managed_uids) | Target uid's for local manager. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L149-L158 | null | class LocalManager(Behavior):
"""Behavior providing local manager information for authenticated user.
"""
@finalize
@property
def local_management_enabled(self):
"""Flag whether local management is enabled.
"""
general_settings = self.root['settings']['ugm_general']
return general_settings.attrs.users_local_management_enabled == 'True'
@finalize
@property
def local_manager_consider_for_user(self):
"""Flag whether local manager ACL should be considered for current
authenticated user.
"""
if not self.local_management_enabled:
return False
request = get_current_request()
if authenticated_userid(request) == security.ADMIN_USER:
return False
roles = security.authenticated_user(request).roles
if 'admin' in roles or 'manager' in roles:
return False
return True
@finalize
@property
def local_manager_gid(self):
"""Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised.
"""
config = self.root['settings']['ugm_localmanager'].attrs
user = security.authenticated_user(get_current_request())
if not user:
return None
gids = user.group_ids
adm_gids = list()
for gid in gids:
rule = config.get(gid)
if rule:
adm_gids.append(gid)
if len(adm_gids) == 0:
return None
if len(adm_gids) > 1:
msg = (u"Authenticated member defined in local manager "
u"groups %s but only one management group allowed for "
u"each user. Please contact System Administrator in "
u"order to fix this problem.")
exc = msg % ', '.join(["'%s'" % gid for gid in adm_gids])
raise Exception(exc)
return adm_gids[0]
@finalize
@property
def local_manager_rule(self):
"""Return rule for local manager.
"""
adm_gid = self.local_manager_gid
if not adm_gid:
return None
config = self.root['settings']['ugm_localmanager'].attrs
return config[adm_gid]
@finalize
@property
def local_manager_default_gids(self):
"""Return default group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['default']
@finalize
@property
def local_manager_target_gids(self):
"""Target group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['target']
@finalize
@property
@finalize
def local_manager_is_default(self, adm_gid, gid):
"""Check whether gid is default group for local manager group.
"""
config = self.root['settings']['ugm_localmanager'].attrs
rule = config[adm_gid]
if gid not in rule['target']:
raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
return gid in rule['default']
|
bluedynamics/cone.ugm | src/cone/ugm/model/localmanager.py | LocalManager.local_manager_is_default | python | def local_manager_is_default(self, adm_gid, gid):
config = self.root['settings']['ugm_localmanager'].attrs
rule = config[adm_gid]
if gid not in rule['target']:
raise Exception(u"group '%s' not managed by '%s'" % (gid, adm_gid))
return gid in rule['default'] | Check whether gid is default group for local manager group. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/model/localmanager.py#L161-L168 | null | class LocalManager(Behavior):
"""Behavior providing local manager information for authenticated user.
"""
@finalize
@property
def local_management_enabled(self):
"""Flag whether local management is enabled.
"""
general_settings = self.root['settings']['ugm_general']
return general_settings.attrs.users_local_management_enabled == 'True'
@finalize
@property
def local_manager_consider_for_user(self):
"""Flag whether local manager ACL should be considered for current
authenticated user.
"""
if not self.local_management_enabled:
return False
request = get_current_request()
if authenticated_userid(request) == security.ADMIN_USER:
return False
roles = security.authenticated_user(request).roles
if 'admin' in roles or 'manager' in roles:
return False
return True
@finalize
@property
def local_manager_gid(self):
"""Group id of local manager group of current authenticated member.
Currently a user can be assigned only to one local manager group. If
more than one local manager group is configured, an error is raised.
"""
config = self.root['settings']['ugm_localmanager'].attrs
user = security.authenticated_user(get_current_request())
if not user:
return None
gids = user.group_ids
adm_gids = list()
for gid in gids:
rule = config.get(gid)
if rule:
adm_gids.append(gid)
if len(adm_gids) == 0:
return None
if len(adm_gids) > 1:
msg = (u"Authenticated member defined in local manager "
u"groups %s but only one management group allowed for "
u"each user. Please contact System Administrator in "
u"order to fix this problem.")
exc = msg % ', '.join(["'%s'" % gid for gid in adm_gids])
raise Exception(exc)
return adm_gids[0]
@finalize
@property
def local_manager_rule(self):
"""Return rule for local manager.
"""
adm_gid = self.local_manager_gid
if not adm_gid:
return None
config = self.root['settings']['ugm_localmanager'].attrs
return config[adm_gid]
@finalize
@property
def local_manager_default_gids(self):
"""Return default group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['default']
@finalize
@property
def local_manager_target_gids(self):
"""Target group id's for local manager.
"""
rule = self.local_manager_rule
if not rule:
return list()
return rule['target']
@finalize
@property
def local_manager_target_uids(self):
"""Target uid's for local manager.
"""
groups = self.root['groups'].backend
managed_uids = set()
for gid in self.local_manager_target_gids:
group = groups.get(gid)
if group:
managed_uids.update(group.member_ids)
return list(managed_uids)
@finalize
|
bluedynamics/cone.ugm | src/cone/ugm/browser/user.py | UserForm.form_field_definitions | python | def form_field_definitions(self):
schema = copy.deepcopy(form_field_definitions.user)
uid, login = self._get_auth_attrs()
if uid != login:
field = schema.get(login, schema['default'])
if field['chain'].find('*optional_login') == -1:
field['chain'] = '%s:%s' % (
'*optional_login', field['chain'])
if not field.get('custom'):
field['custom'] = dict()
field['custom']['optional_login'] = \
(['context.optional_login'], [], [], [], [])
schema[login] = field
return schema | Hook optional_login extractor if necessary for form defaults. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/user.py#L183-L198 | [
"def _get_auth_attrs(self):\n config = ugm_users(self.model)\n aliases = config.attrs.users_aliases_attrmap\n return aliases['id'], aliases['login']\n"
] | class UserForm(PrincipalForm):
form_name = 'userform'
@property
def form_attrmap(self):
settings = ugm_users(self.model)
return settings.attrs.users_form_attrmap
@property
def exists(self, widget, data):
uid = data.extracted
if uid is UNSET:
return data.extracted
if uid in self.model.parent.backend:
message = _('user_already_exists',
default="User ${uid} already exists.",
mapping={'uid': uid})
raise ExtractionError(message)
return data.extracted
def optional_login(self, widget, data):
login = self._get_auth_attrs()[1]
res = self.model.parent.backend.search(
criteria={login: data.extracted})
# no entries found with same login attribute set.
if not res:
return data.extracted
# 1-lenght result, unchange login attribute
if len(res) == 1:
if res[0] == self.model.name:
return data.extracted
message = _('user_login_not_unique',
default="User login ${login} not unique.",
mapping={'login': data.extracted})
raise ExtractionError(message)
def _get_auth_attrs(self):
config = ugm_users(self.model)
aliases = config.attrs.users_aliases_attrmap
return aliases['id'], aliases['login']
|
bluedynamics/cone.ugm | src/cone/ugm/browser/remote.py | remote_add_user | python | def remote_add_user(model, request):
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid in users:
return {
'success': False,
'message': u"User with given ID already exists.",
}
password = params.get('password')
add_roles = params.get('roles', '')
add_roles = [val.strip() for val in add_roles.split(',') if val]
add_groups = params.get('groups', '')
add_groups = [val.strip() for val in add_groups.split(',') if val]
attrs = dict()
for key, val in params.items():
if not key.startswith('attr.'):
continue
key = key[key.find('.') + 1:]
attrs[key] = val
settings = ugm_users(model)
attrmap = settings.attrs.users_form_attrmap
exposed = settings.attrs.users_exposed_attributes
if not exposed:
exposed = list()
valid_attrs = attrmap.keys() + exposed
checked_attrs = dict()
for key in valid_attrs:
val = attrs.get(key)
if not val:
continue
checked_attrs[key] = val
try:
user = users.create(uid, **checked_attrs)
message = u""
from cone.app.security import DEFAULT_ROLES
available_roles = [role[0] for role in DEFAULT_ROLES]
for role in add_roles:
if role not in available_roles:
message += u"Role '%s' given but inexistent. " % role
continue
user.add_role(role)
groups = users.parent.groups
for group in add_groups:
if group not in groups:
message += u"Group '%s' given but inexistent. " % group
continue
groups[group].add(uid)
users.parent()
if password is not None:
users.passwd(uid, None, password)
message += u"Created user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate() | Add user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
New user id.
password
User password to be set initially (optional).
roles
Comma seperated role names the user initially has.
groups
Comma seperated groups names the user should initially be member of.
attr.*
User attributes to be set. I.e. ``attr.mail`` would set the mail
attribute for newly created user. All request parameters prefixed with
``attr`` get checked against user attribute attrmap from settings.
Restrictions - All values, whether single or multi valued, are passed
as string or list of strings to the create function. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/remote.py#L12-L124 | [
"def ugm_users(model):\n return model.root['settings']['ugm_users']\n"
] | from cone.ugm.model.users import Users
from cone.ugm.model.utils import ugm_users
from pyramid.view import view_config
@view_config(
name='remote_add_user',
accept='application/json',
renderer='json',
context=Users,
permission='add_user')
@view_config(
name='remote_delete_user',
accept='application/json',
renderer='json',
context=Users,
permission='delete_user')
def remote_delete_user(model, request):
"""Remove user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
Id of user to delete.
"""
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid not in users:
return {
'success': False,
'message': u"User with given ID not exists.",
}
try:
del users[uid]
users.parent()
message = u"Deleted user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate()
|
bluedynamics/cone.ugm | src/cone/ugm/browser/remote.py | remote_delete_user | python | def remote_delete_user(model, request):
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid not in users:
return {
'success': False,
'message': u"User with given ID not exists.",
}
try:
del users[uid]
users.parent()
message = u"Deleted user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate() | Remove user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
Id of user to delete. | train | https://github.com/bluedynamics/cone.ugm/blob/3c197075f3f6e94781289311c5637bb9c8e5597c/src/cone/ugm/browser/remote.py#L133-L180 | null | from cone.ugm.model.users import Users
from cone.ugm.model.utils import ugm_users
from pyramid.view import view_config
@view_config(
name='remote_add_user',
accept='application/json',
renderer='json',
context=Users,
permission='add_user')
def remote_add_user(model, request):
"""Add user via remote service.
Returns a JSON response containing success state and a message indicating
what happened::
{
success: true, // respective false
message: 'message'
}
Expected request parameters:
id
New user id.
password
User password to be set initially (optional).
roles
Comma seperated role names the user initially has.
groups
Comma seperated groups names the user should initially be member of.
attr.*
User attributes to be set. I.e. ``attr.mail`` would set the mail
attribute for newly created user. All request parameters prefixed with
``attr`` get checked against user attribute attrmap from settings.
Restrictions - All values, whether single or multi valued, are passed
as string or list of strings to the create function.
"""
params = request.params
uid = params.get('id')
if not uid:
return {
'success': False,
'message': u"No user ID given.",
}
users = model.backend
if uid in users:
return {
'success': False,
'message': u"User with given ID already exists.",
}
password = params.get('password')
add_roles = params.get('roles', '')
add_roles = [val.strip() for val in add_roles.split(',') if val]
add_groups = params.get('groups', '')
add_groups = [val.strip() for val in add_groups.split(',') if val]
attrs = dict()
for key, val in params.items():
if not key.startswith('attr.'):
continue
key = key[key.find('.') + 1:]
attrs[key] = val
settings = ugm_users(model)
attrmap = settings.attrs.users_form_attrmap
exposed = settings.attrs.users_exposed_attributes
if not exposed:
exposed = list()
valid_attrs = attrmap.keys() + exposed
checked_attrs = dict()
for key in valid_attrs:
val = attrs.get(key)
if not val:
continue
checked_attrs[key] = val
try:
user = users.create(uid, **checked_attrs)
message = u""
from cone.app.security import DEFAULT_ROLES
available_roles = [role[0] for role in DEFAULT_ROLES]
for role in add_roles:
if role not in available_roles:
message += u"Role '%s' given but inexistent. " % role
continue
user.add_role(role)
groups = users.parent.groups
for group in add_groups:
if group not in groups:
message += u"Group '%s' given but inexistent. " % group
continue
groups[group].add(uid)
users.parent()
if password is not None:
users.passwd(uid, None, password)
message += u"Created user with ID '%s'." % uid
return {
'success': True,
'message': message,
}
except Exception as e:
return {
'success': False,
'message': str(e),
}
finally:
model.invalidate()
@view_config(
name='remote_delete_user',
accept='application/json',
renderer='json',
context=Users,
permission='delete_user')
|
alfredodeza/notario | notario/store.py | create_store | python | def create_store():
new_storage = _proxy('store')
_state.store = type('store', (object,), {})
new_storage.store = dict()
return new_storage.store | A helper for setting the _proxy and slapping the store
object for us.
:return: A thread-local storage as a dictionary | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/store.py#L25-L35 | [
"def _proxy(key):\n class ObjectProxy(object):\n def __getattr__(self, attr):\n obj = getattr(_state, key)\n return getattr(obj, attr)\n\n def __setattr__(self, attr, value):\n obj = getattr(_state, key)\n return setattr(obj, attr, value)\n\n def __delattr__(self, attr):\n obj = getattr(_state, key)\n return delattr(obj, attr)\n return ObjectProxy()\n"
] | """
Thread-safe storage for Notario.
"""
from threading import local
_state = local()
def _proxy(key):
class ObjectProxy(object):
def __getattr__(self, attr):
obj = getattr(_state, key)
return getattr(obj, attr)
def __setattr__(self, attr, value):
obj = getattr(_state, key)
return setattr(obj, attr, value)
def __delattr__(self, attr):
obj = getattr(_state, key)
return delattr(obj, attr)
return ObjectProxy()
store = create_store()
#
# Helpers
#
class new_store(object):
def __init__(self):
self.store = create_store()
def __enter__(self):
return self.store
def __exit__( self, exc_type, exc_val, exc_tb):
# clear the store regardless of exceptions
del _state.store
if exc_type:
return False
return True |
alfredodeza/notario | notario/validators/types.py | string | python | def string(_object):
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, basestring), "not of type string")
return _validator(value)
return decorated
ensure(isinstance(_object, basestring), "not of type string") | Validates a given input is of type string.
Example usage::
data = {'a' : 21}
schema = (string, 21)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L10-L34 | [
"def ensure(assertion, message=None):\n \"\"\"\n Checks an assertion argument for truth-ness. Will return ``True`` or\n explicitly raise ``AssertionError``. This is to deal with environments\n using ``python -O` or ``PYTHONOPTIMIZE=``.\n\n :param assertion: some value to evaluate for truth-ness\n :param message: optional message used for raising AssertionError\n \"\"\"\n message = message or assertion\n\n if not assertion:\n raise AssertionError(message)\n\n return True\n",
"def is_callable(data):\n if hasattr(data, '__call__'):\n return True\n return False\n"
] | """
Basic type validators
"""
from functools import wraps
from notario._compat import basestring
from notario.exceptions import Invalid
from notario.utils import is_callable, forced_leaf_validator, ensure
def boolean(_object):
"""
Validates a given input is of type boolean.
Example usage::
data = {'a' : True}
schema = ('a', boolean)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, bool), "not of type boolean")
return _validator(value)
return decorated
ensure(isinstance(_object, bool), "not of type boolean")
@forced_leaf_validator
def dictionary(_object, *args):
"""
Validates a given input is of type dictionary.
Example usage::
data = {'a' : {'b': 1}}
schema = ('a', dictionary)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
error_msg = 'not of type dictionary'
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, dict), error_msg)
return _validator(value)
return decorated
try:
ensure(isinstance(_object, dict), error_msg)
except AssertionError:
if args:
msg = 'did not pass validation against callable: dictionary'
raise Invalid('', msg=msg, reason=error_msg, *args)
raise
def array(_object):
"""
Validates a given input is of type list.
Example usage::
data = {'a' : [1,2]}
schema = ('a', array)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, list), "not of type array")
return _validator(value)
return decorated
ensure(isinstance(_object, list), "not of type array")
def integer(_object):
"""
Validates a given input is of type int..
Example usage::
data = {'a' : 21}
schema = ('a', integer)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, int), "not of type int")
return _validator(value)
return decorated
ensure(isinstance(_object, int), "not of type int")
|
alfredodeza/notario | notario/validators/types.py | boolean | python | def boolean(_object):
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, bool), "not of type boolean")
return _validator(value)
return decorated
ensure(isinstance(_object, bool), "not of type boolean") | Validates a given input is of type boolean.
Example usage::
data = {'a' : True}
schema = ('a', boolean)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L37-L62 | [
"def ensure(assertion, message=None):\n \"\"\"\n Checks an assertion argument for truth-ness. Will return ``True`` or\n explicitly raise ``AssertionError``. This is to deal with environments\n using ``python -O` or ``PYTHONOPTIMIZE=``.\n\n :param assertion: some value to evaluate for truth-ness\n :param message: optional message used for raising AssertionError\n \"\"\"\n message = message or assertion\n\n if not assertion:\n raise AssertionError(message)\n\n return True\n",
"def is_callable(data):\n if hasattr(data, '__call__'):\n return True\n return False\n"
] | """
Basic type validators
"""
from functools import wraps
from notario._compat import basestring
from notario.exceptions import Invalid
from notario.utils import is_callable, forced_leaf_validator, ensure
def string(_object):
"""
Validates a given input is of type string.
Example usage::
data = {'a' : 21}
schema = (string, 21)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, basestring), "not of type string")
return _validator(value)
return decorated
ensure(isinstance(_object, basestring), "not of type string")
@forced_leaf_validator
def dictionary(_object, *args):
"""
Validates a given input is of type dictionary.
Example usage::
data = {'a' : {'b': 1}}
schema = ('a', dictionary)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
error_msg = 'not of type dictionary'
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, dict), error_msg)
return _validator(value)
return decorated
try:
ensure(isinstance(_object, dict), error_msg)
except AssertionError:
if args:
msg = 'did not pass validation against callable: dictionary'
raise Invalid('', msg=msg, reason=error_msg, *args)
raise
def array(_object):
"""
Validates a given input is of type list.
Example usage::
data = {'a' : [1,2]}
schema = ('a', array)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, list), "not of type array")
return _validator(value)
return decorated
ensure(isinstance(_object, list), "not of type array")
def integer(_object):
"""
Validates a given input is of type int..
Example usage::
data = {'a' : 21}
schema = ('a', integer)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, int), "not of type int")
return _validator(value)
return decorated
ensure(isinstance(_object, int), "not of type int")
|
alfredodeza/notario | notario/validators/types.py | dictionary | python | def dictionary(_object, *args):
error_msg = 'not of type dictionary'
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, dict), error_msg)
return _validator(value)
return decorated
try:
ensure(isinstance(_object, dict), error_msg)
except AssertionError:
if args:
msg = 'did not pass validation against callable: dictionary'
raise Invalid('', msg=msg, reason=error_msg, *args)
raise | Validates a given input is of type dictionary.
Example usage::
data = {'a' : {'b': 1}}
schema = ('a', dictionary)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L66-L98 | [
"def ensure(assertion, message=None):\n \"\"\"\n Checks an assertion argument for truth-ness. Will return ``True`` or\n explicitly raise ``AssertionError``. This is to deal with environments\n using ``python -O` or ``PYTHONOPTIMIZE=``.\n\n :param assertion: some value to evaluate for truth-ness\n :param message: optional message used for raising AssertionError\n \"\"\"\n message = message or assertion\n\n if not assertion:\n raise AssertionError(message)\n\n return True\n",
"def is_callable(data):\n if hasattr(data, '__call__'):\n return True\n return False\n"
] | """
Basic type validators
"""
from functools import wraps
from notario._compat import basestring
from notario.exceptions import Invalid
from notario.utils import is_callable, forced_leaf_validator, ensure
def string(_object):
"""
Validates a given input is of type string.
Example usage::
data = {'a' : 21}
schema = (string, 21)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, basestring), "not of type string")
return _validator(value)
return decorated
ensure(isinstance(_object, basestring), "not of type string")
def boolean(_object):
"""
Validates a given input is of type boolean.
Example usage::
data = {'a' : True}
schema = ('a', boolean)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, bool), "not of type boolean")
return _validator(value)
return decorated
ensure(isinstance(_object, bool), "not of type boolean")
@forced_leaf_validator
def array(_object):
"""
Validates a given input is of type list.
Example usage::
data = {'a' : [1,2]}
schema = ('a', array)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, list), "not of type array")
return _validator(value)
return decorated
ensure(isinstance(_object, list), "not of type array")
def integer(_object):
"""
Validates a given input is of type int..
Example usage::
data = {'a' : 21}
schema = ('a', integer)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, int), "not of type int")
return _validator(value)
return decorated
ensure(isinstance(_object, int), "not of type int")
|
alfredodeza/notario | notario/validators/types.py | array | python | def array(_object):
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, list), "not of type array")
return _validator(value)
return decorated
ensure(isinstance(_object, list), "not of type array") | Validates a given input is of type list.
Example usage::
data = {'a' : [1,2]}
schema = ('a', array)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L101-L126 | [
"def ensure(assertion, message=None):\n \"\"\"\n Checks an assertion argument for truth-ness. Will return ``True`` or\n explicitly raise ``AssertionError``. This is to deal with environments\n using ``python -O` or ``PYTHONOPTIMIZE=``.\n\n :param assertion: some value to evaluate for truth-ness\n :param message: optional message used for raising AssertionError\n \"\"\"\n message = message or assertion\n\n if not assertion:\n raise AssertionError(message)\n\n return True\n",
"def is_callable(data):\n if hasattr(data, '__call__'):\n return True\n return False\n"
] | """
Basic type validators
"""
from functools import wraps
from notario._compat import basestring
from notario.exceptions import Invalid
from notario.utils import is_callable, forced_leaf_validator, ensure
def string(_object):
"""
Validates a given input is of type string.
Example usage::
data = {'a' : 21}
schema = (string, 21)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, basestring), "not of type string")
return _validator(value)
return decorated
ensure(isinstance(_object, basestring), "not of type string")
def boolean(_object):
"""
Validates a given input is of type boolean.
Example usage::
data = {'a' : True}
schema = ('a', boolean)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, bool), "not of type boolean")
return _validator(value)
return decorated
ensure(isinstance(_object, bool), "not of type boolean")
@forced_leaf_validator
def dictionary(_object, *args):
"""
Validates a given input is of type dictionary.
Example usage::
data = {'a' : {'b': 1}}
schema = ('a', dictionary)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
error_msg = 'not of type dictionary'
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, dict), error_msg)
return _validator(value)
return decorated
try:
ensure(isinstance(_object, dict), error_msg)
except AssertionError:
if args:
msg = 'did not pass validation against callable: dictionary'
raise Invalid('', msg=msg, reason=error_msg, *args)
raise
def integer(_object):
"""
Validates a given input is of type int..
Example usage::
data = {'a' : 21}
schema = ('a', integer)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, int), "not of type int")
return _validator(value)
return decorated
ensure(isinstance(_object, int), "not of type int")
|
alfredodeza/notario | notario/validators/types.py | integer | python | def integer(_object):
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, int), "not of type int")
return _validator(value)
return decorated
ensure(isinstance(_object, int), "not of type int") | Validates a given input is of type int..
Example usage::
data = {'a' : 21}
schema = ('a', integer)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/types.py#L129-L153 | [
"def ensure(assertion, message=None):\n \"\"\"\n Checks an assertion argument for truth-ness. Will return ``True`` or\n explicitly raise ``AssertionError``. This is to deal with environments\n using ``python -O` or ``PYTHONOPTIMIZE=``.\n\n :param assertion: some value to evaluate for truth-ness\n :param message: optional message used for raising AssertionError\n \"\"\"\n message = message or assertion\n\n if not assertion:\n raise AssertionError(message)\n\n return True\n",
"def is_callable(data):\n if hasattr(data, '__call__'):\n return True\n return False\n"
] | """
Basic type validators
"""
from functools import wraps
from notario._compat import basestring
from notario.exceptions import Invalid
from notario.utils import is_callable, forced_leaf_validator, ensure
def string(_object):
"""
Validates a given input is of type string.
Example usage::
data = {'a' : 21}
schema = (string, 21)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, basestring), "not of type string")
return _validator(value)
return decorated
ensure(isinstance(_object, basestring), "not of type string")
def boolean(_object):
"""
Validates a given input is of type boolean.
Example usage::
data = {'a' : True}
schema = ('a', boolean)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, bool), "not of type boolean")
return _validator(value)
return decorated
ensure(isinstance(_object, bool), "not of type boolean")
@forced_leaf_validator
def dictionary(_object, *args):
"""
Validates a given input is of type dictionary.
Example usage::
data = {'a' : {'b': 1}}
schema = ('a', dictionary)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
error_msg = 'not of type dictionary'
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, dict), error_msg)
return _validator(value)
return decorated
try:
ensure(isinstance(_object, dict), error_msg)
except AssertionError:
if args:
msg = 'did not pass validation against callable: dictionary'
raise Invalid('', msg=msg, reason=error_msg, *args)
raise
def array(_object):
"""
Validates a given input is of type list.
Example usage::
data = {'a' : [1,2]}
schema = ('a', array)
You can also use this as a decorator, as a way to check for the
input before it even hits a validator you may be writing.
.. note::
If the argument is a callable, the decorating behavior will be
triggered, otherwise it will act as a normal function.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
def decorated(value):
ensure(isinstance(value, list), "not of type array")
return _validator(value)
return decorated
ensure(isinstance(_object, list), "not of type array")
|
alfredodeza/notario | notario/validators/iterables.py | BasicIterableValidator.safe_type | python | def safe_type(self, data, tree):
if not isinstance(data, list):
name = self.__class__.__name__
msg = "did not pass validation against callable: %s" % name
reason = 'expected a list but got %s' % safe_repr(data)
raise Invalid(self.schema, tree, reason=reason, pair='value', msg=msg) | Make sure that the incoming data complies with the class type we
are expecting it to be. In this case, classes that inherit from this
base class expect data to be of type ``list``. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/validators/iterables.py#L22-L32 | [
"def safe_repr(obj):\n \"\"\"\n Try to get ``__name__`` first, ``__class__.__name__`` second\n and finally, if we can't get anything acceptable, fallback\n to user a ``repr()`` call.\n \"\"\"\n name = getattr(obj, '__name__', getattr(obj.__class__, '__name__'))\n if name == 'ndict':\n name = 'dict'\n return name or repr(obj)\n"
] | class BasicIterableValidator(object):
"""
Base class for iterable validators, can be sub-classed
for other type of iterable validators but should not be
used directly.
"""
__validator_leaf__ = True
def __init__(self, schema):
self.schema = schema
|
alfredodeza/notario | notario/utils.py | safe_repr | python | def safe_repr(obj):
name = getattr(obj, '__name__', getattr(obj.__class__, '__name__'))
if name == 'ndict':
name = 'dict'
return name or repr(obj) | Try to get ``__name__`` first, ``__class__.__name__`` second
and finally, if we can't get anything acceptable, fallback
to user a ``repr()`` call. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/utils.py#L10-L19 | null | import warnings
def is_callable(data):
if hasattr(data, '__call__'):
return True
return False
# Backwards compatibility
def optional(validator):
from notario import decorators
msg = "import optional from notario.decorators, not from utils"
warnings.warn(msg, DeprecationWarning, stacklevel=2)
return decorators.optional(validator)
class ndict(dict):
"""
Used by Notario so that it can slap attributes to the object
when it is created, something that regular dictionaries will not
allow to do.
"""
pass
def re_sort(data):
"""
A data with keys that are not enumerated sequentially will be
re sorted and sequentially ordered.
For example::
>>> data = {16: ('1', 'b'), 3: ('1', 'a')}
>>> re_sort(data)
>>> {0: ('1', 'a'), 1: ('1', 'b')}
"""
keys = sorted(data.keys())
new_data = {}
for number, key in enumerate(keys):
new_data[number] = data[key]
return new_data
def sift(data, required_items=None):
"""
Receive a ``data`` object that will be in the form
of a normalized structure (e.g. ``{0: {'a': 0}}``) and
filter out keys that match the ``required_items``.
"""
required_items = required_items or []
new_data = {}
for k, v in data.items():
if v[0] in required_items:
new_data[k] = v
continue
for required_item in required_items:
key = getattr(required_item, '_object', False)
if key:
if v[0] == key:
new_data[k] = v
return re_sort(new_data)
def is_empty(value):
try:
return len(value) == 0
except TypeError:
return False
def is_not_empty(value):
return not is_empty(value)
def is_nested_tuple(value):
if len(value) == 2 and isinstance(value[1], tuple): # nested tuple
return True
return False
def data_item(data):
"""
When trying to return a meaningful error about an unexpected data item
we cannot just `repr(data)` as that could show a gigantic data struture.
This utility should try to get the key of the first item or the single item
in the data structure.
"""
if isinstance(data, ndict):
# OK, we have something that looks like {0: ('a', 'b')}
# or something that is a regular dictionary
# so try to return 'a' regardless of the length
for item in data:
return repr(data[item][0])
elif isinstance(data, dict):
for item in data:
return repr(data[item])
elif isinstance(data, list):
return repr(data[0])
return repr(data)
def forced_leaf_validator(func):
"""
Some decorators may need to use this if doing anything related
with a dictionary value. Since callables are usually validating
single values (e.g. a string or a boolean) and not a dictionary
per-se because Notario will already normalize the data.
"""
func.__validator_leaf__ = True
return func
def expand_schema(schema):
if hasattr(schema, '__delayed__'):
return schema()
return schema
def is_schema(_object):
"""
A helper to determine if we are indeed dealing with what it seems to be
a schema.
"""
if hasattr(_object, '__delayed__') or isinstance(_object, tuple):
return True
return False
def ensure(assertion, message=None):
"""
Checks an assertion argument for truth-ness. Will return ``True`` or
explicitly raise ``AssertionError``. This is to deal with environments
using ``python -O` or ``PYTHONOPTIMIZE=``.
:param assertion: some value to evaluate for truth-ness
:param message: optional message used for raising AssertionError
"""
message = message or assertion
if not assertion:
raise AssertionError(message)
return True
|
alfredodeza/notario | notario/utils.py | re_sort | python | def re_sort(data):
keys = sorted(data.keys())
new_data = {}
for number, key in enumerate(keys):
new_data[number] = data[key]
return new_data | A data with keys that are not enumerated sequentially will be
re sorted and sequentially ordered.
For example::
>>> data = {16: ('1', 'b'), 3: ('1', 'a')}
>>> re_sort(data)
>>> {0: ('1', 'a'), 1: ('1', 'b')} | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/utils.py#L39-L54 | null | import warnings
def is_callable(data):
if hasattr(data, '__call__'):
return True
return False
def safe_repr(obj):
"""
Try to get ``__name__`` first, ``__class__.__name__`` second
and finally, if we can't get anything acceptable, fallback
to user a ``repr()`` call.
"""
name = getattr(obj, '__name__', getattr(obj.__class__, '__name__'))
if name == 'ndict':
name = 'dict'
return name or repr(obj)
# Backwards compatibility
def optional(validator):
from notario import decorators
msg = "import optional from notario.decorators, not from utils"
warnings.warn(msg, DeprecationWarning, stacklevel=2)
return decorators.optional(validator)
class ndict(dict):
"""
Used by Notario so that it can slap attributes to the object
when it is created, something that regular dictionaries will not
allow to do.
"""
pass
def sift(data, required_items=None):
"""
Receive a ``data`` object that will be in the form
of a normalized structure (e.g. ``{0: {'a': 0}}``) and
filter out keys that match the ``required_items``.
"""
required_items = required_items or []
new_data = {}
for k, v in data.items():
if v[0] in required_items:
new_data[k] = v
continue
for required_item in required_items:
key = getattr(required_item, '_object', False)
if key:
if v[0] == key:
new_data[k] = v
return re_sort(new_data)
def is_empty(value):
try:
return len(value) == 0
except TypeError:
return False
def is_not_empty(value):
return not is_empty(value)
def is_nested_tuple(value):
if len(value) == 2 and isinstance(value[1], tuple): # nested tuple
return True
return False
def data_item(data):
"""
When trying to return a meaningful error about an unexpected data item
we cannot just `repr(data)` as that could show a gigantic data struture.
This utility should try to get the key of the first item or the single item
in the data structure.
"""
if isinstance(data, ndict):
# OK, we have something that looks like {0: ('a', 'b')}
# or something that is a regular dictionary
# so try to return 'a' regardless of the length
for item in data:
return repr(data[item][0])
elif isinstance(data, dict):
for item in data:
return repr(data[item])
elif isinstance(data, list):
return repr(data[0])
return repr(data)
def forced_leaf_validator(func):
"""
Some decorators may need to use this if doing anything related
with a dictionary value. Since callables are usually validating
single values (e.g. a string or a boolean) and not a dictionary
per-se because Notario will already normalize the data.
"""
func.__validator_leaf__ = True
return func
def expand_schema(schema):
if hasattr(schema, '__delayed__'):
return schema()
return schema
def is_schema(_object):
"""
A helper to determine if we are indeed dealing with what it seems to be
a schema.
"""
if hasattr(_object, '__delayed__') or isinstance(_object, tuple):
return True
return False
def ensure(assertion, message=None):
"""
Checks an assertion argument for truth-ness. Will return ``True`` or
explicitly raise ``AssertionError``. This is to deal with environments
using ``python -O` or ``PYTHONOPTIMIZE=``.
:param assertion: some value to evaluate for truth-ness
:param message: optional message used for raising AssertionError
"""
message = message or assertion
if not assertion:
raise AssertionError(message)
return True
|
alfredodeza/notario | notario/utils.py | sift | python | def sift(data, required_items=None):
required_items = required_items or []
new_data = {}
for k, v in data.items():
if v[0] in required_items:
new_data[k] = v
continue
for required_item in required_items:
key = getattr(required_item, '_object', False)
if key:
if v[0] == key:
new_data[k] = v
return re_sort(new_data) | Receive a ``data`` object that will be in the form
of a normalized structure (e.g. ``{0: {'a': 0}}``) and
filter out keys that match the ``required_items``. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/utils.py#L57-L75 | [
"def re_sort(data):\n \"\"\"\n A data with keys that are not enumerated sequentially will be\n re sorted and sequentially ordered.\n\n For example::\n\n >>> data = {16: ('1', 'b'), 3: ('1', 'a')}\n >>> re_sort(data)\n >>> {0: ('1', 'a'), 1: ('1', 'b')}\n \"\"\"\n keys = sorted(data.keys())\n new_data = {}\n for number, key in enumerate(keys):\n new_data[number] = data[key]\n return new_data\n"
] | import warnings
def is_callable(data):
if hasattr(data, '__call__'):
return True
return False
def safe_repr(obj):
"""
Try to get ``__name__`` first, ``__class__.__name__`` second
and finally, if we can't get anything acceptable, fallback
to user a ``repr()`` call.
"""
name = getattr(obj, '__name__', getattr(obj.__class__, '__name__'))
if name == 'ndict':
name = 'dict'
return name or repr(obj)
# Backwards compatibility
def optional(validator):
from notario import decorators
msg = "import optional from notario.decorators, not from utils"
warnings.warn(msg, DeprecationWarning, stacklevel=2)
return decorators.optional(validator)
class ndict(dict):
"""
Used by Notario so that it can slap attributes to the object
when it is created, something that regular dictionaries will not
allow to do.
"""
pass
def re_sort(data):
"""
A data with keys that are not enumerated sequentially will be
re sorted and sequentially ordered.
For example::
>>> data = {16: ('1', 'b'), 3: ('1', 'a')}
>>> re_sort(data)
>>> {0: ('1', 'a'), 1: ('1', 'b')}
"""
keys = sorted(data.keys())
new_data = {}
for number, key in enumerate(keys):
new_data[number] = data[key]
return new_data
def is_empty(value):
try:
return len(value) == 0
except TypeError:
return False
def is_not_empty(value):
return not is_empty(value)
def is_nested_tuple(value):
if len(value) == 2 and isinstance(value[1], tuple): # nested tuple
return True
return False
def data_item(data):
"""
When trying to return a meaningful error about an unexpected data item
we cannot just `repr(data)` as that could show a gigantic data struture.
This utility should try to get the key of the first item or the single item
in the data structure.
"""
if isinstance(data, ndict):
# OK, we have something that looks like {0: ('a', 'b')}
# or something that is a regular dictionary
# so try to return 'a' regardless of the length
for item in data:
return repr(data[item][0])
elif isinstance(data, dict):
for item in data:
return repr(data[item])
elif isinstance(data, list):
return repr(data[0])
return repr(data)
def forced_leaf_validator(func):
"""
Some decorators may need to use this if doing anything related
with a dictionary value. Since callables are usually validating
single values (e.g. a string or a boolean) and not a dictionary
per-se because Notario will already normalize the data.
"""
func.__validator_leaf__ = True
return func
def expand_schema(schema):
if hasattr(schema, '__delayed__'):
return schema()
return schema
def is_schema(_object):
"""
A helper to determine if we are indeed dealing with what it seems to be
a schema.
"""
if hasattr(_object, '__delayed__') or isinstance(_object, tuple):
return True
return False
def ensure(assertion, message=None):
"""
Checks an assertion argument for truth-ness. Will return ``True`` or
explicitly raise ``AssertionError``. This is to deal with environments
using ``python -O` or ``PYTHONOPTIMIZE=``.
:param assertion: some value to evaluate for truth-ness
:param message: optional message used for raising AssertionError
"""
message = message or assertion
if not assertion:
raise AssertionError(message)
return True
|
alfredodeza/notario | notario/utils.py | data_item | python | def data_item(data):
if isinstance(data, ndict):
# OK, we have something that looks like {0: ('a', 'b')}
# or something that is a regular dictionary
# so try to return 'a' regardless of the length
for item in data:
return repr(data[item][0])
elif isinstance(data, dict):
for item in data:
return repr(data[item])
elif isinstance(data, list):
return repr(data[0])
return repr(data) | When trying to return a meaningful error about an unexpected data item
we cannot just `repr(data)` as that could show a gigantic data struture.
This utility should try to get the key of the first item or the single item
in the data structure. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/utils.py#L95-L114 | null | import warnings
def is_callable(data):
if hasattr(data, '__call__'):
return True
return False
def safe_repr(obj):
"""
Try to get ``__name__`` first, ``__class__.__name__`` second
and finally, if we can't get anything acceptable, fallback
to user a ``repr()`` call.
"""
name = getattr(obj, '__name__', getattr(obj.__class__, '__name__'))
if name == 'ndict':
name = 'dict'
return name or repr(obj)
# Backwards compatibility
def optional(validator):
from notario import decorators
msg = "import optional from notario.decorators, not from utils"
warnings.warn(msg, DeprecationWarning, stacklevel=2)
return decorators.optional(validator)
class ndict(dict):
"""
Used by Notario so that it can slap attributes to the object
when it is created, something that regular dictionaries will not
allow to do.
"""
pass
def re_sort(data):
"""
A data with keys that are not enumerated sequentially will be
re sorted and sequentially ordered.
For example::
>>> data = {16: ('1', 'b'), 3: ('1', 'a')}
>>> re_sort(data)
>>> {0: ('1', 'a'), 1: ('1', 'b')}
"""
keys = sorted(data.keys())
new_data = {}
for number, key in enumerate(keys):
new_data[number] = data[key]
return new_data
def sift(data, required_items=None):
"""
Receive a ``data`` object that will be in the form
of a normalized structure (e.g. ``{0: {'a': 0}}``) and
filter out keys that match the ``required_items``.
"""
required_items = required_items or []
new_data = {}
for k, v in data.items():
if v[0] in required_items:
new_data[k] = v
continue
for required_item in required_items:
key = getattr(required_item, '_object', False)
if key:
if v[0] == key:
new_data[k] = v
return re_sort(new_data)
def is_empty(value):
try:
return len(value) == 0
except TypeError:
return False
def is_not_empty(value):
return not is_empty(value)
def is_nested_tuple(value):
if len(value) == 2 and isinstance(value[1], tuple): # nested tuple
return True
return False
def forced_leaf_validator(func):
"""
Some decorators may need to use this if doing anything related
with a dictionary value. Since callables are usually validating
single values (e.g. a string or a boolean) and not a dictionary
per-se because Notario will already normalize the data.
"""
func.__validator_leaf__ = True
return func
def expand_schema(schema):
if hasattr(schema, '__delayed__'):
return schema()
return schema
def is_schema(_object):
"""
A helper to determine if we are indeed dealing with what it seems to be
a schema.
"""
if hasattr(_object, '__delayed__') or isinstance(_object, tuple):
return True
return False
def ensure(assertion, message=None):
"""
Checks an assertion argument for truth-ness. Will return ``True`` or
explicitly raise ``AssertionError``. This is to deal with environments
using ``python -O` or ``PYTHONOPTIMIZE=``.
:param assertion: some value to evaluate for truth-ness
:param message: optional message used for raising AssertionError
"""
message = message or assertion
if not assertion:
raise AssertionError(message)
return True
|
alfredodeza/notario | notario/utils.py | ensure | python | def ensure(assertion, message=None):
message = message or assertion
if not assertion:
raise AssertionError(message)
return True | Checks an assertion argument for truth-ness. Will return ``True`` or
explicitly raise ``AssertionError``. This is to deal with environments
using ``python -O` or ``PYTHONOPTIMIZE=``.
:param assertion: some value to evaluate for truth-ness
:param message: optional message used for raising AssertionError | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/utils.py#L144-L158 | null | import warnings
def is_callable(data):
if hasattr(data, '__call__'):
return True
return False
def safe_repr(obj):
"""
Try to get ``__name__`` first, ``__class__.__name__`` second
and finally, if we can't get anything acceptable, fallback
to user a ``repr()`` call.
"""
name = getattr(obj, '__name__', getattr(obj.__class__, '__name__'))
if name == 'ndict':
name = 'dict'
return name or repr(obj)
# Backwards compatibility
def optional(validator):
from notario import decorators
msg = "import optional from notario.decorators, not from utils"
warnings.warn(msg, DeprecationWarning, stacklevel=2)
return decorators.optional(validator)
class ndict(dict):
"""
Used by Notario so that it can slap attributes to the object
when it is created, something that regular dictionaries will not
allow to do.
"""
pass
def re_sort(data):
"""
A data with keys that are not enumerated sequentially will be
re sorted and sequentially ordered.
For example::
>>> data = {16: ('1', 'b'), 3: ('1', 'a')}
>>> re_sort(data)
>>> {0: ('1', 'a'), 1: ('1', 'b')}
"""
keys = sorted(data.keys())
new_data = {}
for number, key in enumerate(keys):
new_data[number] = data[key]
return new_data
def sift(data, required_items=None):
"""
Receive a ``data`` object that will be in the form
of a normalized structure (e.g. ``{0: {'a': 0}}``) and
filter out keys that match the ``required_items``.
"""
required_items = required_items or []
new_data = {}
for k, v in data.items():
if v[0] in required_items:
new_data[k] = v
continue
for required_item in required_items:
key = getattr(required_item, '_object', False)
if key:
if v[0] == key:
new_data[k] = v
return re_sort(new_data)
def is_empty(value):
try:
return len(value) == 0
except TypeError:
return False
def is_not_empty(value):
return not is_empty(value)
def is_nested_tuple(value):
if len(value) == 2 and isinstance(value[1], tuple): # nested tuple
return True
return False
def data_item(data):
"""
When trying to return a meaningful error about an unexpected data item
we cannot just `repr(data)` as that could show a gigantic data struture.
This utility should try to get the key of the first item or the single item
in the data structure.
"""
if isinstance(data, ndict):
# OK, we have something that looks like {0: ('a', 'b')}
# or something that is a regular dictionary
# so try to return 'a' regardless of the length
for item in data:
return repr(data[item][0])
elif isinstance(data, dict):
for item in data:
return repr(data[item])
elif isinstance(data, list):
return repr(data[0])
return repr(data)
def forced_leaf_validator(func):
"""
Some decorators may need to use this if doing anything related
with a dictionary value. Since callables are usually validating
single values (e.g. a string or a boolean) and not a dictionary
per-se because Notario will already normalize the data.
"""
func.__validator_leaf__ = True
return func
def expand_schema(schema):
if hasattr(schema, '__delayed__'):
return schema()
return schema
def is_schema(_object):
"""
A helper to determine if we are indeed dealing with what it seems to be
a schema.
"""
if hasattr(_object, '__delayed__') or isinstance(_object, tuple):
return True
return False
|
alfredodeza/notario | notario/decorators.py | not_empty | python | def not_empty(_object):
if is_callable(_object):
_validator = _object
@wraps(_validator)
@instance_of()
def decorated(value):
ensure(value, "%s is empty" % safe_repr(value))
return _validator(value)
return decorated
try:
ensure(len(_object), "%s is empty" % safe_repr(_object))
except TypeError:
raise AssertionError("not of any valid types: [list, dict, str]") | Validates the given input (has to be a valid data structure) is empty.
Input *has* to be one of: `list`, `dict`, or `string`.
It is specially useful when most of the validators being created are
dealing with data structures that should not be empty. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/decorators.py#L72-L92 | [
"def ensure(assertion, message=None):\n \"\"\"\n Checks an assertion argument for truth-ness. Will return ``True`` or\n explicitly raise ``AssertionError``. This is to deal with environments\n using ``python -O` or ``PYTHONOPTIMIZE=``.\n\n :param assertion: some value to evaluate for truth-ness\n :param message: optional message used for raising AssertionError\n \"\"\"\n message = message or assertion\n\n if not assertion:\n raise AssertionError(message)\n\n return True\n",
"def safe_repr(obj):\n \"\"\"\n Try to get ``__name__`` first, ``__class__.__name__`` second\n and finally, if we can't get anything acceptable, fallback\n to user a ``repr()`` call.\n \"\"\"\n name = getattr(obj, '__name__', getattr(obj.__class__, '__name__'))\n if name == 'ndict':\n name = 'dict'\n return name or repr(obj)\n",
"def is_callable(data):\n if hasattr(data, '__call__'):\n return True\n return False\n"
] | from functools import wraps
from notario.utils import is_callable, safe_repr, ensure
class instance_of(object):
"""
When trying to make sure the value is coming from any number of valid
objects, you will want to use this decorator as it will make sure that
before executing the validator it will comply being of any of the
``valid_types``.
For example, if the input for a given validator can be either a dictionary
or a list, this validator could be used like::
from notario import ensure
@instance_of((list, dict))
def my_validator(value):
ensure(len(value) > 0)
This decorator **needs** to be called as it has a default for valid types,
which is: ``(list, dict, str)``. A working implementation would look like
this with the default types::
from notario import ensure
@instance_of()
def my_validator(value):
ensure(len(value) > 0)
When it fails, as almost all of Notario's exceptions, it will return
a meaningful error, this is how passing a boolean to a validator that
accepts the defaults would raise said error:
.. doctest:: instance_of
>>> from notario.decorators import instance_of
>>> from notario import ensure
>>> @instance_of()
... def my_validator(value):
... ensure(len(value) == 2)
...
>>> my_validator(True)
Traceback (most recent call last):
...
AssertionError: not of any valid types: ['list', 'dict', 'str']
"""
def __init__(self, valid_types=None):
self.valid_types = valid_types or (list, dict, str)
self.__validator_leaf__ = False
for t in self.valid_types:
if t is dict or isinstance(t, dict):
self.__validator_leaf__ = True
def __call__(self, func):
fail_msg = "not of any valid types: %s" % self.valid_names()
def decorated(value, *a):
if self.__validator_leaf__ is True:
if isinstance(value, dict):
# convert the 'data' to an actual value. 'data' is of {0: {'key': 'value'}}
value = dict(i for i in value.values())
ensure(isinstance(value, self.valid_types), fail_msg)
func(value)
decorated.__validator_leaf__ = self.__validator_leaf__
return decorated
def valid_names(self):
return [safe_repr(obj)
for obj in self.valid_types]
def optional(_object):
"""
This decorator has a double functionality, it can wrap validators and make
them optional or it can wrap keys and make that entry optional.
**Optional Validator:**
Allows to have validators work only when there is a value that contains
some data, otherwise it will just not pass the information to the actual
validator and will not fail as a result.
As any normal decorator, it can be used corectly with the decorator
syntax or in the actual schema.
This is how it would look in a schema::
('key', optional(my_validator))
Where ``my_validator`` can be any validator that accepts a single
argument.
In case a class based validator is being used (like the ``recursive`` or
``iterables`` then it would look like::
('key', optional(class_validator(('key', 'value'))))
Of course, the schema should vary depending on your needs, it is just the
way of constructing the validator call that should be important.
**Optional Keys:**
Sometimes a given data structure may present optional entries. For example
this data::
data = {'required': 1, 'optional': 2}
To represent this, you will need to declare the `optional` key in the
schema but by wrapping the key with this decorator you will basically tell
the validation engine that if that key is present it should be validated,
otherwise, it should be skipped. This is how the schema would look::
schema = (('required', 1), (optional('optional'), 1))
The above schema would allow data that is missing the ``optional`` key. The
data below would pass validation without any issues::
data = {'required': 1}
"""
if is_callable(_object):
validator = _object
@wraps(validator)
def decorated(value):
if value:
return validator(value)
return
return decorated
else:
def optional(*args):
return _object
optional.is_optional = True
optional._object = _object
return optional
def delay(func):
"""
When schemas are referencing to each other, this decorator will help by
marking a schema as ``delayed`` to avoid the need for calling a schema to
generate itself until it is actually needed.
For example, if a schema function references to itself in this manner::
def my_schema():
return (
('a', 'foo'),
('b', my_schema()),
)
Because ``my_schema`` is being called within itself, it will get into
a recursion problem as soon as it is executed.
To avoid this, applying the decorator will make it so that the engine will
acknowledge this is the case, and will *expand* the schema only when it is
needed. No recursion problems will happen then since we are effectively
delaying its execution.
"""
func.__delayed__ = True
return func
|
alfredodeza/notario | notario/decorators.py | optional | python | def optional(_object):
if is_callable(_object):
validator = _object
@wraps(validator)
def decorated(value):
if value:
return validator(value)
return
return decorated
else:
def optional(*args):
return _object
optional.is_optional = True
optional._object = _object
return optional | This decorator has a double functionality, it can wrap validators and make
them optional or it can wrap keys and make that entry optional.
**Optional Validator:**
Allows to have validators work only when there is a value that contains
some data, otherwise it will just not pass the information to the actual
validator and will not fail as a result.
As any normal decorator, it can be used corectly with the decorator
syntax or in the actual schema.
This is how it would look in a schema::
('key', optional(my_validator))
Where ``my_validator`` can be any validator that accepts a single
argument.
In case a class based validator is being used (like the ``recursive`` or
``iterables`` then it would look like::
('key', optional(class_validator(('key', 'value'))))
Of course, the schema should vary depending on your needs, it is just the
way of constructing the validator call that should be important.
**Optional Keys:**
Sometimes a given data structure may present optional entries. For example
this data::
data = {'required': 1, 'optional': 2}
To represent this, you will need to declare the `optional` key in the
schema but by wrapping the key with this decorator you will basically tell
the validation engine that if that key is present it should be validated,
otherwise, it should be skipped. This is how the schema would look::
schema = (('required', 1), (optional('optional'), 1))
The above schema would allow data that is missing the ``optional`` key. The
data below would pass validation without any issues::
data = {'required': 1} | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/decorators.py#L95-L156 | [
"def is_callable(data):\n if hasattr(data, '__call__'):\n return True\n return False\n"
] | from functools import wraps
from notario.utils import is_callable, safe_repr, ensure
class instance_of(object):
"""
When trying to make sure the value is coming from any number of valid
objects, you will want to use this decorator as it will make sure that
before executing the validator it will comply being of any of the
``valid_types``.
For example, if the input for a given validator can be either a dictionary
or a list, this validator could be used like::
from notario import ensure
@instance_of((list, dict))
def my_validator(value):
ensure(len(value) > 0)
This decorator **needs** to be called as it has a default for valid types,
which is: ``(list, dict, str)``. A working implementation would look like
this with the default types::
from notario import ensure
@instance_of()
def my_validator(value):
ensure(len(value) > 0)
When it fails, as almost all of Notario's exceptions, it will return
a meaningful error, this is how passing a boolean to a validator that
accepts the defaults would raise said error:
.. doctest:: instance_of
>>> from notario.decorators import instance_of
>>> from notario import ensure
>>> @instance_of()
... def my_validator(value):
... ensure(len(value) == 2)
...
>>> my_validator(True)
Traceback (most recent call last):
...
AssertionError: not of any valid types: ['list', 'dict', 'str']
"""
def __init__(self, valid_types=None):
self.valid_types = valid_types or (list, dict, str)
self.__validator_leaf__ = False
for t in self.valid_types:
if t is dict or isinstance(t, dict):
self.__validator_leaf__ = True
def __call__(self, func):
fail_msg = "not of any valid types: %s" % self.valid_names()
def decorated(value, *a):
if self.__validator_leaf__ is True:
if isinstance(value, dict):
# convert the 'data' to an actual value. 'data' is of {0: {'key': 'value'}}
value = dict(i for i in value.values())
ensure(isinstance(value, self.valid_types), fail_msg)
func(value)
decorated.__validator_leaf__ = self.__validator_leaf__
return decorated
def valid_names(self):
return [safe_repr(obj)
for obj in self.valid_types]
def not_empty(_object):
"""
Validates the given input (has to be a valid data structure) is empty.
Input *has* to be one of: `list`, `dict`, or `string`.
It is specially useful when most of the validators being created are
dealing with data structures that should not be empty.
"""
if is_callable(_object):
_validator = _object
@wraps(_validator)
@instance_of()
def decorated(value):
ensure(value, "%s is empty" % safe_repr(value))
return _validator(value)
return decorated
try:
ensure(len(_object), "%s is empty" % safe_repr(_object))
except TypeError:
raise AssertionError("not of any valid types: [list, dict, str]")
def delay(func):
"""
When schemas are referencing to each other, this decorator will help by
marking a schema as ``delayed`` to avoid the need for calling a schema to
generate itself until it is actually needed.
For example, if a schema function references to itself in this manner::
def my_schema():
return (
('a', 'foo'),
('b', my_schema()),
)
Because ``my_schema`` is being called within itself, it will get into
a recursion problem as soon as it is executed.
To avoid this, applying the decorator will make it so that the engine will
acknowledge this is the case, and will *expand* the schema only when it is
needed. No recursion problems will happen then since we are effectively
delaying its execution.
"""
func.__delayed__ = True
return func
|
alfredodeza/notario | notario/regex.py | chain | python | def chain(*regexes, **kwargs):
prepend_negation = kwargs.get('prepend_negation', True)
return Linker(regexes, prepend_negation=prepend_negation) | A helper function to interact with the regular expression engine
that compiles and applies partial matches to a string.
It expects key value tuples as arguments (any number of them) where the
first pair is the regex to compile and the latter is the message to display
when the regular expression does not match.
The engine constructs partial regular expressions from the input and
applies them sequentially to find the exact point of failure and allowing
the ability to return a meaningful message.
Because adding negation statements like "does not..." can become
repetitive, the function defaults to ``True`` to include the option to
prepend the negative.
For example, this is what would happen with a failing regex::
>>> rx = chain((r'^\d+', 'start with a digit'))
>>> rx('foo')
Traceback (most recent call last):
...
AssertionError: does not start with a digit
If there is no need for prepending the negation, the keyword argument will
need to set it as ``False``::
>>> rx = chain((r'^\d+', 'it should start with a digit'),
... prepend_negation=False)
>>> rx('foo')
Traceback (most recent call last):
...
AssertionError: it should start with a digit | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/regex.py#L57-L94 | null | import re
class Linker(list):
    """
    This list-like object will receive key/value pairs as regexes with
    accompanying failure error messages and upon a call, it will apply them
    sequentially so that a user can understand at what point in the regex the
    failure happened with a given value.

    .. note::
        Direct use of this class is discouraged as the functionality is exposed
        through the :func:`chain` helper function.
    """

    def __init__(self, regexes, prepend_negation=None):
        """
        :param regexes: an iterable of ``(pattern, message)`` pairs.
        :param prepend_negation: when truthy, failure messages are prefixed
            with ``"does not "``.  ``None`` is treated as ``False``.
        """
        self._sanity(regexes)
        # Fix: the original normalized ``None`` to ``False`` and then
        # immediately re-assigned the raw argument, which clobbered the
        # normalization back to ``None`` (dead code).  Both values are
        # falsy so observable behavior of ``_get_reason`` is unchanged,
        # but the attribute now honors the documented default.
        self.prepend_negation = False if prepend_negation is None else prepend_negation
        self.regexes = regexes
        self._build()

    def _sanity(self, items):
        """Ensure every item unpacks into a (pattern, message) pair."""
        try:
            for item in items:
                pattern, reason = item
        except ValueError:
            raise TypeError('arguments must be key value pairs')

    def _build(self):
        """Compile one cumulative (progressively longer) pattern per input."""
        self.append(re.compile(self.regexes[0][0]))
        for number in range(1, len(self.regexes)):
            new_regex = self._get_regex(number)
            self.append(re.compile(new_regex))

    def _get_regex(self, number):
        """Concatenate the first ``number + 1`` patterns into one regex."""
        items = []
        for item, _ in self.regexes[:number + 1]:
            items.append(item)
        return ''.join(items)

    def _get_reason(self, item_number):
        """Return the failure message, optionally prefixed with a negation."""
        reason = self.regexes[item_number][1]
        if self.prepend_negation:
            return "does not %s" % self.regexes[item_number][1]
        return reason

    def __call__(self, value):
        """Apply the cumulative patterns in order; raise on first mismatch."""
        for count, regex in enumerate(self):
            if not regex.match(value):
                raise AssertionError(self._get_reason(count))
|
alfredodeza/notario | notario/engine.py | validate | python | def validate(data, schema, defined_keys=False):
if isinstance(data, dict):
validator = Validator(data, schema, defined_keys=defined_keys)
validator.validate()
else:
raise TypeError('expected data to be of type dict, but got: %s' % type(data)) | Main entry point for the validation engine.
:param data: The incoming data, as a dictionary object.
:param schema: The schema from which data will be validated against | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/engine.py#L343-L354 | [
"def validate(self):\n if self.data == {} and self.schema:\n msg = 'has no data to validate against schema'\n reason = 'an empty dictionary object was provided'\n raise Invalid(None, {}, msg=msg, reason=reason, pair='value')\n self.traverser(self.data, self.schema, [])\n"
] | import sys
from notario.exceptions import Invalid, SchemaError
from notario.utils import (is_callable, sift, is_empty, re_sort, is_not_empty,
data_item, safe_repr, ensure)
from notario.normal import Data, Schema
from notario.validators import cherry_pick
class Validator(object):
    """
    Walks a normalized data dictionary against a normalized schema,
    raising ``Invalid`` (data problems) or ``SchemaError`` (schema
    problems) at the first failure.
    """

    def __init__(self, data, schema, defined_keys=None):
        # ``defined_keys`` turns the schema into a cherry-picked one so
        # only the keys the schema defines are validated.
        if defined_keys:
            schema = cherry_pick(schema)
        # Data/Schema normalize both sides into index-keyed dicts
        # (e.g. ``{0: (key, value)}``) so they can be walked in lockstep.
        self.data = Data(data, schema).normalized()
        self.schema = Schema(data, schema).normalized()

    def validate(self):
        """Entry point; empty data against a non-empty schema is invalid."""
        if self.data == {} and self.schema:
            msg = 'has no data to validate against schema'
            reason = 'an empty dictionary object was provided'
            raise Invalid(None, {}, msg=msg, reason=reason, pair='value')
        self.traverser(self.data, self.schema, [])

    def traverser(self, data, schema, tree):
        """
        Traverses the dictionary, recursing onto itself if
        it sees appropriate key/value pairs that indicate that
        there is a need for more validation in a branch below us.
        """
        # Validator leaves (decorated validators) take over entirely.
        if hasattr(schema, '__validator_leaf__'):
            return schema(data, tree)

        if hasattr(schema, 'must_validate'):  # cherry picking?
            if not len(schema.must_validate):
                reason = "must_validate attribute must not be empty"
                raise SchemaError(data, tree, reason=reason)
            # Drop data keys the cherry-picked schema does not care about.
            data = sift(data, schema.must_validate)

        schema = self.sanitize_optionals(data, schema, tree)
        self.is_alpha_ordered(data, schema, tree)

        validated_indexes = []
        skip_missing_indexes = getattr(schema, 'must_validate', False)

        if len(data) < len(schema):
            # we have missing required items in data, but we don't know
            # which ones so find what may fail:
            data_keys = [v[0] for v in data.values()]
            schema_keys = [v[0] for v in schema.values()]

            def enforce_once(data_keys, schema_key):
                # XXX Go through all the data keys and try and see if they pass
                # validation against the schema. At this point it is impossible
                # to know which data key corresponds to what schema key
                # (because schema keys can be a function/callable) so it is
                # a *very* naive way to try and detect which one might be
                # missing
                for data_key in data_keys:
                    failed = None
                    try:
                        enforce(data_key, schema_key, tree, pair='key')
                        return
                    except Invalid:
                        failed = data_key, schema_key

                    if failed:
                        return failed

            # if there are no callables in the schema keys, just
            # find the missing data key directly
            if all([not is_callable(s) for s in schema_keys]):
                for schema_key in schema_keys:
                    if schema_key not in data_keys:
                        msg = "required key in data is missing: %s" % str(schema_key)
                        raise Invalid(None, tree, reason=msg, pair='key')

            for schema_key in schema_keys:
                failure = enforce_once(data_keys, schema_key)
                if failure:
                    _, failed_schema_key = failure
                    msg = "required key in data is missing: %s" % str(failed_schema_key)
                    raise Invalid(None, tree, reason=msg, pair='key')

        for index in range(len(data)):
            self.length_equality(data, schema, index, tree)
            key, value = data[index]
            skey, svalue = schema[index]
            tree.append(key)

            # Validate the key before anything, to prevent recursing
            self.key_leaf(data[index], schema[index], tree)

            # If a dict is a value we need to recurse.
            # XXX Should we check isinstance(value, ndict) ?
            if isinstance(value, dict) and len(value):
                self.traverser(value, svalue, tree)
            else:
                self.value_leaf(data[index], schema[index], tree)
            if tree:
                tree.pop()

            validated_indexes.append(index)

        # XXX There is a chance we might have missing items from
        # the incoming data that are labeled as required from the schema
        # we should make sure *here* that we account for that and raise
        # the appropriate exception. Since the loop finished and everything
        # seems to have passed, this lack of check will give false positives.
        missing_indexes = set(schema.keys()).difference(validated_indexes)
        if missing_indexes:
            if skip_missing_indexes:
                return
            for i in missing_indexes:
                if not hasattr(schema[i], 'is_optional'):
                    required_key = schema[i][0]
                    tree.append('item[%s]' % i)
                    msg = "required item in schema is missing: %s" % str(required_key)
                    raise Invalid(required_key, tree, reason=msg, pair='key')

    def key_leaf(self, data, schema, tree):
        """
        The deepest validation we can make in any given circumstance for a key.
        Does not recurse, it will just receive both values and the tree,
        passing them on to the :fun:`enforce` function.
        """
        key, value = data
        schema_key, schema_value = schema
        enforce(key, schema_key, tree, 'key')

    def value_leaf(self, data, schema, tree):
        """
        The deepest validation we can make in any given circumstance for
        a value. Does not recurse, it will just receive both values and the
        tree, passing them on to the :fun:`enforce` function.
        """
        key, value = data
        schema_key, schema_value = schema

        # Validator leaves handle the value themselves.
        if hasattr(schema_value, '__validator_leaf__'):
            return schema_value(value, tree)
        enforce(value, schema_value, tree, 'value')

    def is_alpha_ordered(self, data, normalized_schema, tree):
        """Require string schema keys to be in alphabetical order."""
        keys = []
        indexes = normalized_schema.keys()
        for index in indexes:
            key = normalized_schema[index][0]
            if isinstance(key, str):
                keys.append(key)
            elif hasattr(key, '_object'):
                # Wrapped keys (presumably optional() markers — confirm)
                # expose the raw key as ``_object``.
                if isinstance(key._object, str):
                    keys.append(key._object)

        sorted_keys = sorted(keys)
        if keys != sorted_keys:
            for index, key in enumerate(keys):
                if key != sorted_keys[index]:
                    raise SchemaError(
                        keys, [key],
                        reason='schema item is not alphabetically ordered'
                    )

    def length_equality(self, data, schema, index, tree):
        """Check the item at ``index`` exists on both sides and lengths agree."""
        try:
            data = data[index]
            try:
                schema = schema[index]
            except KeyError:
                # Data has an item the schema knows nothing about.
                if not hasattr(schema, 'must_validate'):
                    reason = 'has unexpected item in data: %s' % data_item(data)
                    raise Invalid(None, tree, msg=reason, reason=reason, pair='value')
        except (KeyError, TypeError):
            if not hasattr(schema, 'must_validate'):
                reason = "has less items in schema than in data"
                raise SchemaError(data, tree, reason=reason)
        if hasattr(schema, '__validator_leaf__'):
            return

        if len(data) != len(schema):
            raise SchemaError(data, tree, reason='length did not match schema')

    def sanitize_optionals(self, data, schema, tree):
        """Drop optional schema entries whose keys are absent from data."""
        # NOTE(review): schema_key_map is built but never used afterwards —
        # looks like dead code; confirm before removing.
        schema_key_map = {}
        try:
            for number, value in schema.items():
                schema_key_map[number] = getattr(value[0], '_object', value[0])
        except AttributeError:  # maybe not a dict?
            self.length_equality(data, schema, 0, tree)

        optional_keys = {}
        for k, v in schema.items():
            try:
                key = getattr(v[0], '_object')
                if key:
                    optional_keys[k] = key
            except AttributeError:
                pass

        data_keys = [v[0] for k, v in data.items()]

        for number, value in optional_keys.items():
            if value not in data_keys:
                del schema[number]
        if not schema and is_not_empty(data):
            msg = "unexpected extra items"
            raise Invalid(schema, tree, reason=msg)
        # Deleting entries leaves gaps in the index keys; re_sort re-numbers.
        return re_sort(schema)
class BaseItemValidator(object):
    """
    Shared plumbing for item validators: stores the data/schema pair plus
    traversal state and delegates the per-item work to ``self.leaves``,
    which concrete subclasses are expected to provide.
    """

    def __init__(self, data, schema, tree=None, index=None, name=None):
        self.data = data
        self.schema = schema
        # Falsy sentinels collapse to their defaults (same effect as the
        # ``tree or []`` / ``index or 0`` idiom).
        if tree:
            self.tree = tree
        else:
            self.tree = []
        if index:
            self.index = index
        else:
            self.index = 0
        self.name = name

    def validate(self):
        """Entry point: walk the stored data against the stored schema."""
        self.traverser(self.data, self.schema, self.tree)

    def traverser(self, data, schema, tree):
        # Guard: cannot start validating past the end of the data.
        if len(data) < self.index:
            raise SchemaError(
                data, tree, reason="has not enough items to select from")
        self.leaves(data, schema, tree)
class IterableValidator(BaseItemValidator):
    """
    The iterable validator allows the definition of a single schema that can be
    run against any number of items in a given data structure
    """

    def data_sanity(self, data, tree=None):
        """Reject anything that is not a list before iterating it."""
        if not isinstance(data, list):
            name = self.name or 'IterableValidator'
            reason = 'expected a list but got %s' % safe_repr(data)
            msg = 'did not pass validation against callable: %s' % name
            raise Invalid('', tree or [], msg=msg, reason=reason, pair='value')

    def leaf(self, index):
        """Validate the single item at ``index`` against the schema."""
        self.data_sanity(self.data, tree=self.tree)
        self.enforce(self.data, self.schema, index, self.tree)

    def leaves(self, data, schema, tree):
        """Validate every item from ``self.index`` onwards."""
        self.data_sanity(data, tree=tree)
        for item_index in range(self.index, len(data)):
            self.enforce(data, schema, item_index, tree)

    def enforce(self, data, schema, item_index, tree):
        """Apply ``schema`` to ``data[item_index]``, grafting the item's
        position (``list[N]``) onto any reported failure path."""
        # yo dawg, a recursive validator within a recursive validator anyone?
        if is_callable(schema) and hasattr(schema, '__validator_leaf__'):
            return schema(data[item_index], tree)
        if isinstance(data[item_index], dict) and isinstance(schema, tuple):
            # Dict item vs tuple schema: run a full nested Validator.
            try:
                _validator = Validator(data[item_index], schema)
                _validator.validate()
            except Invalid:
                # sys.exc_info keeps this compatible with Python 2 and 3.
                e = sys.exc_info()[1]
                tree.append('list[%s]' % item_index)
                tree.extend(e.path)
                raise Invalid(e.schema_item, tree, reason=e._reason, pair='value')
            # FIXME this is utterly redundant, and also happens in
            # RecursiveValidator
            except SchemaError:
                e = sys.exc_info()[1]
                tree.extend(e.path)
                raise SchemaError('', tree, reason=e._reason, pair='value')
        elif isinstance(schema, tuple) and not isinstance(data[item_index], (tuple, dict)):
            raise SchemaError(data, tree, reason='iterable contains single items, schema does not')
        else:
            try:
                if is_callable(schema):
                    schema(data[item_index])
                else:
                    ensure(data[item_index] == schema)
            except AssertionError:
                reason = sys.exc_info()[1]
                tree.append('list[%s]' % item_index)
                raise Invalid(schema, tree, reason=reason, pair='item')
class RecursiveValidator(BaseItemValidator):
    """
    The recursive validator allows the definition of a single schema that
    can be run against any number of items in a given data structure
    """

    def leaf(self, index):
        """Validate the single item at ``index`` against the stored schema."""
        self.enforce(self.data, self.schema, index, self.tree)

    def leaves(self, data, schema, tree):
        """Validate every item from ``self.index`` onwards."""
        for item_index in range(self.index, len(data)):
            self.enforce(data, schema, item_index, tree)

    def enforce(self, data, schema, item_index, tree):
        """Run a nested Validator over one item, re-raising with path info."""
        # yo dawg, a recursive validator within a recursive validator anyone?
        if is_callable(schema) and hasattr(schema, '__validator_leaf__'):
            return schema(data, tree)
        try:
            # Re-use Validator by injecting an already-normalized
            # single-item data dict ({0: item}) against the stored schema.
            _validate = Validator({}, self.schema)
            _validate.data = {0: data[item_index]}
            _validate.validate()
        except Invalid:
            # sys.exc_info keeps this compatible with Python 2 and 3.
            e = sys.exc_info()[1]
            tree.extend(e.path)
            raise Invalid(e.schema_item, tree, pair='value', msg=e._msg, reason=e._reason)
        except SchemaError:
            e = sys.exc_info()[1]
            tree.extend(e.path)
            raise SchemaError('', tree, reason=e._reason, pair='value')
def enforce(data_item, schema_item, tree, pair):
    """
    Compare (or call) ``schema_item`` against ``data_item`` and translate
    any ``AssertionError`` into an ``Invalid`` carrying the tree path.

    :param pair: either ``'key'`` or ``'value'``; value failures append the
        offending data item to the path.
    """
    optional = hasattr(schema_item, 'is_optional')

    def _fail():
        # Shared failure path: convert the current AssertionError into
        # an Invalid, recording the data item for value failures.
        err = sys.exc_info()[1]
        if pair == 'value':
            tree.append(data_item)
        raise Invalid(schema_item, tree, reason=err, pair=pair)

    if is_callable(schema_item) and not optional:
        # Plain callable validator: delegate entirely to it.
        try:
            schema_item(data_item)
        except AssertionError:
            _fail()
    elif optional:
        try:
            if is_empty(data_item):  # optional + nothing received: pass
                return
            # Optional wrappers are called to obtain the wrapped item.
            ensure(data_item == schema_item())
        except AssertionError:
            _fail()
    else:
        # Plain literal: straight equality check.
        try:
            ensure(data_item == schema_item)
        except AssertionError:
            _fail()
|
alfredodeza/notario | notario/engine.py | Validator.traverser | python | def traverser(self, data, schema, tree):
if hasattr(schema, '__validator_leaf__'):
return schema(data, tree)
if hasattr(schema, 'must_validate'): # cherry picking?
if not len(schema.must_validate):
reason = "must_validate attribute must not be empty"
raise SchemaError(data, tree, reason=reason)
data = sift(data, schema.must_validate)
schema = self.sanitize_optionals(data, schema, tree)
self.is_alpha_ordered(data, schema, tree)
validated_indexes = []
skip_missing_indexes = getattr(schema, 'must_validate', False)
if len(data) < len(schema):
# we have missing required items in data, but we don't know
# which ones so find what may fail:
data_keys = [v[0] for v in data.values()]
schema_keys = [v[0] for v in schema.values()]
def enforce_once(data_keys, schema_key):
# XXX Go through all the data keys and try and see if they pass
# validation against the schema. At this point it is impossible
# to know which data key corresponds to what schema key
# (because schema keys can be a function/callable) so it is
# a *very* naive way to try and detect which one might be
# missing
for data_key in data_keys:
failed = None
try:
enforce(data_key, schema_key, tree, pair='key')
return
except Invalid:
failed = data_key, schema_key
if failed:
return failed
# if there are no callables in the schema keys, just
# find the missing data key directly
if all([not is_callable(s) for s in schema_keys]):
for schema_key in schema_keys:
if schema_key not in data_keys:
msg = "required key in data is missing: %s" % str(schema_key)
raise Invalid(None, tree, reason=msg, pair='key')
for schema_key in schema_keys:
failure = enforce_once(data_keys, schema_key)
if failure:
_, failed_schema_key = failure
msg = "required key in data is missing: %s" % str(failed_schema_key)
raise Invalid(None, tree, reason=msg, pair='key')
for index in range(len(data)):
self.length_equality(data, schema, index, tree)
key, value = data[index]
skey, svalue = schema[index]
tree.append(key)
# Validate the key before anything, to prevent recursing
self.key_leaf(data[index], schema[index], tree)
# If a dict is a value we need to recurse.
# XXX Should we check isinstance(value, ndict) ?
if isinstance(value, dict) and len(value):
self.traverser(value, svalue, tree)
else:
self.value_leaf(data[index], schema[index], tree)
if tree:
tree.pop()
validated_indexes.append(index)
# XXX There is a chance we might have missing items from
# the incoming data that are labeled as required from the schema
# we should make sure *here* that we account for that and raise
# the appropriate exception. Since the loop finished and everything
# seems to have passed, this lack of check will give false positives.
missing_indexes = set(schema.keys()).difference(validated_indexes)
if missing_indexes:
if skip_missing_indexes:
return
for i in missing_indexes:
if not hasattr(schema[i], 'is_optional'):
required_key = schema[i][0]
tree.append('item[%s]' % i)
msg = "required item in schema is missing: %s" % str(required_key)
raise Invalid(required_key, tree, reason=msg, pair='key') | Traverses the dictionary, recursing onto itself if
it sees appropriate key/value pairs that indicate that
there is a need for more validation in a branch below us. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/engine.py#L24-L118 | [
"def sift(data, required_items=None):\n \"\"\"\n Receive a ``data`` object that will be in the form\n of a normalized structure (e.g. ``{0: {'a': 0}}``) and\n filter out keys that match the ``required_items``.\n \"\"\"\n required_items = required_items or []\n new_data = {}\n for k, v in data.items():\n if v[0] in required_items:\n new_data[k] = v\n continue\n for required_item in required_items:\n key = getattr(required_item, '_object', False)\n if key:\n if v[0] == key:\n new_data[k] = v\n\n return re_sort(new_data)\n",
"def traverser(self, data, schema, tree):\n \"\"\"\n Traverses the dictionary, recursing onto itself if\n it sees appropriate key/value pairs that indicate that\n there is a need for more validation in a branch below us.\n \"\"\"\n if hasattr(schema, '__validator_leaf__'):\n return schema(data, tree)\n\n if hasattr(schema, 'must_validate'): # cherry picking?\n if not len(schema.must_validate):\n reason = \"must_validate attribute must not be empty\"\n raise SchemaError(data, tree, reason=reason)\n data = sift(data, schema.must_validate)\n\n schema = self.sanitize_optionals(data, schema, tree)\n self.is_alpha_ordered(data, schema, tree)\n\n validated_indexes = []\n skip_missing_indexes = getattr(schema, 'must_validate', False)\n\n if len(data) < len(schema):\n # we have missing required items in data, but we don't know\n # which ones so find what may fail:\n data_keys = [v[0] for v in data.values()]\n schema_keys = [v[0] for v in schema.values()]\n\n def enforce_once(data_keys, schema_key):\n # XXX Go through all the data keys and try and see if they pass\n # validation against the schema. 
At this point it is impossible\n # to know which data key corresponds to what schema key\n # (because schema keys can be a function/callable) so it is\n # a *very* naive way to try and detect which one might be\n # missing\n for data_key in data_keys:\n failed = None\n try:\n enforce(data_key, schema_key, tree, pair='key')\n return\n except Invalid:\n failed = data_key, schema_key\n\n if failed:\n return failed\n\n # if there are no callables in the schema keys, just\n # find the missing data key directly\n if all([not is_callable(s) for s in schema_keys]):\n for schema_key in schema_keys:\n if schema_key not in data_keys:\n msg = \"required key in data is missing: %s\" % str(schema_key)\n raise Invalid(None, tree, reason=msg, pair='key')\n\n for schema_key in schema_keys:\n failure = enforce_once(data_keys, schema_key)\n if failure:\n _, failed_schema_key = failure\n msg = \"required key in data is missing: %s\" % str(failed_schema_key)\n raise Invalid(None, tree, reason=msg, pair='key')\n\n for index in range(len(data)):\n self.length_equality(data, schema, index, tree)\n key, value = data[index]\n skey, svalue = schema[index]\n tree.append(key)\n\n # Validate the key before anything, to prevent recursing\n self.key_leaf(data[index], schema[index], tree)\n\n # If a dict is a value we need to recurse.\n # XXX Should we check isinstance(value, ndict) ?\n if isinstance(value, dict) and len(value):\n self.traverser(value, svalue, tree)\n else:\n self.value_leaf(data[index], schema[index], tree)\n if tree:\n tree.pop()\n\n validated_indexes.append(index)\n\n # XXX There is a chance we might have missing items from\n # the incoming data that are labeled as required from the schema\n # we should make sure *here* that we account for that and raise\n # the appropriate exception. 
Since the loop finished and everything\n # seems to have passed, this lack of check will give false positives.\n missing_indexes = set(schema.keys()).difference(validated_indexes)\n if missing_indexes:\n if skip_missing_indexes:\n return\n for i in missing_indexes:\n if not hasattr(schema[i], 'is_optional'):\n required_key = schema[i][0]\n tree.append('item[%s]' % i)\n msg = \"required item in schema is missing: %s\" % str(required_key)\n raise Invalid(required_key, tree, reason=msg, pair='key')\n",
"def key_leaf(self, data, schema, tree):\n \"\"\"\n The deepest validation we can make in any given circumstance for a key.\n Does not recurse, it will just receive both values and the tree,\n passing them on to the :fun:`enforce` function.\n \"\"\"\n key, value = data\n schema_key, schema_value = schema\n enforce(key, schema_key, tree, 'key')\n",
"def value_leaf(self, data, schema, tree):\n \"\"\"\n The deepest validation we can make in any given circumstance for\n a value. Does not recurse, it will just receive both values and the\n tree, passing them on to the :fun:`enforce` function.\n \"\"\"\n key, value = data\n schema_key, schema_value = schema\n\n if hasattr(schema_value, '__validator_leaf__'):\n return schema_value(value, tree)\n enforce(value, schema_value, tree, 'value')\n",
"def is_alpha_ordered(self, data, normalized_schema, tree):\n keys = []\n indexes = normalized_schema.keys()\n for index in indexes:\n key = normalized_schema[index][0]\n if isinstance(key, str):\n keys.append(key)\n elif hasattr(key, '_object') :\n if isinstance(key._object, str):\n keys.append(key._object)\n\n sorted_keys = sorted(keys)\n if keys != sorted_keys:\n for index, key in enumerate(keys):\n if key != sorted_keys[index]:\n raise SchemaError(\n keys, [key],\n reason='schema item is not alphabetically ordered'\n )\n",
"def length_equality(self, data, schema, index, tree):\n try:\n data = data[index]\n try:\n schema = schema[index]\n except KeyError:\n if not hasattr(schema, 'must_validate'):\n reason = 'has unexpected item in data: %s' % data_item(data)\n raise Invalid(None, tree, msg=reason, reason=reason, pair='value')\n except (KeyError, TypeError):\n if not hasattr(schema, 'must_validate'):\n reason = \"has less items in schema than in data\"\n raise SchemaError(data, tree, reason=reason)\n if hasattr(schema, '__validator_leaf__'):\n return\n\n if len(data) != len(schema):\n raise SchemaError(data, tree, reason='length did not match schema')\n",
"def sanitize_optionals(self, data, schema, tree):\n schema_key_map = {}\n try:\n for number, value in schema.items():\n schema_key_map[number] = getattr(value[0], '_object', value[0])\n except AttributeError: # maybe not a dict?\n self.length_equality(data, schema, 0, tree)\n\n optional_keys = {}\n for k, v in schema.items():\n try:\n key = getattr(v[0], '_object')\n if key:\n optional_keys[k] = key\n except AttributeError:\n pass\n\n data_keys = [v[0] for k, v in data.items()]\n\n for number, value in optional_keys.items():\n if value not in data_keys:\n del schema[number]\n if not schema and is_not_empty(data):\n msg = \"unexpected extra items\"\n raise Invalid(schema, tree, reason=msg)\n return re_sort(schema)\n",
"def enforce_once(data_keys, schema_key):\n # XXX Go through all the data keys and try and see if they pass\n # validation against the schema. At this point it is impossible\n # to know which data key corresponds to what schema key\n # (because schema keys can be a function/callable) so it is\n # a *very* naive way to try and detect which one might be\n # missing\n for data_key in data_keys:\n failed = None\n try:\n enforce(data_key, schema_key, tree, pair='key')\n return\n except Invalid:\n failed = data_key, schema_key\n\n if failed:\n return failed\n"
] | class Validator(object):
def __init__(self, data, schema, defined_keys=None):
if defined_keys:
schema = cherry_pick(schema)
self.data = Data(data, schema).normalized()
self.schema = Schema(data, schema).normalized()
def validate(self):
if self.data == {} and self.schema:
msg = 'has no data to validate against schema'
reason = 'an empty dictionary object was provided'
raise Invalid(None, {}, msg=msg, reason=reason, pair='value')
self.traverser(self.data, self.schema, [])
def key_leaf(self, data, schema, tree):
"""
The deepest validation we can make in any given circumstance for a key.
Does not recurse, it will just receive both values and the tree,
passing them on to the :fun:`enforce` function.
"""
key, value = data
schema_key, schema_value = schema
enforce(key, schema_key, tree, 'key')
def value_leaf(self, data, schema, tree):
"""
The deepest validation we can make in any given circumstance for
a value. Does not recurse, it will just receive both values and the
tree, passing them on to the :fun:`enforce` function.
"""
key, value = data
schema_key, schema_value = schema
if hasattr(schema_value, '__validator_leaf__'):
return schema_value(value, tree)
enforce(value, schema_value, tree, 'value')
def is_alpha_ordered(self, data, normalized_schema, tree):
keys = []
indexes = normalized_schema.keys()
for index in indexes:
key = normalized_schema[index][0]
if isinstance(key, str):
keys.append(key)
elif hasattr(key, '_object') :
if isinstance(key._object, str):
keys.append(key._object)
sorted_keys = sorted(keys)
if keys != sorted_keys:
for index, key in enumerate(keys):
if key != sorted_keys[index]:
raise SchemaError(
keys, [key],
reason='schema item is not alphabetically ordered'
)
def length_equality(self, data, schema, index, tree):
try:
data = data[index]
try:
schema = schema[index]
except KeyError:
if not hasattr(schema, 'must_validate'):
reason = 'has unexpected item in data: %s' % data_item(data)
raise Invalid(None, tree, msg=reason, reason=reason, pair='value')
except (KeyError, TypeError):
if not hasattr(schema, 'must_validate'):
reason = "has less items in schema than in data"
raise SchemaError(data, tree, reason=reason)
if hasattr(schema, '__validator_leaf__'):
return
if len(data) != len(schema):
raise SchemaError(data, tree, reason='length did not match schema')
def sanitize_optionals(self, data, schema, tree):
schema_key_map = {}
try:
for number, value in schema.items():
schema_key_map[number] = getattr(value[0], '_object', value[0])
except AttributeError: # maybe not a dict?
self.length_equality(data, schema, 0, tree)
optional_keys = {}
for k, v in schema.items():
try:
key = getattr(v[0], '_object')
if key:
optional_keys[k] = key
except AttributeError:
pass
data_keys = [v[0] for k, v in data.items()]
for number, value in optional_keys.items():
if value not in data_keys:
del schema[number]
if not schema and is_not_empty(data):
msg = "unexpected extra items"
raise Invalid(schema, tree, reason=msg)
return re_sort(schema)
|
alfredodeza/notario | notario/engine.py | Validator.key_leaf | python | def key_leaf(self, data, schema, tree):
key, value = data
schema_key, schema_value = schema
enforce(key, schema_key, tree, 'key') | The deepest validation we can make in any given circumstance for a key.
Does not recurse, it will just receive both values and the tree,
passing them on to the :fun:`enforce` function. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/engine.py#L121-L129 | [
"def enforce(data_item, schema_item, tree, pair):\n schema_is_optional = hasattr(schema_item, 'is_optional')\n if is_callable(schema_item) and not schema_is_optional:\n try:\n schema_item(data_item)\n except AssertionError:\n e = sys.exc_info()[1]\n if pair == 'value':\n tree.append(data_item)\n raise Invalid(schema_item, tree, reason=e, pair=pair)\n else:\n try:\n if schema_is_optional:\n if is_empty(data_item): # we received nothing here\n return\n ensure(data_item == schema_item())\n else:\n ensure(data_item == schema_item)\n except AssertionError:\n e = sys.exc_info()[1]\n if pair == 'value':\n tree.append(data_item)\n raise Invalid(schema_item, tree, reason=e, pair=pair)\n"
] | class Validator(object):
def __init__(self, data, schema, defined_keys=None):
if defined_keys:
schema = cherry_pick(schema)
self.data = Data(data, schema).normalized()
self.schema = Schema(data, schema).normalized()
def validate(self):
if self.data == {} and self.schema:
msg = 'has no data to validate against schema'
reason = 'an empty dictionary object was provided'
raise Invalid(None, {}, msg=msg, reason=reason, pair='value')
self.traverser(self.data, self.schema, [])
def traverser(self, data, schema, tree):
"""
Traverses the dictionary, recursing onto itself if
it sees appropriate key/value pairs that indicate that
there is a need for more validation in a branch below us.
"""
if hasattr(schema, '__validator_leaf__'):
return schema(data, tree)
if hasattr(schema, 'must_validate'): # cherry picking?
if not len(schema.must_validate):
reason = "must_validate attribute must not be empty"
raise SchemaError(data, tree, reason=reason)
data = sift(data, schema.must_validate)
schema = self.sanitize_optionals(data, schema, tree)
self.is_alpha_ordered(data, schema, tree)
validated_indexes = []
skip_missing_indexes = getattr(schema, 'must_validate', False)
if len(data) < len(schema):
# we have missing required items in data, but we don't know
# which ones so find what may fail:
data_keys = [v[0] for v in data.values()]
schema_keys = [v[0] for v in schema.values()]
def enforce_once(data_keys, schema_key):
# XXX Go through all the data keys and try and see if they pass
# validation against the schema. At this point it is impossible
# to know which data key corresponds to what schema key
# (because schema keys can be a function/callable) so it is
# a *very* naive way to try and detect which one might be
# missing
for data_key in data_keys:
failed = None
try:
enforce(data_key, schema_key, tree, pair='key')
return
except Invalid:
failed = data_key, schema_key
if failed:
return failed
# if there are no callables in the schema keys, just
# find the missing data key directly
if all([not is_callable(s) for s in schema_keys]):
for schema_key in schema_keys:
if schema_key not in data_keys:
msg = "required key in data is missing: %s" % str(schema_key)
raise Invalid(None, tree, reason=msg, pair='key')
for schema_key in schema_keys:
failure = enforce_once(data_keys, schema_key)
if failure:
_, failed_schema_key = failure
msg = "required key in data is missing: %s" % str(failed_schema_key)
raise Invalid(None, tree, reason=msg, pair='key')
for index in range(len(data)):
self.length_equality(data, schema, index, tree)
key, value = data[index]
skey, svalue = schema[index]
tree.append(key)
# Validate the key before anything, to prevent recursing
self.key_leaf(data[index], schema[index], tree)
# If a dict is a value we need to recurse.
# XXX Should we check isinstance(value, ndict) ?
if isinstance(value, dict) and len(value):
self.traverser(value, svalue, tree)
else:
self.value_leaf(data[index], schema[index], tree)
if tree:
tree.pop()
validated_indexes.append(index)
# XXX There is a chance we might have missing items from
# the incoming data that are labeled as required from the schema
# we should make sure *here* that we account for that and raise
# the appropriate exception. Since the loop finished and everything
# seems to have passed, this lack of check will give false positives.
missing_indexes = set(schema.keys()).difference(validated_indexes)
if missing_indexes:
if skip_missing_indexes:
return
for i in missing_indexes:
if not hasattr(schema[i], 'is_optional'):
required_key = schema[i][0]
tree.append('item[%s]' % i)
msg = "required item in schema is missing: %s" % str(required_key)
raise Invalid(required_key, tree, reason=msg, pair='key')
def value_leaf(self, data, schema, tree):
"""
The deepest validation we can make in any given circumstance for
a value. Does not recurse, it will just receive both values and the
tree, passing them on to the :fun:`enforce` function.
"""
key, value = data
schema_key, schema_value = schema
if hasattr(schema_value, '__validator_leaf__'):
return schema_value(value, tree)
enforce(value, schema_value, tree, 'value')
def is_alpha_ordered(self, data, normalized_schema, tree):
keys = []
indexes = normalized_schema.keys()
for index in indexes:
key = normalized_schema[index][0]
if isinstance(key, str):
keys.append(key)
elif hasattr(key, '_object') :
if isinstance(key._object, str):
keys.append(key._object)
sorted_keys = sorted(keys)
if keys != sorted_keys:
for index, key in enumerate(keys):
if key != sorted_keys[index]:
raise SchemaError(
keys, [key],
reason='schema item is not alphabetically ordered'
)
def length_equality(self, data, schema, index, tree):
try:
data = data[index]
try:
schema = schema[index]
except KeyError:
if not hasattr(schema, 'must_validate'):
reason = 'has unexpected item in data: %s' % data_item(data)
raise Invalid(None, tree, msg=reason, reason=reason, pair='value')
except (KeyError, TypeError):
if not hasattr(schema, 'must_validate'):
reason = "has less items in schema than in data"
raise SchemaError(data, tree, reason=reason)
if hasattr(schema, '__validator_leaf__'):
return
if len(data) != len(schema):
raise SchemaError(data, tree, reason='length did not match schema')
def sanitize_optionals(self, data, schema, tree):
    """
    Strip optional schema entries that have no matching key in the data
    and return the remaining schema re-sorted with contiguous indexes.

    Optional keys are detected via the ``_object`` attribute exposed by
    their key wrappers.  Raises ``Invalid`` when the schema empties out
    while the data still contains items.
    """
    # NOTE(review): schema_key_map is populated but never read again --
    # the loop's only effective job is probing that ``schema`` is
    # dict-like (the AttributeError below comes from ``schema.items()``).
    schema_key_map = {}
    try:
        for number, value in schema.items():
            schema_key_map[number] = getattr(value[0], '_object', value[0])
    except AttributeError:  # maybe not a dict?
        # Fall back to a length check.  NOTE(review): if this call
        # returns without raising, the ``schema.items()`` calls below
        # re-raise the same AttributeError uncaught -- presumably
        # length_equality is expected to raise here; TODO confirm.
        self.length_equality(data, schema, 0, tree)
    # Map schema index -> unwrapped optional key name.
    optional_keys = {}
    for k, v in schema.items():
        try:
            key = getattr(v[0], '_object')
            if key:
                optional_keys[k] = key
        except AttributeError:
            pass
    # Keys actually present in the incoming data.
    data_keys = [v[0] for k, v in data.items()]
    # Drop (in place) every optional schema entry whose key never
    # appears in the data.
    for number, value in optional_keys.items():
        if value not in data_keys:
            del schema[number]
    if not schema and is_not_empty(data):
        msg = "unexpected extra items"
        raise Invalid(schema, tree, reason=msg)
    # Re-index the surviving entries so positions stay contiguous.
    return re_sort(schema)
|
alfredodeza/notario | notario/engine.py | Validator.value_leaf | python | def value_leaf(self, data, schema, tree):
key, value = data
schema_key, schema_value = schema
if hasattr(schema_value, '__validator_leaf__'):
return schema_value(value, tree)
enforce(value, schema_value, tree, 'value') | The deepest validation we can make in any given circumstance for
a value. Does not recurse, it will just receive both values and the
tree, passing them on to the :fun:`enforce` function. | train | https://github.com/alfredodeza/notario/blob/d5dc2edfcb75d9291ced3f2551f368c35dd31475/notario/engine.py#L131-L142 | [
"def enforce(data_item, schema_item, tree, pair):\n schema_is_optional = hasattr(schema_item, 'is_optional')\n if is_callable(schema_item) and not schema_is_optional:\n try:\n schema_item(data_item)\n except AssertionError:\n e = sys.exc_info()[1]\n if pair == 'value':\n tree.append(data_item)\n raise Invalid(schema_item, tree, reason=e, pair=pair)\n else:\n try:\n if schema_is_optional:\n if is_empty(data_item): # we received nothing here\n return\n ensure(data_item == schema_item())\n else:\n ensure(data_item == schema_item)\n except AssertionError:\n e = sys.exc_info()[1]\n if pair == 'value':\n tree.append(data_item)\n raise Invalid(schema_item, tree, reason=e, pair=pair)\n"
] | class Validator(object):
def __init__(self, data, schema, defined_keys=None):
if defined_keys:
schema = cherry_pick(schema)
self.data = Data(data, schema).normalized()
self.schema = Schema(data, schema).normalized()
def validate(self):
if self.data == {} and self.schema:
msg = 'has no data to validate against schema'
reason = 'an empty dictionary object was provided'
raise Invalid(None, {}, msg=msg, reason=reason, pair='value')
self.traverser(self.data, self.schema, [])
def traverser(self, data, schema, tree):
"""
Traverses the dictionary, recursing onto itself if
it sees appropriate key/value pairs that indicate that
there is a need for more validation in a branch below us.
"""
if hasattr(schema, '__validator_leaf__'):
return schema(data, tree)
if hasattr(schema, 'must_validate'): # cherry picking?
if not len(schema.must_validate):
reason = "must_validate attribute must not be empty"
raise SchemaError(data, tree, reason=reason)
data = sift(data, schema.must_validate)
schema = self.sanitize_optionals(data, schema, tree)
self.is_alpha_ordered(data, schema, tree)
validated_indexes = []
skip_missing_indexes = getattr(schema, 'must_validate', False)
if len(data) < len(schema):
# we have missing required items in data, but we don't know
# which ones so find what may fail:
data_keys = [v[0] for v in data.values()]
schema_keys = [v[0] for v in schema.values()]
def enforce_once(data_keys, schema_key):
# XXX Go through all the data keys and try and see if they pass
# validation against the schema. At this point it is impossible
# to know which data key corresponds to what schema key
# (because schema keys can be a function/callable) so it is
# a *very* naive way to try and detect which one might be
# missing
for data_key in data_keys:
failed = None
try:
enforce(data_key, schema_key, tree, pair='key')
return
except Invalid:
failed = data_key, schema_key
if failed:
return failed
# if there are no callables in the schema keys, just
# find the missing data key directly
if all([not is_callable(s) for s in schema_keys]):
for schema_key in schema_keys:
if schema_key not in data_keys:
msg = "required key in data is missing: %s" % str(schema_key)
raise Invalid(None, tree, reason=msg, pair='key')
for schema_key in schema_keys:
failure = enforce_once(data_keys, schema_key)
if failure:
_, failed_schema_key = failure
msg = "required key in data is missing: %s" % str(failed_schema_key)
raise Invalid(None, tree, reason=msg, pair='key')
for index in range(len(data)):
self.length_equality(data, schema, index, tree)
key, value = data[index]
skey, svalue = schema[index]
tree.append(key)
# Validate the key before anything, to prevent recursing
self.key_leaf(data[index], schema[index], tree)
# If a dict is a value we need to recurse.
# XXX Should we check isinstance(value, ndict) ?
if isinstance(value, dict) and len(value):
self.traverser(value, svalue, tree)
else:
self.value_leaf(data[index], schema[index], tree)
if tree:
tree.pop()
validated_indexes.append(index)
# XXX There is a chance we might have missing items from
# the incoming data that are labeled as required from the schema
# we should make sure *here* that we account for that and raise
# the appropriate exception. Since the loop finished and everything
# seems to have passed, this lack of check will give false positives.
missing_indexes = set(schema.keys()).difference(validated_indexes)
if missing_indexes:
if skip_missing_indexes:
return
for i in missing_indexes:
if not hasattr(schema[i], 'is_optional'):
required_key = schema[i][0]
tree.append('item[%s]' % i)
msg = "required item in schema is missing: %s" % str(required_key)
raise Invalid(required_key, tree, reason=msg, pair='key')
def key_leaf(self, data, schema, tree):
"""
The deepest validation we can make in any given circumstance for a key.
Does not recurse, it will just receive both values and the tree,
passing them on to the :fun:`enforce` function.
"""
key, value = data
schema_key, schema_value = schema
enforce(key, schema_key, tree, 'key')
def is_alpha_ordered(self, data, normalized_schema, tree):
keys = []
indexes = normalized_schema.keys()
for index in indexes:
key = normalized_schema[index][0]
if isinstance(key, str):
keys.append(key)
elif hasattr(key, '_object') :
if isinstance(key._object, str):
keys.append(key._object)
sorted_keys = sorted(keys)
if keys != sorted_keys:
for index, key in enumerate(keys):
if key != sorted_keys[index]:
raise SchemaError(
keys, [key],
reason='schema item is not alphabetically ordered'
)
def length_equality(self, data, schema, index, tree):
try:
data = data[index]
try:
schema = schema[index]
except KeyError:
if not hasattr(schema, 'must_validate'):
reason = 'has unexpected item in data: %s' % data_item(data)
raise Invalid(None, tree, msg=reason, reason=reason, pair='value')
except (KeyError, TypeError):
if not hasattr(schema, 'must_validate'):
reason = "has less items in schema than in data"
raise SchemaError(data, tree, reason=reason)
if hasattr(schema, '__validator_leaf__'):
return
if len(data) != len(schema):
raise SchemaError(data, tree, reason='length did not match schema')
def sanitize_optionals(self, data, schema, tree):
schema_key_map = {}
try:
for number, value in schema.items():
schema_key_map[number] = getattr(value[0], '_object', value[0])
except AttributeError: # maybe not a dict?
self.length_equality(data, schema, 0, tree)
optional_keys = {}
for k, v in schema.items():
try:
key = getattr(v[0], '_object')
if key:
optional_keys[k] = key
except AttributeError:
pass
data_keys = [v[0] for k, v in data.items()]
for number, value in optional_keys.items():
if value not in data_keys:
del schema[number]
if not schema and is_not_empty(data):
msg = "unexpected extra items"
raise Invalid(schema, tree, reason=msg)
return re_sort(schema)
|
meyersj/geotweet | geotweet/twitter/stream_steps.py | GeoFilterStep.validate_geotweet | python | def validate_geotweet(self, record):
if record and self._validate('user', record) \
and self._validate('coordinates', record):
return True
return False | check that stream record is actual tweet with coordinates | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/twitter/stream_steps.py#L53-L58 | [
"def _validate(self, key, record):\n if key in record and record[key]:\n return True\n return False\n"
] | class GeoFilterStep(ProcessStep):
"""
Process output from Twitter Streaming API
For each record output from the API will be called as argument to process.
That function will validate and convert tweet to desired format.
"""
def _validate(self, key, record):
if key in record and record[key]:
return True
return False
def process(self, tweet):
""" Passes on tweet if missing 'geo' or 'user' property """
if self.validate_geotweet(tweet):
return self.next(tweet)
return None
|
meyersj/geotweet | geotweet/mapreduce/utils/lookup.py | SpatialLookup.get_object | python | def get_object(self, point, buffer_size=0, multiple=False):
# first search bounding boxes
# idx.intersection method modifies input if it is a list
try:
tmp = tuple(point)
except TypeError:
return None
# point must be in the form (minx, miny, maxx, maxy) or (x, y)
if len(tmp) not in [2, 4]:
return None
# buffer point if size is specified
geom = tmp = Point(tmp)
if buffer_size:
geom = tmp.buffer(buffer_size)
if multiple:
return self._get_all_near(geom)
return self._get_nearest(tmp, geom) | lookup object based on point as [longitude, latitude] | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/utils/lookup.py#L83-L101 | null | class SpatialLookup(FileReader):
""" Create a indexed spatial lookup of a geojson file """
idx = None
data_store = {}
def __init__(self, src=None):
if src:
if not self.is_valid_src(src):
error = "Arg src=< {0} > is invalid."
error += " Must be existing file or valid url that starts with 'http'"
raise ValueError(error.format(src))
# build index from geojson
self.data_store, self.idx = self._build_from_geojson(src)
else:
# create empty index in memory
self.data_store, self.idx = self._initialize()
def _get_nearest(self, point, geom):
nearest = None
for bbox_match in self.idx.intersection(geom.bounds):
# check actual geometry after matching bounding box
#record = bbox_match.object
record = self.data_store[bbox_match]
try:
if not record['geometry'].intersects(geom):
# skip processing current matching bbox
continue
# save only nearest record
dist = point.distance(record['geometry'])
if not nearest or dist < nearest['dist']:
nearest = dict(data=record, dist=dist)
except shapely.geos.TopologicalError as e:
# geometry is invalid so stop processing
pass
if nearest:
return nearest['data']['properties']
return None
def _get_all_near(self, geom):
results = []
for bbox_match in self.idx.intersection(geom.bounds):
# check actual geometry after matching bounding box
#record = bbox_match.object
record = self.data_store[bbox_match]
try:
if not record['geometry'].intersects(geom):
# skip processing current matching bbox
continue
# return all intersecting records
results.append(record['properties'])
except shapely.geos.TopologicalError as e:
# geometry is invalid so stop processing
pass
return results
def _build_obj(self, feature):
feature['geometry'] = shape(feature['geometry'])
return feature
def _build_from_geojson(self, src):
""" Build a RTree index to disk using bounding box of each feature """
geojson = json.loads(self.read(src))
idx = index.Index()
data_store = {}
for i, feature in enumerate(geojson['features']):
feature = self._build_obj(feature)
idx.insert(i, feature['geometry'].bounds)
data_store[i] = feature
return data_store, idx
def _initialize(self):
""" Build a RTree in memory for features to be added to """
return {}, index.Index()
def insert(self, key, feature):
feature = self._build_obj(feature)
self.data_store[key] = feature
self.idx.insert(key, feature['geometry'].bounds)
|
meyersj/geotweet | geotweet/mapreduce/utils/lookup.py | SpatialLookup._build_from_geojson | python | def _build_from_geojson(self, src):
geojson = json.loads(self.read(src))
idx = index.Index()
data_store = {}
for i, feature in enumerate(geojson['features']):
feature = self._build_obj(feature)
idx.insert(i, feature['geometry'].bounds)
data_store[i] = feature
return data_store, idx | Build a RTree index to disk using bounding box of each feature | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/utils/lookup.py#L107-L116 | null | class SpatialLookup(FileReader):
""" Create a indexed spatial lookup of a geojson file """
idx = None
data_store = {}
def __init__(self, src=None):
if src:
if not self.is_valid_src(src):
error = "Arg src=< {0} > is invalid."
error += " Must be existing file or valid url that starts with 'http'"
raise ValueError(error.format(src))
# build index from geojson
self.data_store, self.idx = self._build_from_geojson(src)
else:
# create empty index in memory
self.data_store, self.idx = self._initialize()
def _get_nearest(self, point, geom):
nearest = None
for bbox_match in self.idx.intersection(geom.bounds):
# check actual geometry after matching bounding box
#record = bbox_match.object
record = self.data_store[bbox_match]
try:
if not record['geometry'].intersects(geom):
# skip processing current matching bbox
continue
# save only nearest record
dist = point.distance(record['geometry'])
if not nearest or dist < nearest['dist']:
nearest = dict(data=record, dist=dist)
except shapely.geos.TopologicalError as e:
# geometry is invalid so stop processing
pass
if nearest:
return nearest['data']['properties']
return None
def _get_all_near(self, geom):
results = []
for bbox_match in self.idx.intersection(geom.bounds):
# check actual geometry after matching bounding box
#record = bbox_match.object
record = self.data_store[bbox_match]
try:
if not record['geometry'].intersects(geom):
# skip processing current matching bbox
continue
# return all intersecting records
results.append(record['properties'])
except shapely.geos.TopologicalError as e:
# geometry is invalid so stop processing
pass
return results
def get_object(self, point, buffer_size=0, multiple=False):
""" lookup object based on point as [longitude, latitude] """
# first search bounding boxes
# idx.intersection method modifies input if it is a list
try:
tmp = tuple(point)
except TypeError:
return None
# point must be in the form (minx, miny, maxx, maxy) or (x, y)
if len(tmp) not in [2, 4]:
return None
# buffer point if size is specified
geom = tmp = Point(tmp)
if buffer_size:
geom = tmp.buffer(buffer_size)
if multiple:
return self._get_all_near(geom)
return self._get_nearest(tmp, geom)
def _build_obj(self, feature):
feature['geometry'] = shape(feature['geometry'])
return feature
def _initialize(self):
""" Build a RTree in memory for features to be added to """
return {}, index.Index()
def insert(self, key, feature):
feature = self._build_obj(feature)
self.data_store[key] = feature
self.idx.insert(key, feature['geometry'].bounds)
|
meyersj/geotweet | geotweet/mapreduce/utils/lookup.py | CachedLookup.get | python | def get(self, point, buffer_size=0, multiple=False):
lon, lat = point
geohash = Geohash.encode(lat, lon, precision=self.precision)
key = (geohash, buffer_size, multiple)
if key in self.geohash_cache:
# cache hit on geohash
self.hit += 1
#print self.hit, self.miss
return self.geohash_cache[key]
self.miss += 1
# cache miss on geohash
# project point to ESRI:102005
lat, lon = Geohash.decode(geohash)
proj_point = project([float(lon), float(lat)])
args = dict(buffer_size=buffer_size, multiple=multiple)
payload = self.get_object(proj_point, **args)
self.geohash_cache[key] = payload
return payload | lookup state and county based on geohash of coordinates from tweet | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/utils/lookup.py#L139-L157 | [
"def project(lonlat):\n return transform(proj4326, proj102005, *lonlat)\n"
] | class CachedLookup(SpatialLookup):
""" Cache results of spatial lookups """
geohash_cache = {}
def __init__(self, precision=7, *args, **kwargs):
super(CachedLookup, self).__init__(*args, **kwargs)
self.precision = precision
self.hit = 0
self.miss = 0
|
meyersj/geotweet | geotweet/mapreduce/metro_wordcount.py | MRMetroMongoWordCount.mapper_init | python | def mapper_init(self):
self.lookup = CachedMetroLookup(precision=GEOHASH_PRECISION)
self.extractor = WordExtractor() | build local spatial index of US metro areas | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/metro_wordcount.py#L96-L99 | null | class MRMetroMongoWordCount(MRJob):
"""
Map Reduce job that counts word occurences for each US Metro Area
Requires a running MongoDB instance with us_metro_areas.geojson loaded
Mapper Init:
1. Build local Rtree spatial index of US metro areas
- geojson file downloaded from S3 bucket
Mapper:
1. Ignore tweets that appear to be from HR accounts about jobs and hiring
2. Lookup nearest metro area from spatial index using coordinates
3. Tokenize tweet in individual words
4. For each word output the tuple: (('metro area', 'word'), 1)
Reducer:
1. Sum count for each ('metro area', 'word') key
2. Insert record into MongoDB with word count for that metro area
Reducer Mongo:
1. Build list of all documents for each metro area and insert as batch
into MongoDB
"""
INPUT_PROTOCOL = JSONValueProtocol
INTERNAL_PROTOCOL = JSONProtocol
OUTPUT_PROTOCOL = RawValueProtocol
def steps(self):
return [
MRStep(
mapper_init=self.mapper_init,
mapper=self.mapper,
combiner=self.combiner,
reducer=self.reducer
),
MRStep(
reducer_init=self.reducer_init_output,
reducer=self.reducer_output
)
]
def mapper(self, _, data):
# ignore HR geo-tweets for job postings
expr = "|".join(["(job)", "(hiring)", "(career)"])
if data['description'] and re.findall(expr, data['description']):
return
# lookup nearest metro area
metro = self.lookup.get(data['lonlat'], METRO_DISTANCE)
if not metro:
return
# count each word
for word in self.extractor.run(data['text']):
yield (metro, word), 1
def combiner(self, key, value):
yield key, sum(value)
def reducer(self, key, values):
total = int(sum(values))
if total < MIN_WORD_COUNT:
return
metro, word = key
yield metro, (total, word)
def reducer_init_output(self):
""" establish connection to MongoDB """
try:
self.mongo = MongoGeo(db=DB, collection=COLLECTION, timeout=MONGO_TIMEOUT)
except ServerSelectionTimeoutError:
# failed to connect to running MongoDB instance
self.mongo = None
def reducer_output(self, metro, values):
records = []
for record in values:
total, word = record
records.append(dict(
metro_area=metro,
word=word,
count=total
))
output = "{0}\t{1}\t{2}"
output = output.format(metro.encode('utf-8'), total, word.encode('utf-8'))
yield None, output
if self.mongo:
self.mongo.insert_many(records)
|
meyersj/geotweet | geotweet/mapreduce/metro_wordcount.py | MRMetroMongoWordCount.reducer_init_output | python | def reducer_init_output(self):
try:
self.mongo = MongoGeo(db=DB, collection=COLLECTION, timeout=MONGO_TIMEOUT)
except ServerSelectionTimeoutError:
# failed to connect to running MongoDB instance
self.mongo = None | establish connection to MongoDB | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/metro_wordcount.py#L124-L130 | null | class MRMetroMongoWordCount(MRJob):
"""
Map Reduce job that counts word occurences for each US Metro Area
Requires a running MongoDB instance with us_metro_areas.geojson loaded
Mapper Init:
1. Build local Rtree spatial index of US metro areas
- geojson file downloaded from S3 bucket
Mapper:
1. Ignore tweets that appear to be from HR accounts about jobs and hiring
2. Lookup nearest metro area from spatial index using coordinates
3. Tokenize tweet in individual words
4. For each word output the tuple: (('metro area', 'word'), 1)
Reducer:
1. Sum count for each ('metro area', 'word') key
2. Insert record into MongoDB with word count for that metro area
Reducer Mongo:
1. Build list of all documents for each metro area and insert as batch
into MongoDB
"""
INPUT_PROTOCOL = JSONValueProtocol
INTERNAL_PROTOCOL = JSONProtocol
OUTPUT_PROTOCOL = RawValueProtocol
def steps(self):
return [
MRStep(
mapper_init=self.mapper_init,
mapper=self.mapper,
combiner=self.combiner,
reducer=self.reducer
),
MRStep(
reducer_init=self.reducer_init_output,
reducer=self.reducer_output
)
]
def mapper_init(self):
""" build local spatial index of US metro areas """
self.lookup = CachedMetroLookup(precision=GEOHASH_PRECISION)
self.extractor = WordExtractor()
def mapper(self, _, data):
# ignore HR geo-tweets for job postings
expr = "|".join(["(job)", "(hiring)", "(career)"])
if data['description'] and re.findall(expr, data['description']):
return
# lookup nearest metro area
metro = self.lookup.get(data['lonlat'], METRO_DISTANCE)
if not metro:
return
# count each word
for word in self.extractor.run(data['text']):
yield (metro, word), 1
def combiner(self, key, value):
yield key, sum(value)
def reducer(self, key, values):
total = int(sum(values))
if total < MIN_WORD_COUNT:
return
metro, word = key
yield metro, (total, word)
def reducer_output(self, metro, values):
records = []
for record in values:
total, word = record
records.append(dict(
metro_area=metro,
word=word,
count=total
))
output = "{0}\t{1}\t{2}"
output = output.format(metro.encode('utf-8'), total, word.encode('utf-8'))
yield None, output
if self.mongo:
self.mongo.insert_many(records)
|
meyersj/geotweet | geotweet/osm.py | OSMRunner.run | python | def run(self):
states = open(self.states, 'r').read().splitlines()
for state in states:
url = self.build_url(state)
log = "Downloading State < {0} > from < {1} >"
logging.info(log.format(state, url))
tmp = self.download(self.output, url, self.overwrite)
self.s3.store(self.extract(tmp, self.tmp2poi(tmp))) | For each state in states file build url and download file | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/osm.py#L37-L45 | [
"def download(self, output_dir, url, overwrite):\n \"\"\" Dowload file to /tmp \"\"\"\n tmp = self.url2tmp(output_dir, url)\n if os.path.isfile(tmp) and not overwrite:\n logging.info(\"File {0} already exists. Skipping download.\".format(tmp))\n return tmp\n f = open(tmp, 'wb')\n logging.info(\"Downloading {0}\".format(url))\n res = requests.get(url, stream=True)\n if res.status_code != 200:\n # failed to download, cleanup and raise exception\n f.close()\n os.remove(tmp)\n error = \"{0}\\n\\nFailed to download < {0} >\".format(res.content, url)\n raise IOError(error)\n for block in res.iter_content(1024):\n f.write(block)\n f.close()\n return tmp\n",
"def extract(self, pbf, output):\n \"\"\" extract POI nodes from osm pbf extract \"\"\"\n logging.info(\"Extracting POI nodes from {0} to {1}\".format(pbf, output))\n with open(output, 'w') as f:\n # define callback for each node that is processed\n def nodes_callback(nodes):\n for node in nodes:\n node_id, tags, coordinates = node\n # if any tags have a matching key then write record\n if any([t in tags for t in POI_TAGS]):\n f.write(json.dumps(dict(tags=tags, coordinates=coordinates)))\n f.write('\\n')\n parser = OSMParser(concurrency=4, nodes_callback=nodes_callback)\n parser.parse(pbf)\n return output\n",
"def build_url(self, state):\n return US_GEOFABRIK.format(state.replace(' ', '-').lower())\n",
"def tmp2poi(self, osm):\n return osm.rsplit('.', 2)[0] + '.poi'\n"
] | class OSMRunner(object):
"""
Downloads OSM extracts from GeoFabrik in pbf format
"""
def __init__(self, args):
self.states = args.states
if not args.states:
self.states = DEFAULT_STATES
self.output = args.output
self.overwrite = False
self.s3 = S3Loader(bucket=args.bucket, region=args.region)
try:
self.s3.valid()
except EnvironmentError as e:
logging.error(e)
sys.exit(1)
def download(self, output_dir, url, overwrite):
""" Dowload file to /tmp """
tmp = self.url2tmp(output_dir, url)
if os.path.isfile(tmp) and not overwrite:
logging.info("File {0} already exists. Skipping download.".format(tmp))
return tmp
f = open(tmp, 'wb')
logging.info("Downloading {0}".format(url))
res = requests.get(url, stream=True)
if res.status_code != 200:
# failed to download, cleanup and raise exception
f.close()
os.remove(tmp)
error = "{0}\n\nFailed to download < {0} >".format(res.content, url)
raise IOError(error)
for block in res.iter_content(1024):
f.write(block)
f.close()
return tmp
def extract(self, pbf, output):
""" extract POI nodes from osm pbf extract """
logging.info("Extracting POI nodes from {0} to {1}".format(pbf, output))
with open(output, 'w') as f:
# define callback for each node that is processed
def nodes_callback(nodes):
for node in nodes:
node_id, tags, coordinates = node
# if any tags have a matching key then write record
if any([t in tags for t in POI_TAGS]):
f.write(json.dumps(dict(tags=tags, coordinates=coordinates)))
f.write('\n')
parser = OSMParser(concurrency=4, nodes_callback=nodes_callback)
parser.parse(pbf)
return output
def build_url(self, state):
return US_GEOFABRIK.format(state.replace(' ', '-').lower())
def url2tmp(self, root, url):
""" convert url path to filename """
filename = url.rsplit('/', 1)[-1]
return os.path.join(root, filename)
def tmp2poi(self, osm):
return osm.rsplit('.', 2)[0] + '.poi'
|
meyersj/geotweet | geotweet/osm.py | OSMRunner.download | python | def download(self, output_dir, url, overwrite):
tmp = self.url2tmp(output_dir, url)
if os.path.isfile(tmp) and not overwrite:
logging.info("File {0} already exists. Skipping download.".format(tmp))
return tmp
f = open(tmp, 'wb')
logging.info("Downloading {0}".format(url))
res = requests.get(url, stream=True)
if res.status_code != 200:
# failed to download, cleanup and raise exception
f.close()
os.remove(tmp)
error = "{0}\n\nFailed to download < {0} >".format(res.content, url)
raise IOError(error)
for block in res.iter_content(1024):
f.write(block)
f.close()
return tmp | Dowload file to /tmp | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/osm.py#L47-L65 | [
"def url2tmp(self, root, url):\n \"\"\" convert url path to filename \"\"\"\n filename = url.rsplit('/', 1)[-1]\n return os.path.join(root, filename)\n"
] | class OSMRunner(object):
"""
Downloads OSM extracts from GeoFabrik in pbf format
"""
def __init__(self, args):
self.states = args.states
if not args.states:
self.states = DEFAULT_STATES
self.output = args.output
self.overwrite = False
self.s3 = S3Loader(bucket=args.bucket, region=args.region)
try:
self.s3.valid()
except EnvironmentError as e:
logging.error(e)
sys.exit(1)
def run(self):
""" For each state in states file build url and download file """
states = open(self.states, 'r').read().splitlines()
for state in states:
url = self.build_url(state)
log = "Downloading State < {0} > from < {1} >"
logging.info(log.format(state, url))
tmp = self.download(self.output, url, self.overwrite)
self.s3.store(self.extract(tmp, self.tmp2poi(tmp)))
def extract(self, pbf, output):
""" extract POI nodes from osm pbf extract """
logging.info("Extracting POI nodes from {0} to {1}".format(pbf, output))
with open(output, 'w') as f:
# define callback for each node that is processed
def nodes_callback(nodes):
for node in nodes:
node_id, tags, coordinates = node
# if any tags have a matching key then write record
if any([t in tags for t in POI_TAGS]):
f.write(json.dumps(dict(tags=tags, coordinates=coordinates)))
f.write('\n')
parser = OSMParser(concurrency=4, nodes_callback=nodes_callback)
parser.parse(pbf)
return output
def build_url(self, state):
return US_GEOFABRIK.format(state.replace(' ', '-').lower())
def url2tmp(self, root, url):
""" convert url path to filename """
filename = url.rsplit('/', 1)[-1]
return os.path.join(root, filename)
def tmp2poi(self, osm):
return osm.rsplit('.', 2)[0] + '.poi'
|
meyersj/geotweet | geotweet/osm.py | OSMRunner.extract | python | def extract(self, pbf, output):
logging.info("Extracting POI nodes from {0} to {1}".format(pbf, output))
with open(output, 'w') as f:
# define callback for each node that is processed
def nodes_callback(nodes):
for node in nodes:
node_id, tags, coordinates = node
# if any tags have a matching key then write record
if any([t in tags for t in POI_TAGS]):
f.write(json.dumps(dict(tags=tags, coordinates=coordinates)))
f.write('\n')
parser = OSMParser(concurrency=4, nodes_callback=nodes_callback)
parser.parse(pbf)
return output | extract POI nodes from osm pbf extract | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/osm.py#L67-L81 | null | class OSMRunner(object):
"""
Downloads OSM extracts from GeoFabrik in pbf format
"""
def __init__(self, args):
self.states = args.states
if not args.states:
self.states = DEFAULT_STATES
self.output = args.output
self.overwrite = False
self.s3 = S3Loader(bucket=args.bucket, region=args.region)
try:
self.s3.valid()
except EnvironmentError as e:
logging.error(e)
sys.exit(1)
def run(self):
""" For each state in states file build url and download file """
states = open(self.states, 'r').read().splitlines()
for state in states:
url = self.build_url(state)
log = "Downloading State < {0} > from < {1} >"
logging.info(log.format(state, url))
tmp = self.download(self.output, url, self.overwrite)
self.s3.store(self.extract(tmp, self.tmp2poi(tmp)))
def download(self, output_dir, url, overwrite):
""" Dowload file to /tmp """
tmp = self.url2tmp(output_dir, url)
if os.path.isfile(tmp) and not overwrite:
logging.info("File {0} already exists. Skipping download.".format(tmp))
return tmp
f = open(tmp, 'wb')
logging.info("Downloading {0}".format(url))
res = requests.get(url, stream=True)
if res.status_code != 200:
# failed to download, cleanup and raise exception
f.close()
os.remove(tmp)
error = "{0}\n\nFailed to download < {0} >".format(res.content, url)
raise IOError(error)
for block in res.iter_content(1024):
f.write(block)
f.close()
return tmp
def build_url(self, state):
return US_GEOFABRIK.format(state.replace(' ', '-').lower())
def url2tmp(self, root, url):
""" convert url path to filename """
filename = url.rsplit('/', 1)[-1]
return os.path.join(root, filename)
def tmp2poi(self, osm):
return osm.rsplit('.', 2)[0] + '.poi'
|
meyersj/geotweet | geotweet/osm.py | OSMRunner.url2tmp | python | def url2tmp(self, root, url):
filename = url.rsplit('/', 1)[-1]
return os.path.join(root, filename) | convert url path to filename | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/osm.py#L86-L89 | null | class OSMRunner(object):
"""
Downloads OSM extracts from GeoFabrik in pbf format
"""
def __init__(self, args):
self.states = args.states
if not args.states:
self.states = DEFAULT_STATES
self.output = args.output
self.overwrite = False
self.s3 = S3Loader(bucket=args.bucket, region=args.region)
try:
self.s3.valid()
except EnvironmentError as e:
logging.error(e)
sys.exit(1)
def run(self):
""" For each state in states file build url and download file """
states = open(self.states, 'r').read().splitlines()
for state in states:
url = self.build_url(state)
log = "Downloading State < {0} > from < {1} >"
logging.info(log.format(state, url))
tmp = self.download(self.output, url, self.overwrite)
self.s3.store(self.extract(tmp, self.tmp2poi(tmp)))
def download(self, output_dir, url, overwrite):
""" Dowload file to /tmp """
tmp = self.url2tmp(output_dir, url)
if os.path.isfile(tmp) and not overwrite:
logging.info("File {0} already exists. Skipping download.".format(tmp))
return tmp
f = open(tmp, 'wb')
logging.info("Downloading {0}".format(url))
res = requests.get(url, stream=True)
if res.status_code != 200:
# failed to download, cleanup and raise exception
f.close()
os.remove(tmp)
error = "{0}\n\nFailed to download < {0} >".format(res.content, url)
raise IOError(error)
for block in res.iter_content(1024):
f.write(block)
f.close()
return tmp
def extract(self, pbf, output):
""" extract POI nodes from osm pbf extract """
logging.info("Extracting POI nodes from {0} to {1}".format(pbf, output))
with open(output, 'w') as f:
# define callback for each node that is processed
def nodes_callback(nodes):
for node in nodes:
node_id, tags, coordinates = node
# if any tags have a matching key then write record
if any([t in tags for t in POI_TAGS]):
f.write(json.dumps(dict(tags=tags, coordinates=coordinates)))
f.write('\n')
parser = OSMParser(concurrency=4, nodes_callback=nodes_callback)
parser.parse(pbf)
return output
def build_url(self, state):
return US_GEOFABRIK.format(state.replace(' ', '-').lower())
def tmp2poi(self, osm):
return osm.rsplit('.', 2)[0] + '.poi'
|
meyersj/geotweet | geotweet/mapreduce/poi_nearby_tweets.py | POINearbyTweetsMRJob.mapper_metro | python | def mapper_metro(self, _, data):
# OSM POI record
if 'tags' in data:
type_tag = 1
lonlat = data['coordinates']
payload = data['tags']
# Tweet with coordinates from Streaming API
elif 'user_id' in data:
type_tag = 2
# only allow tweets from the listed domains to try and filter out
# noise such as HR tweets, Weather reports and news updates
accept = [
"twitter\.com",
"foursquare\.com",
"instagram\.com",
"untappd\.com"
]
expr = "|".join(accept)
if not re.findall(expr, data['source']):
return
lonlat = data['lonlat']
payload = None
# spatial lookup using Rtree with cached results
metro = self.lookup.get(lonlat, METRO_DISTANCE)
if not metro:
return
yield metro, (type_tag, lonlat, payload) | map each osm POI and geotweets based on spatial lookup of metro area | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/poi_nearby_tweets.py#L81-L108 | null | class POINearbyTweetsMRJob(MRJob):
""" Count common OSM points-of-interest around Tweets with coordinates """
INPUT_PROTOCOL = JSONValueProtocol
INTERNAL_PROTOCOL = JSONProtocol
OUTPUT_PROTOCOL = RawValueProtocol
SORT_VALUES = True
def steps(self):
return [
# 1. lookup metro area for each geotweet and osm POI
# emit to same reducer to perform POI lookup
# 2. lookup nearby osm POIs around each geotweet
# 3. emit metro area + name of POI and 1 to count
MRStep(
mapper_init=self.mapper_init_metro,
mapper=self.mapper_metro,
reducer=self.reducer_metro
),
# aggregate count for each (metro area, POI)
MRStep(
reducer=self.reducer_count
),
# convert output to final form and persist to Mongo
MRStep(
reducer_init=self.reducer_init_output,
reducer=self.reducer_output
)
]
def mapper_init_metro(self):
""" build local spatial index of US metro areas """
self.lookup = CachedMetroLookup(precision=METRO_GEOHASH_PRECISION)
def reducer_metro(self, metro, values):
"""
Output tags of POI locations nearby tweet locations
Values will be sorted coming into reducer.
First element in each value tuple will be either 1 (osm POI) or 2 (geotweet).
Build a spatial index with POI records.
For each tweet lookup nearby POI, and emit tag values for predefined tags.
"""
lookup = CachedLookup(precision=POI_GEOHASH_PRECISION)
for i, value in enumerate(values):
type_tag, lonlat, data = value
if type_tag == 1:
# OSM POI node, construct geojson and add to Rtree index
lookup.insert(i, dict(
geometry=dict(type='Point', coordinates=project(lonlat)),
properties=dict(tags=data)
))
else:
# geotweet, lookup nearest POI from index
if not lookup.data_store:
return
poi_names = []
kwargs = dict(buffer_size=POI_DISTANCE, multiple=True)
# lookup nearby POI from Rtree index (caching results)
# for any tags we care about emit the tags value and 1
for poi in lookup.get(lonlat, **kwargs):
has_tag = [ tag in poi['tags'] for tag in POI_TAGS ]
if any(has_tag) and 'name' in poi['tags']:
poi_names.append(poi['tags']['name'])
for poi in set(poi_names):
yield (metro, poi), 1
def reducer_count(self, key, values):
""" count occurences for each (metro, POI) record """
total = sum(values)
metro, poi = key
# group data by metro areas for final output
yield metro, (total, poi)
def reducer_init_output(self):
""" establish connection to MongoDB """
try:
self.mongo = MongoGeo(db=DB, collection=COLLECTION, timeout=MONGO_TIMEOUT)
except ServerSelectionTimeoutError:
# failed to connect to running MongoDB instance
self.mongo = None
def reducer_output(self, metro, values):
""" store each record in MongoDB and output tab delimited lines """
records = []
# build up list of data for each metro area and submit as one network
# call instead of individually
for value in values:
total, poi = value
records.append(dict(
metro_area=metro,
poi=poi,
count=total
))
output = "{0}\t{1}\t{2}"
output = output.format(metro.encode('utf-8'), total, poi.encode('utf-8'))
yield None, output
if self.mongo:
self.mongo.insert_many(records)
|
meyersj/geotweet | geotweet/mapreduce/poi_nearby_tweets.py | POINearbyTweetsMRJob.reducer_metro | python | def reducer_metro(self, metro, values):
lookup = CachedLookup(precision=POI_GEOHASH_PRECISION)
for i, value in enumerate(values):
type_tag, lonlat, data = value
if type_tag == 1:
# OSM POI node, construct geojson and add to Rtree index
lookup.insert(i, dict(
geometry=dict(type='Point', coordinates=project(lonlat)),
properties=dict(tags=data)
))
else:
# geotweet, lookup nearest POI from index
if not lookup.data_store:
return
poi_names = []
kwargs = dict(buffer_size=POI_DISTANCE, multiple=True)
# lookup nearby POI from Rtree index (caching results)
# for any tags we care about emit the tags value and 1
for poi in lookup.get(lonlat, **kwargs):
has_tag = [ tag in poi['tags'] for tag in POI_TAGS ]
if any(has_tag) and 'name' in poi['tags']:
poi_names.append(poi['tags']['name'])
for poi in set(poi_names):
yield (metro, poi), 1 | Output tags of POI locations nearby tweet locations
Values will be sorted coming into reducer.
First element in each value tuple will be either 1 (osm POI) or 2 (geotweet).
Build a spatial index with POI records.
For each tweet lookup nearby POI, and emit tag values for predefined tags. | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/poi_nearby_tweets.py#L110-L142 | [
"def project(lonlat):\n return transform(proj4326, proj102005, *lonlat)\n"
] | class POINearbyTweetsMRJob(MRJob):
""" Count common OSM points-of-interest around Tweets with coordinates """
INPUT_PROTOCOL = JSONValueProtocol
INTERNAL_PROTOCOL = JSONProtocol
OUTPUT_PROTOCOL = RawValueProtocol
SORT_VALUES = True
def steps(self):
return [
# 1. lookup metro area for each geotweet and osm POI
# emit to same reducer to perform POI lookup
# 2. lookup nearby osm POIs around each geotweet
# 3. emit metro area + name of POI and 1 to count
MRStep(
mapper_init=self.mapper_init_metro,
mapper=self.mapper_metro,
reducer=self.reducer_metro
),
# aggregate count for each (metro area, POI)
MRStep(
reducer=self.reducer_count
),
# convert output to final form and persist to Mongo
MRStep(
reducer_init=self.reducer_init_output,
reducer=self.reducer_output
)
]
def mapper_init_metro(self):
""" build local spatial index of US metro areas """
self.lookup = CachedMetroLookup(precision=METRO_GEOHASH_PRECISION)
def mapper_metro(self, _, data):
""" map each osm POI and geotweets based on spatial lookup of metro area """
# OSM POI record
if 'tags' in data:
type_tag = 1
lonlat = data['coordinates']
payload = data['tags']
# Tweet with coordinates from Streaming API
elif 'user_id' in data:
type_tag = 2
# only allow tweets from the listed domains to try and filter out
# noise such as HR tweets, Weather reports and news updates
accept = [
"twitter\.com",
"foursquare\.com",
"instagram\.com",
"untappd\.com"
]
expr = "|".join(accept)
if not re.findall(expr, data['source']):
return
lonlat = data['lonlat']
payload = None
# spatial lookup using Rtree with cached results
metro = self.lookup.get(lonlat, METRO_DISTANCE)
if not metro:
return
yield metro, (type_tag, lonlat, payload)
def reducer_metro(self, metro, values):
"""
Output tags of POI locations nearby tweet locations
Values will be sorted coming into reducer.
First element in each value tuple will be either 1 (osm POI) or 2 (geotweet).
Build a spatial index with POI records.
For each tweet lookup nearby POI, and emit tag values for predefined tags.
"""
lookup = CachedLookup(precision=POI_GEOHASH_PRECISION)
for i, value in enumerate(values):
type_tag, lonlat, data = value
if type_tag == 1:
# OSM POI node, construct geojson and add to Rtree index
lookup.insert(i, dict(
geometry=dict(type='Point', coordinates=project(lonlat)),
properties=dict(tags=data)
))
else:
# geotweet, lookup nearest POI from index
if not lookup.data_store:
return
poi_names = []
kwargs = dict(buffer_size=POI_DISTANCE, multiple=True)
# lookup nearby POI from Rtree index (caching results)
# for any tags we care about emit the tags value and 1
for poi in lookup.get(lonlat, **kwargs):
has_tag = [ tag in poi['tags'] for tag in POI_TAGS ]
if any(has_tag) and 'name' in poi['tags']:
poi_names.append(poi['tags']['name'])
for poi in set(poi_names):
yield (metro, poi), 1
def reducer_count(self, key, values):
""" count occurences for each (metro, POI) record """
total = sum(values)
metro, poi = key
# group data by metro areas for final output
yield metro, (total, poi)
def reducer_init_output(self):
""" establish connection to MongoDB """
try:
self.mongo = MongoGeo(db=DB, collection=COLLECTION, timeout=MONGO_TIMEOUT)
except ServerSelectionTimeoutError:
# failed to connect to running MongoDB instance
self.mongo = None
def reducer_output(self, metro, values):
""" store each record in MongoDB and output tab delimited lines """
records = []
# build up list of data for each metro area and submit as one network
# call instead of individually
for value in values:
total, poi = value
records.append(dict(
metro_area=metro,
poi=poi,
count=total
))
output = "{0}\t{1}\t{2}"
output = output.format(metro.encode('utf-8'), total, poi.encode('utf-8'))
yield None, output
if self.mongo:
self.mongo.insert_many(records)
|
meyersj/geotweet | geotweet/mapreduce/poi_nearby_tweets.py | POINearbyTweetsMRJob.reducer_count | python | def reducer_count(self, key, values):
total = sum(values)
metro, poi = key
# group data by metro areas for final output
yield metro, (total, poi) | count occurences for each (metro, POI) record | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/poi_nearby_tweets.py#L144-L149 | null | class POINearbyTweetsMRJob(MRJob):
""" Count common OSM points-of-interest around Tweets with coordinates """
INPUT_PROTOCOL = JSONValueProtocol
INTERNAL_PROTOCOL = JSONProtocol
OUTPUT_PROTOCOL = RawValueProtocol
SORT_VALUES = True
def steps(self):
return [
# 1. lookup metro area for each geotweet and osm POI
# emit to same reducer to perform POI lookup
# 2. lookup nearby osm POIs around each geotweet
# 3. emit metro area + name of POI and 1 to count
MRStep(
mapper_init=self.mapper_init_metro,
mapper=self.mapper_metro,
reducer=self.reducer_metro
),
# aggregate count for each (metro area, POI)
MRStep(
reducer=self.reducer_count
),
# convert output to final form and persist to Mongo
MRStep(
reducer_init=self.reducer_init_output,
reducer=self.reducer_output
)
]
def mapper_init_metro(self):
""" build local spatial index of US metro areas """
self.lookup = CachedMetroLookup(precision=METRO_GEOHASH_PRECISION)
def mapper_metro(self, _, data):
""" map each osm POI and geotweets based on spatial lookup of metro area """
# OSM POI record
if 'tags' in data:
type_tag = 1
lonlat = data['coordinates']
payload = data['tags']
# Tweet with coordinates from Streaming API
elif 'user_id' in data:
type_tag = 2
# only allow tweets from the listed domains to try and filter out
# noise such as HR tweets, Weather reports and news updates
accept = [
"twitter\.com",
"foursquare\.com",
"instagram\.com",
"untappd\.com"
]
expr = "|".join(accept)
if not re.findall(expr, data['source']):
return
lonlat = data['lonlat']
payload = None
# spatial lookup using Rtree with cached results
metro = self.lookup.get(lonlat, METRO_DISTANCE)
if not metro:
return
yield metro, (type_tag, lonlat, payload)
def reducer_metro(self, metro, values):
"""
Output tags of POI locations nearby tweet locations
Values will be sorted coming into reducer.
First element in each value tuple will be either 1 (osm POI) or 2 (geotweet).
Build a spatial index with POI records.
For each tweet lookup nearby POI, and emit tag values for predefined tags.
"""
lookup = CachedLookup(precision=POI_GEOHASH_PRECISION)
for i, value in enumerate(values):
type_tag, lonlat, data = value
if type_tag == 1:
# OSM POI node, construct geojson and add to Rtree index
lookup.insert(i, dict(
geometry=dict(type='Point', coordinates=project(lonlat)),
properties=dict(tags=data)
))
else:
# geotweet, lookup nearest POI from index
if not lookup.data_store:
return
poi_names = []
kwargs = dict(buffer_size=POI_DISTANCE, multiple=True)
# lookup nearby POI from Rtree index (caching results)
# for any tags we care about emit the tags value and 1
for poi in lookup.get(lonlat, **kwargs):
has_tag = [ tag in poi['tags'] for tag in POI_TAGS ]
if any(has_tag) and 'name' in poi['tags']:
poi_names.append(poi['tags']['name'])
for poi in set(poi_names):
yield (metro, poi), 1
def reducer_init_output(self):
""" establish connection to MongoDB """
try:
self.mongo = MongoGeo(db=DB, collection=COLLECTION, timeout=MONGO_TIMEOUT)
except ServerSelectionTimeoutError:
# failed to connect to running MongoDB instance
self.mongo = None
def reducer_output(self, metro, values):
""" store each record in MongoDB and output tab delimited lines """
records = []
# build up list of data for each metro area and submit as one network
# call instead of individually
for value in values:
total, poi = value
records.append(dict(
metro_area=metro,
poi=poi,
count=total
))
output = "{0}\t{1}\t{2}"
output = output.format(metro.encode('utf-8'), total, poi.encode('utf-8'))
yield None, output
if self.mongo:
self.mongo.insert_many(records)
|
meyersj/geotweet | geotweet/mapreduce/poi_nearby_tweets.py | POINearbyTweetsMRJob.reducer_output | python | def reducer_output(self, metro, values):
records = []
# build up list of data for each metro area and submit as one network
# call instead of individually
for value in values:
total, poi = value
records.append(dict(
metro_area=metro,
poi=poi,
count=total
))
output = "{0}\t{1}\t{2}"
output = output.format(metro.encode('utf-8'), total, poi.encode('utf-8'))
yield None, output
if self.mongo:
self.mongo.insert_many(records) | store each record in MongoDB and output tab delimited lines | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/poi_nearby_tweets.py#L159-L175 | null | class POINearbyTweetsMRJob(MRJob):
""" Count common OSM points-of-interest around Tweets with coordinates """
INPUT_PROTOCOL = JSONValueProtocol
INTERNAL_PROTOCOL = JSONProtocol
OUTPUT_PROTOCOL = RawValueProtocol
SORT_VALUES = True
def steps(self):
return [
# 1. lookup metro area for each geotweet and osm POI
# emit to same reducer to perform POI lookup
# 2. lookup nearby osm POIs around each geotweet
# 3. emit metro area + name of POI and 1 to count
MRStep(
mapper_init=self.mapper_init_metro,
mapper=self.mapper_metro,
reducer=self.reducer_metro
),
# aggregate count for each (metro area, POI)
MRStep(
reducer=self.reducer_count
),
# convert output to final form and persist to Mongo
MRStep(
reducer_init=self.reducer_init_output,
reducer=self.reducer_output
)
]
def mapper_init_metro(self):
""" build local spatial index of US metro areas """
self.lookup = CachedMetroLookup(precision=METRO_GEOHASH_PRECISION)
def mapper_metro(self, _, data):
""" map each osm POI and geotweets based on spatial lookup of metro area """
# OSM POI record
if 'tags' in data:
type_tag = 1
lonlat = data['coordinates']
payload = data['tags']
# Tweet with coordinates from Streaming API
elif 'user_id' in data:
type_tag = 2
# only allow tweets from the listed domains to try and filter out
# noise such as HR tweets, Weather reports and news updates
accept = [
"twitter\.com",
"foursquare\.com",
"instagram\.com",
"untappd\.com"
]
expr = "|".join(accept)
if not re.findall(expr, data['source']):
return
lonlat = data['lonlat']
payload = None
# spatial lookup using Rtree with cached results
metro = self.lookup.get(lonlat, METRO_DISTANCE)
if not metro:
return
yield metro, (type_tag, lonlat, payload)
def reducer_metro(self, metro, values):
"""
Output tags of POI locations nearby tweet locations
Values will be sorted coming into reducer.
First element in each value tuple will be either 1 (osm POI) or 2 (geotweet).
Build a spatial index with POI records.
For each tweet lookup nearby POI, and emit tag values for predefined tags.
"""
lookup = CachedLookup(precision=POI_GEOHASH_PRECISION)
for i, value in enumerate(values):
type_tag, lonlat, data = value
if type_tag == 1:
# OSM POI node, construct geojson and add to Rtree index
lookup.insert(i, dict(
geometry=dict(type='Point', coordinates=project(lonlat)),
properties=dict(tags=data)
))
else:
# geotweet, lookup nearest POI from index
if not lookup.data_store:
return
poi_names = []
kwargs = dict(buffer_size=POI_DISTANCE, multiple=True)
# lookup nearby POI from Rtree index (caching results)
# for any tags we care about emit the tags value and 1
for poi in lookup.get(lonlat, **kwargs):
has_tag = [ tag in poi['tags'] for tag in POI_TAGS ]
if any(has_tag) and 'name' in poi['tags']:
poi_names.append(poi['tags']['name'])
for poi in set(poi_names):
yield (metro, poi), 1
def reducer_count(self, key, values):
""" count occurences for each (metro, POI) record """
total = sum(values)
metro, poi = key
# group data by metro areas for final output
yield metro, (total, poi)
def reducer_init_output(self):
""" establish connection to MongoDB """
try:
self.mongo = MongoGeo(db=DB, collection=COLLECTION, timeout=MONGO_TIMEOUT)
except ServerSelectionTimeoutError:
# failed to connect to running MongoDB instance
self.mongo = None
|
meyersj/geotweet | geotweet/mapreduce/utils/words.py | WordExtractor.run | python | def run(self, line):
words = []
for word in self.clean_unicode(line.lower()).split():
if word.startswith('http'):
continue
cleaned = self.clean_punctuation(word)
if len(cleaned) > 1 and cleaned not in self.stopwords:
words.append(cleaned)
return words | Extract words from tweet
1. Remove non-ascii characters
2. Split line into individual words
3. Clean up puncuation characters | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/utils/words.py#L47-L63 | null | class WordExtractor(FileReader):
"""
Extract words from a tweet.
If a provided `src` keyword param references a local file or remote resource
containing list of stop words, the will be download and used to exclude
extracted words
"""
def __init__(self, src=STOPWORDS_LIST):
self.sub_all = re.compile("""[#.!?,"(){}[\]|]|&""")
self.sub_ends = re.compile("""^[@\\\/~]*|[\\\/:~]*$""")
self.stopwords = {}
if src:
if not self.is_valid_src(src):
error = "Arg src=< {0} > is invalid."
error += " Must be existing file or url that starts with 'http'"
raise ValueError(error.format(src))
words = self.read(src)
for word in words.splitlines():
self.stopwords[word] = ""
def clean_unicode(self, line):
chars = [char for char in line.lower()]
only_ascii = lambda char: char if ord(char) < 128 else ''
return str(''.join(filter(only_ascii, chars)))
def clean_punctuation(self, word):
return self.sub_ends.sub('', self.sub_all.sub('', word))
def run(self, line):
"""
Extract words from tweet
1. Remove non-ascii characters
2. Split line into individual words
3. Clean up puncuation characters
"""
words = []
for word in self.clean_unicode(line.lower()).split():
if word.startswith('http'):
continue
cleaned = self.clean_punctuation(word)
if len(cleaned) > 1 and cleaned not in self.stopwords:
words.append(cleaned)
return words
|
meyersj/geotweet | geotweet/mapreduce/utils/reader.py | FileReader.read | python | def read(self, src):
geojson = None
if not self.is_valid_src(src):
error = "File < {0} > does not exists or does start with 'http'."
raise ValueError(error.format(src))
if not self.is_url(src):
return open(src, 'r').read().decode('latin-1').encode('utf-8')
tmp = self.get_location(src)
# if src poits to url that was already downloaded
# read from local file instead
if os.path.isfile(tmp):
with open(tmp, 'r') as f:
return f.read()
# download file and write to local filesystem before returning
response = urllib2.urlopen(src)
data = response.read().decode('latin-1').encode('utf-8')
with open(tmp, 'w') as f:
f.write(data)
return data | Download GeoJSON file of US counties from url (S3 bucket) | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/utils/reader.py#L32-L51 | [
"def is_valid_src(self, src):\n return os.path.isfile(src) or self.is_url(src)\n"
] | class FileReader(object):
""" Read file from the local file system or remote url and cache """
def is_url(self, src):
return src.startswith('http')
def is_valid_src(self, src):
return os.path.isfile(src) or self.is_url(src)
def get_location(self, src):
digest = self.digest(src)
if not digest:
return None
return os.path.join('/tmp', "geotweet-file-{0}".format(digest))
def digest(self, src):
if not src or type(src) != str:
return None
m = hashlib.md5()
if self.is_url(src):
m.update(src)
else:
m.update(os.path.abspath(src))
return m.hexdigest()
|
meyersj/geotweet | geotweet/mapreduce/state_county_wordcount.py | StateCountyWordCountJob.mapper_init | python | def mapper_init(self):
self.counties = CachedCountyLookup(precision=GEOHASH_PRECISION)
self.extractor = WordExtractor() | Download counties geojson from S3 and build spatial index and cache | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/mapreduce/state_county_wordcount.py#L60-L63 | null | class StateCountyWordCountJob(MRJob):
"""
Count word occurences for US tweets by entire county, by State and County
A geojson file of US counties is downloaded from an S3 bucket. A RTree index
is built using the bounding box of each county, and is used for determining
State and County for each tweet.
"""
INPUT_PROTOCOL = JSONValueProtocol
INTERNAL_PROTOCOL = JSONProtocol
OUTPUT_PROTOCOL = RawValueProtocol
def steps(self):
return [
MRStep(
mapper_init=self.mapper_init,
mapper=self.mapper,
combiner=self.combiner,
reducer=self.reducer
)
]
def mapper(self, _, data):
# ignore HR geo-tweets for job postings
if data['description'] and self.hr_filter(data['description']):
return
lonlat = data['lonlat']
# spatial lookup for state and county
state, county = self.counties.get(lonlat)
if not state or not county:
return
# count words
for word in self.extractor.run(data['text']):
yield (word, ), 1
yield (word, state), 1
yield (word, state, county), 1
def hr_filter(self, text):
""" check if description of twitter using contains job related key words """
expr = "|".join(["(job)", "(hiring)", "(career)"])
return re.findall(expr, text)
def combiner(self, key, values):
yield key, sum(values)
def reducer(self, key, values):
total = int(sum(values))
if total < MIN_WORD_COUNT:
return
word = state = county = None
word = key[0]
if len(key) >= 2:
state = key[1]
if len(key) >= 3:
county = key[2]
output = "{0}\t{1}\t{2}\t{3}"
word = word.encode('utf-8')
state = state.encode('utf-8') if state else None
county = county.encode('utf-8') if county else None
yield None, output.format(word, state, county, total)
|
meyersj/geotweet | geotweet/geomongo/__init__.py | GeoMongo.run | python | def run(self):
logging.info("Starting GeoJSON MongoDB loading process.")
mongo = dict(uri=self.mongo, db=self.db, collection=self.collection)
self.load(self.source, **mongo)
logging.info("Finished loading {0} into MongoDB".format(self.source)) | Top level runner to load State and County GeoJSON files into Mongo DB | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/geomongo/__init__.py#L19-L24 | [
"def load(self, geojson, uri=None, db=None, collection=None):\n \"\"\" Load geojson file into mongodb instance \"\"\"\n logging.info(\"Mongo URI: {0}\".format(uri))\n logging.info(\"Mongo DB: {0}\".format(db))\n logging.info(\"Mongo Collection: {0}\".format(collection))\n logging.info(\"Geojson File to be loaded: {0}\".format(geojson))\n mongo = MongoGeo(db=db, collection=collection, uri=uri)\n GeoJSONLoader().load(geojson, mongo.insert) \n"
] | class GeoMongo(object):
def __init__(self, args):
self.source = args.file
self.mongo = args.mongo
self.db = args.db
self.collection = args.collection
def load(self, geojson, uri=None, db=None, collection=None):
""" Load geojson file into mongodb instance """
logging.info("Mongo URI: {0}".format(uri))
logging.info("Mongo DB: {0}".format(db))
logging.info("Mongo Collection: {0}".format(collection))
logging.info("Geojson File to be loaded: {0}".format(geojson))
mongo = MongoGeo(db=db, collection=collection, uri=uri)
GeoJSONLoader().load(geojson, mongo.insert)
|
meyersj/geotweet | geotweet/geomongo/__init__.py | GeoMongo.load | python | def load(self, geojson, uri=None, db=None, collection=None):
logging.info("Mongo URI: {0}".format(uri))
logging.info("Mongo DB: {0}".format(db))
logging.info("Mongo Collection: {0}".format(collection))
logging.info("Geojson File to be loaded: {0}".format(geojson))
mongo = MongoGeo(db=db, collection=collection, uri=uri)
GeoJSONLoader().load(geojson, mongo.insert) | Load geojson file into mongodb instance | train | https://github.com/meyersj/geotweet/blob/1a6b55f98adf34d1b91f172d9187d599616412d9/geotweet/geomongo/__init__.py#L26-L33 | null | class GeoMongo(object):
def __init__(self, args):
self.source = args.file
self.mongo = args.mongo
self.db = args.db
self.collection = args.collection
def run(self):
""" Top level runner to load State and County GeoJSON files into Mongo DB """
logging.info("Starting GeoJSON MongoDB loading process.")
mongo = dict(uri=self.mongo, db=self.db, collection=self.collection)
self.load(self.source, **mongo)
logging.info("Finished loading {0} into MongoDB".format(self.source))
|
elkiwy/paynter | paynter/paynter.py | Paynter.drawLine | python | def drawLine(self, x1, y1, x2, y2, silent=False):
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction) | Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L65-L123 | [
"def getActiveLayer(self):\n\t\"\"\"\n\tReturns the currently active :py:class:`Layer`.\n\n\t:rtype: A :py:class:`Layer` object.\n\t\"\"\"\n\treturn self.layers[self.activeLayer]\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.drawPoint | python | def drawPoint(self, x, y, silent=True):
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start) | Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L127-L147 | null | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.drawPath | python | def drawPath(self, pointList):
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1 | Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L153-L166 | [
"def drawLine(self, x1, y1, x2, y2, silent=False):\n\t\"\"\"\n\tDraws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.\n\tCoordinates are relative to the original layer size WITHOUT downsampling applied.\n\n\t:param x1: Starting X coordinate.\n\t:param y1: Starting Y coordinate.\n\t:param x2: End X coordinate.\n\t:param y2: End Y coordinate.\n\t:rtype: Nothing.\n\t\"\"\"\n\tstart = time.time()\n\n\t#Downsample the coordinates\n\tx1 = int(x1/config.DOWNSAMPLING)\n\tx2 = int(x2/config.DOWNSAMPLING)\n\ty1 = int(y1/config.DOWNSAMPLING)\n\ty2 = int(y2/config.DOWNSAMPLING)\n\tif not silent :\n\t\tprint('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))\n\n\t#Calculate the direction and the length of the step\n\tdirection = N.arctan2(y2 - y1, x2 - x1)\n\tlength = self.brush.spacing\n\n\t#Prepare the loop\n\tx, y = x1, y1\n\ttotalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)\n\tlay = self.image.getActiveLayer()\n\tcol = self.color\n\tsecCol = self.secondColor\n\tmirr = self.mirrorMode\n\n\t#If I use source caching..\n\tif self.brush.usesSourceCaching:\n\t\t#..than optimize it for faster drawing\n\t\tlaydata = lay.data\n\t\tx -= self.brush.brushSize*0.5\n\t\ty -= self.brush.brushSize*0.5\n\t\tcolbrsource = self.brush.coloredBrushSource\n\t\tcanvSize = config.CANVAS_SIZE\n\t\tbrmask = self.brush.brushMask\n\t\tfor _ in range(totalSteps):\n\t\t\t#Make the dab on this point\n\t\t\tapplyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)\n\n\t\t\t#Mode the point for the next step and update the distances\n\t\t\tx += lendir_x(length, direction)\n\t\t\ty += lendir_y(length, direction)\n\t#..if I don't use source caching..\n\telse:\n\t\t#..do the normal drawing\n\t\tfor _ in range(totalSteps):\n\t\t\t#Make the dab on this point\n\t\t\tself.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)\n\n\t\t\t#Mode the point for the next step and update the distances\n\t\t\tx += lendir_x(length, 
direction)\n\t\t\ty += lendir_y(length, direction)\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.drawClosedPath | python | def drawClosedPath(self, pointList):
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1]) | Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L169-L183 | [
"def drawLine(self, x1, y1, x2, y2, silent=False):\n\t\"\"\"\n\tDraws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.\n\tCoordinates are relative to the original layer size WITHOUT downsampling applied.\n\n\t:param x1: Starting X coordinate.\n\t:param y1: Starting Y coordinate.\n\t:param x2: End X coordinate.\n\t:param y2: End Y coordinate.\n\t:rtype: Nothing.\n\t\"\"\"\n\tstart = time.time()\n\n\t#Downsample the coordinates\n\tx1 = int(x1/config.DOWNSAMPLING)\n\tx2 = int(x2/config.DOWNSAMPLING)\n\ty1 = int(y1/config.DOWNSAMPLING)\n\ty2 = int(y2/config.DOWNSAMPLING)\n\tif not silent :\n\t\tprint('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))\n\n\t#Calculate the direction and the length of the step\n\tdirection = N.arctan2(y2 - y1, x2 - x1)\n\tlength = self.brush.spacing\n\n\t#Prepare the loop\n\tx, y = x1, y1\n\ttotalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)\n\tlay = self.image.getActiveLayer()\n\tcol = self.color\n\tsecCol = self.secondColor\n\tmirr = self.mirrorMode\n\n\t#If I use source caching..\n\tif self.brush.usesSourceCaching:\n\t\t#..than optimize it for faster drawing\n\t\tlaydata = lay.data\n\t\tx -= self.brush.brushSize*0.5\n\t\ty -= self.brush.brushSize*0.5\n\t\tcolbrsource = self.brush.coloredBrushSource\n\t\tcanvSize = config.CANVAS_SIZE\n\t\tbrmask = self.brush.brushMask\n\t\tfor _ in range(totalSteps):\n\t\t\t#Make the dab on this point\n\t\t\tapplyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)\n\n\t\t\t#Mode the point for the next step and update the distances\n\t\t\tx += lendir_x(length, direction)\n\t\t\ty += lendir_y(length, direction)\n\t#..if I don't use source caching..\n\telse:\n\t\t#..do the normal drawing\n\t\tfor _ in range(totalSteps):\n\t\t\t#Make the dab on this point\n\t\t\tself.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)\n\n\t\t\t#Mode the point for the next step and update the distances\n\t\t\tx += lendir_x(length, 
direction)\n\t\t\ty += lendir_y(length, direction)\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.drawRect | python | def drawRect(self, x1, y1, x2, y2, angle=0):
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices) | Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L186-L200 | [
"def rotateMatrix(pointList, cx, cy, angle):\t\n\trotatedPoints = []\n\t#For each point in the list\n\tfor point in pointList:\n\t\t#Grab the coords and get dir and len\n\t\toldX = point[0]\n\t\toldY = point[1]\n\t\tdirection = math.degrees(math.atan2(cy-oldY, cx-oldX))\n\t\tlength = math.sqrt((cx - oldX)**2 + (cy - oldY)**2)\n\n\t\t#Rotate them and insert in the return list\n\t\tnewX = cx+length*dcos(direction+angle)\n\t\tnewY = cy+length*dsin(direction+angle)\n\t\trotatedPoints.append([newX, newY])\n\treturn rotatedPoints\n",
"def drawClosedPath(self, pointList):\n\t\"\"\"\n\tDraws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.\n\tNo interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.\n\tCoordinates are relative to the original layer size WITHOUT downsampling applied.\n\n\t:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.\n\t:rtype: Nothing.\n\t\"\"\"\n\tself.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])\n\ti = 1\n\twhile i<len(pointList)-1:\n\t\tself.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])\n\t\ti+=1\n\tself.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.fillLayerWithColor | python | def fillLayerWithColor(self, color):
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3] | Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L203-L215 | [
"def getActiveLayer(self):\n\t\"\"\"\n\tReturns the currently active :py:class:`Layer`.\n\n\t:rtype: A :py:class:`Layer` object.\n\t\"\"\"\n\treturn self.layers[self.activeLayer]\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.addBorder | python | def addBorder(self, width, color=None):
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3] | Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L218-L250 | [
"def getActiveLayer(self):\n\t\"\"\"\n\tReturns the currently active :py:class:`Layer`.\n\n\t:rtype: A :py:class:`Layer` object.\n\t\"\"\"\n\treturn self.layers[self.activeLayer]\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.setColor | python | def setColor(self, color):
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color) | Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L259-L268 | null | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.setColorAlpha | python | def setColorAlpha(self, fixed=None, proportional=None):
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional) | Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L271-L282 | null | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.swapColors | python | def swapColors(self):
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255') | Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L303-L311 | null | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.setBrush | python | def setBrush(self, b, resize=0, proportional=None):
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color) | Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L314-L328 | [
"def doesUseSourceCaching(self):\n\treturn self.usesSourceCaching\n",
"def cacheBrush(self, color):\n\tself.coloredBrushSource = self.brushTip[:,:] * color.get_0_1()\n",
"def resizeBrush(self, newSize):\n\t#Check if I want to reset back to original\n\tif newSize==0:\n\t\tnewSize = self.originalSize\n\n\t#Don't do useless calculations\n\tif self.realCurrentSize == newSize:\n\t\treturn\n\n\t#Downsample the size-related parameters\n\tprint('resizing from :'+str(self.realCurrentSize)+' to:'+str(newSize))\n\tself.realCurrentSize = newSize\n\tnewSize = max(int(newSize/config.DOWNSAMPLING), 1)\n\n\t#Set the brushTip\n\tif self.multibrush:\n\t\t#Multibrush\n\t\tfor i in range(0,len(self.brushTip)):\n\t\t\tbtImg = PIL.Image.fromarray((self.brushTip[i]*255).astype(N.uint8), 'RGBA')\n\t\t\tbtImgScaled = btImg.resize((newSize, newSize), resample=resizeResample)\n\t\t\tbtArray = N.divide(N.array(btImgScaled).astype(N.float32), 255)\n\t\t\tself.brushTip[i] = btArray\n\t\tself.brushSize = self.brushTip[0].shape[0]\n\telse:\n\t\t#NormalBrush\n\t\tbtImg = PIL.Image.fromarray((self.brushTip*255).astype(N.uint8), 'RGBA')\n\t\tbtImgScaled = btImg.resize((newSize, newSize), resample=resizeResample)\n\t\tbtArray = N.divide(N.array(btImgScaled).astype(N.float32), 255)\n\t\tself.brushTip = btArray\n\t\tself.brushSize = self.brushTip.shape[0]\n\n\t#Set the perameters\n\tself.spacing = newSize*self.originalSpacing\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.setMirrorMode | python | def setMirrorMode(self, mirror):
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror | Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L331-L346 | null | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.renderImage | python | def renderImage(self, output='', show=True):
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG') | Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L349-L367 | [
"def mergeAllLayers(self):\n\t\"\"\"\n\tMerge all the layers together.\n\n\t:rtype: The result :py:class:`Layer` object. \n\t\"\"\"\n\tstart = time.time()\n\twhile(len(self.layers)>1):\n\t\tself.mergeBottomLayers()\n\tprint('merge time:'+str(time.time()-start))\n\treturn self.layers[0]\n"
] | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/paynter.py | Paynter.setActiveLayerEffect | python | def setActiveLayerEffect(self, effect):
self.image.layers[self.image.activeLayer].effect = effect | Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/paynter.py#L380-L387 | null | class Paynter:
"""
This class is the main object of the library and the one that will draw everything you ask.
To create this class you can use the default constructor.
.. code-block:: python
from paynter import *
P = Paynter()
"""
brush = 0
layer = 0
color = Color(0, 0, 0, 1)
secondColor = Color(1,1,1,1)
image = 0
mirrorMode = 0
#Init the paynter
def __init__(self):
#Setup some stuff
config.CANVAS_SIZE = int(config.REAL_CANVAS_SIZE/config.DOWNSAMPLING)
self.image = Image()
######################################################################
# Level 0 Functions, needs downsampling
######################################################################
#Draw a line between two points
def drawLine(self, x1, y1, x2, y2, silent=False):
"""
Draws a line on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Starting X coordinate.
:param y1: Starting Y coordinate.
:param x2: End X coordinate.
:param y2: End Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x1 = int(x1/config.DOWNSAMPLING)
x2 = int(x2/config.DOWNSAMPLING)
y1 = int(y1/config.DOWNSAMPLING)
y2 = int(y2/config.DOWNSAMPLING)
if not silent :
print('drawing line from: '+str((x1,y1))+' to: '+str((x2,y2)))
#Calculate the direction and the length of the step
direction = N.arctan2(y2 - y1, x2 - x1)
length = self.brush.spacing
#Prepare the loop
x, y = x1, y1
totalSteps = int(N.sqrt((x2 - x)**2 + (y2 - y)**2)/length)
lay = self.image.getActiveLayer()
col = self.color
secCol = self.secondColor
mirr = self.mirrorMode
#If I use source caching..
if self.brush.usesSourceCaching:
#..than optimize it for faster drawing
laydata = lay.data
x -= self.brush.brushSize*0.5
y -= self.brush.brushSize*0.5
colbrsource = self.brush.coloredBrushSource
canvSize = config.CANVAS_SIZE
brmask = self.brush.brushMask
for _ in range(totalSteps):
#Make the dab on this point
applyMirroredDab_jit(mirr, laydata, int(x), int(y), colbrsource.copy(), canvSize, brmask)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#..if I don't use source caching..
else:
#..do the normal drawing
for _ in range(totalSteps):
#Make the dab on this point
self.brush.makeDab(lay, int(x), int(y), col, secCol, mirror=mirr)
#Mode the point for the next step and update the distances
x += lendir_x(length, direction)
y += lendir_y(length, direction)
#Draw a single dab
def drawPoint(self, x, y, silent=True):
"""
Draws a point on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: Point X coordinate.
:param y1: Point Y coordinate.
:rtype: Nothing.
"""
start = time.time()
#Downsample the coordinates
x = int(x/config.DOWNSAMPLING)
y = int(y/config.DOWNSAMPLING)
#Apply the dab with or without source caching
if self.brush.usesSourceCaching:
applyMirroredDab_jit(self.mirrorMode, self.image.getActiveLayer().data, int(x-self.brush.brushSize*0.5), int(y-self.brush.brushSize*0.5), self.brush.coloredBrushSource.copy(), config.CANVAS_SIZE, self.brush.brushMask)
else:
self.brush.makeDab(self.image.getActiveLayer(), int(x), int(y), self.color, self.secondColor, mirror=self.mirrorMode)
config.AVGTIME.append(time.time()-start)
######################################################################
# Level 1 Functions, calls Level 0 functions, no downsampling
######################################################################
#Draw a path from a series of points
def drawPath(self, pointList):
"""
Draws a series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
#Draw a path from a series of points
def drawClosedPath(self, pointList):
"""
Draws a closed series of lines on the current :py:class:`Layer` with the current :py:class:`Brush`.
No interpolation is applied to these point and :py:meth:`drawLine` will be used to connect all the points lineraly.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param pointList: A list of point like :code:`[(0, 0), (100, 100), (100, 200)]`.
:rtype: Nothing.
"""
self.drawLine(pointList[0][0], pointList[0][1], pointList[1][0], pointList[1][1])
i = 1
while i<len(pointList)-1:
self.drawLine(pointList[i][0], pointList[i][1], pointList[i+1][0], pointList[i+1][1])
i+=1
self.drawLine(pointList[-1][0], pointList[-1][1], pointList[0][0], pointList[0][1])
#Draw a rectangle
def drawRect(self, x1, y1, x2, y2, angle=0):
"""
Draws a rectangle on the current :py:class:`Layer` with the current :py:class:`Brush`.
Coordinates are relative to the original layer size WITHOUT downsampling applied.
:param x1: The X of the top-left corner of the rectangle.
:param y1: The Y of the top-left corner of the rectangle.
:param x2: The X of the bottom-right corner of the rectangle.
:param y2: The Y of the bottom-right corner of the rectangle.
:param angle: An angle (in degrees) of rotation around the center of the rectangle.
:rtype: Nothing.
"""
vertices = [[x1,y1],[x2,y1],[x2,y2],[x1,y2],]
rotatedVertices = rotateMatrix(vertices, (x1+x2)*0.5, (y1+y2)*0.5, angle)
self.drawClosedPath(rotatedVertices)
#Fill the current layer with a color
def fillLayerWithColor(self, color):
"""
Fills the current :py:class:`Layer` with the current :py:class:`Color`.
:param color: The :py:class:`Color` to apply to the layer.
:rtype: Nothing.
"""
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
layer[:,:,0] = colorRGBA[0]
layer[:,:,1] = colorRGBA[1]
layer[:,:,2] = colorRGBA[2]
layer[:,:,3] = colorRGBA[3]
#Add border to image
def addBorder(self, width, color=None):
"""
Add a border to the current :py:class:`Layer`.
:param width: The width of the border.
:param color: The :py:class:`Color` of the border, current :py:class:`Color` is the default value.
:rtype: Nothing.
"""
width = int(width/config.DOWNSAMPLING)
if color==None:
color = self.color
layer = self.image.getActiveLayer().data
colorRGBA = color.get_0_255()
print('adding border'+str(colorRGBA)+str(width)+str(layer.shape))
layer[0:width,:,0] = colorRGBA[0]
layer[0:width,:,1] = colorRGBA[1]
layer[0:width,:,2] = colorRGBA[2]
layer[0:width,:,3] = colorRGBA[3]
layer[:,0:width,0] = colorRGBA[0]
layer[:,0:width,1] = colorRGBA[1]
layer[:,0:width,2] = colorRGBA[2]
layer[:,0:width,3] = colorRGBA[3]
layer[layer.shape[0]-width:layer.shape[0],:,0] = colorRGBA[0]
layer[layer.shape[0]-width:layer.shape[0],:,1] = colorRGBA[1]
layer[layer.shape[0]-width:layer.shape[0],:,2] = colorRGBA[2]
layer[layer.shape[0]-width:layer.shape[0],:,3] = colorRGBA[3]
layer[:,layer.shape[1]-width:layer.shape[1],0] = colorRGBA[0]
layer[:,layer.shape[1]-width:layer.shape[1],1] = colorRGBA[1]
layer[:,layer.shape[1]-width:layer.shape[1],2] = colorRGBA[2]
layer[:,layer.shape[1]-width:layer.shape[1],3] = colorRGBA[3]
######################################################################
# Setters, getters, and more
######################################################################
#Setter for color, takes 0-255 RGBA
def setColor(self, color):
"""
Sets the current :py:class:`Color` to use.
:param color: The :py:class:`Color` to use.
:rtype: Nothing.
"""
self.color = color
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(color)
#Change only the alpha of the current color
def setColorAlpha(self, fixed=None, proportional=None):
"""
Change the alpha of the current :py:class:`Color`.
:param fixed: Set the absolute 0-1 value of the alpha.
:param proportional: Set the relative value of the alpha (Es: If the current alpha is 0.8, a proportional value of 0.5 will set the final value to 0.4).
:rtype: Nothing.
"""
if fixed!=None:
self.color.set_alpha(fixed)
elif proportional!=None:
self.color.set_alpha(self.color.get_alpha()*proportional)
#Gets the brush alpha
def getColorAlpha(self):
"""
Retrieve the alpha of the current :py:class:`Color`.
:rtype: A float 0-1 value of the current :py:class:`Color` alpha.
"""
return self.color.get_alpha()
#Gets the brush size
def getBrushSize(self):
"""
Retrieve the size of the current :py:class:`Brush`.
:rtype: An integer value of the current :py:class:`Brush` size in pixels.
"""
return self.brush.brushSize
#Swap between first and second color
def swapColors(self):
"""
Swaps the current :py:class:`Color` with the secondary :py:class:`Color`.
:rtype: Nothing.
"""
rgba = self.color.get_0_255()
self.color = self.secondColor
self.secondColor = Color(rgba, '0-255')
#Setter for brush reference
def setBrush(self, b, resize=0, proportional=None):
"""
Sets the size of the current :py:class:`Brush`.
:param brush: The :py:class:`Brush` object to use as a brush.
:param resize: An optional absolute value to resize the brush before using it.
:param proportional: An optional relative float 0-1 value to resize the brush before using it.
:rtype: Nothing.
"""
if proportional!=None:
resize = int(self.brush.brushSize*0.5)
b.resizeBrush(resize) #If resize=0 it reset to its default size
self.brush = b
if self.brush and self.brush.doesUseSourceCaching():
self.brush.cacheBrush(self.color)
#Setter for the mirror mode
def setMirrorMode(self, mirror):
"""
Sets the mirror mode to use in the next operation.
:param mirror: A string object with one of these values : '', 'h', 'v', 'hv'. "h" stands for horizontal mirroring, while "v" stands for vertical mirroring. "hv" sets both at the same time.
:rtype: Nothing.
"""
assert (mirror=='' or mirror=='h' or mirror=='v' or mirror=='hv'or mirror=='vh'), 'setMirrorMode: wrong mirror mode, got '+str(mirror)+' expected one of ["","h","v","hv"]'
#Round up all the coordinates and convert them to int
if mirror=='': mirror = 0
elif mirror=='h': mirror = 1
elif mirror=='v': mirror = 2
elif mirror=='hv': mirror = 3
elif mirror=='vh': mirror = 3
self.mirrorMode = mirror
#Render the final image
def renderImage(self, output='', show=True):
"""
Renders the :py:class:`Image` and outputs the final PNG file.
:param output: A string with the output file path, can be empty if you don't want to save the final image.
:param show: A boolean telling the system to display the final image after the rendering is done.
:rtype: Nothing.
"""
#Merge all the layers to apply blending modes
resultLayer = self.image.mergeAllLayers()
#Show and save the results
img = PIL.Image.fromarray(resultLayer.data, 'RGBA')
if show:
img.show()
if output!='':
img.save(output, 'PNG')
#Shortcut for image operations
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` to the current :py:class:`Image`.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.image.newLayer(effect)
#Shortcut for image operations
def setActiveLayerEffect(self, effect):
"""
Changes the effect of the current active :py:class:`Layer`.
:param output: A string with the one of the blend modes listed in :py:meth:`newLayer`.
:rtype: Nothing.
"""
self.image.layers[self.image.activeLayer].effect = effect
#Shortcut for image operations
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
self.image.duplicateActiveLayer()
|
elkiwy/paynter | paynter/layer.py | Layer.showLayer | python | def showLayer(self, title='', debugText=''):
img = PIL.Image.fromarray(self.data, 'RGBA')
if debugText!='':
draw = PIL.ImageDraw.Draw(img)
font = PIL.ImageFont.truetype("DejaVuSansMono.ttf", 24)
draw.text((0, 0),debugText,(255,255,255),font=font)
img.show(title=title) | Shows the single layer.
:param title: A string with the title of the window where to render the image.
:param debugText: A string with some text to render over the image.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/layer.py#L65-L78 | null | class Layer:
"""
The :py:class:`Layer` class contains a 3D array of N.uint8 and a string with the blend mode of the layer.
An Image starts with one layer inside, but you can create more of them as follows:
.. code-block:: python
from paynter import *
#Inside the paynter there is already an Image with a blank Layer.
paynter = Paynter()
#Create a blank new layer
payner.newLayer()
#Create a new layer duplicating the current one
payner.duplicateActiveLayer()
#Gets the current active layer
layer = paynter.image.getActiveLayer()
"""
effect = ''
data = 0
#Layer constructor
# Layers data structure are always 0-255 ints
def __init__(self, data = None, color = None, effect = ''):
if type(data) is not N.ndarray:
self.data = N.zeros((config.CANVAS_SIZE, config.CANVAS_SIZE, 4), dtype=N.uint8)
if color==None:
color = Color(0,0,0,0)
colorRGBA = color.get_0_255()
self.data[:,:,0] = colorRGBA[0]
self.data[:,:,1] = colorRGBA[1]
self.data[:,:,2] = colorRGBA[2]
self.data[:,:,3] = colorRGBA[3]
else:
self.data = data
self.effect = effect
#Show layer as a separate image with the option to write on it
def showLayer(self, title='', debugText=''):
"""
Shows the single layer.
:param title: A string with the title of the window where to render the image.
:param debugText: A string with some text to render over the image.
:rtype: Nothing.
"""
img = PIL.Image.fromarray(self.data, 'RGBA')
if debugText!='':
draw = PIL.ImageDraw.Draw(img)
font = PIL.ImageFont.truetype("DejaVuSansMono.ttf", 24)
draw.text((0, 0),debugText,(255,255,255),font=font)
img.show(title=title)
|
elkiwy/paynter | paynter/color.py | getColors_Triad | python | def getColors_Triad(hue=None, sat = 1, val = 1, spread = 60):
palette = list()
if hue==None:
leadHue = randFloat(0, 1)
else:
leadHue = hue
palette.append(Color(0,0,0,1).set_HSV(leadHue, sat, val))
palette.append(Color(0,0,0,1).set_HSV((leadHue + 0.5 + spread/360) % 1, sat, val))
palette.append(Color(0,0,0,1).set_HSV((leadHue + 0.5 - spread/360) % 1, sat, val))
return palette | Create a palette with one main color and two opposite color evenly spread apart from the main one.
:param hue: A 0-1 float with the starting hue value.
:param sat: A 0-1 float with the palette saturation.
:param val: A 0-1 float with the palette value.
:param val: An int with the spread in degrees from the opposite color.
:rtype: A list of :py:class:`Color` objects. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/color.py#L71-L89 | [
"def randFloat(a,b):\n\treturn random.uniform(a,b)\n",
"def set_HSV(self, h, s, v):\n\t\"\"\"\n\tOverwrite the current color with this set of HSV values.\n\tThis keeps the current alpha value.\n\n\t:param h: A 0-1 float with the Hue.\n\t:param s: A 0-1 float with the Saturation.\n\t:param v: A 0-1 float with the Value.\n\t:rtype: The new :py:class:`Color` object.\n\t\"\"\"\n\trgb = hsv_to_rgb(h,s,v)\n\tself.r = rgb[0]\n\tself.g = rgb[1]\n\tself.b = rgb[2]\n\treturn self\n"
] | """
The Color class is another fundamental class in the Paynter library.
This module will mange the creation, modification, and storing of colors and palettes.
The color class is mainly used internally by the :py:class:`Paynter` class, but the user will still have to create the palette and sets the active colors manually through Paynter.setColor(color).
"""
#PaYnter Modules
from .utils import *
from numba import jitclass, float32, float64
import sys
@jit([float32[:](float32, float32, float32)], nopython=True)
def rgb_to_hsv(r, g, b):
maxc = max(r, g, b)
minc = min(r, g, b)
v = maxc
if minc == maxc:
return N.array((0.0, 0.0, v), dtype=N.float32)
s = (maxc-minc) / maxc
rc = (maxc-r) / (maxc-minc)
gc = (maxc-g) / (maxc-minc)
bc = (maxc-b) / (maxc-minc)
if r == maxc:
h = bc-gc
elif g == maxc:
h = 2.0+rc-bc
else:
h = 4.0+gc-rc
h = (h/6.0) % 1.0
return N.array((h, s, v), dtype=N.float32)
@jit(float32[:](float32, float32, float32), nopython=True)
def hsv_to_rgb(h, s, v):
if s == 0.0:
return N.array((v, v, v), dtype=N.float32)
i = int(h*6.0) # XXX assume int() truncates!
f = (h*6.0) - i
p = v*(1.0 - s)
q = v*(1.0 - s*f)
t = v*(1.0 - s*(1.0-f))
i = i%6
if i == 0:
return N.array((v, t, p), dtype=N.float32)
if i == 1:
return N.array((q, v, p), dtype=N.float32)
if i == 2:
return N.array((p, v, t), dtype=N.float32)
if i == 3:
return N.array((p, q, v), dtype=N.float32)
if i == 4:
return N.array((t, p, v), dtype=N.float32)
if i == 5:
return N.array((v, p, q), dtype=N.float32)
return N.array((0,0,0), dtype=N.float32)
######################################################################
# Color Management functions
######################################################################
#Get 3 colors with a triad pattern
#Colors are always internally stored as 0-1 floats
spec = [
('r', float32),
('g', float32),
('b', float32),
('a', float32)
]
@jitclass(spec)
class Color:
"""
The :py:class:`Color` class has 4 0-1 floats, one for each RGBA channel.
A Color class is created from palette functions or directly with their constructor.
.. code-block:: python
from paynter import *
#Get a list of colors
palette = getColors_Triad(spread = 20)
#You can create with 0-1 floats..
otherColor = Color(1, 0.5, 0, 1)
#.. or with 0-255 ints
sameColor = Color(255, 128, 0, 255)
"""
def __init__(self, r, g, b, a):
if max((r,g,b,a))>1:
r /= 255
b /= 255
g /= 255
a /= 255
self.r = r
self.g = g
self.b = b
self.a = a
#Return 0-255 RGBA
def get_0_255(self):
"""
Gets the RGBA 0-255 rappresentation of this color.
:rtype: A list of 4 ints.
"""
return [self.r*255, self.g*255, self.b*255, self.a*255]
#Return 0-1 RGBA
def get_0_1(self):
"""
Gets the RGBA 0-1 rappresentation of this color.
:rtype: A list of 4 floats.
"""
return N.array((self.r, self.g, self.b, self.a), dtype=N.float32)
#Returns 0-1 HSV
def get_HSV(self):
"""
Gets the HSV 0-1 rappresentation of this color.
This does ignore the alpha value.
:rtype: A list of 3 floats.
"""
return rgb_to_hsv(self.r, self.g, self.b)
#Overwrite RGB values with this HSV
def set_HSV(self, h, s, v):
"""
Overwrite the current color with this set of HSV values.
This keeps the current alpha value.
:param h: A 0-1 float with the Hue.
:param s: A 0-1 float with the Saturation.
:param v: A 0-1 float with the Value.
:rtype: The new :py:class:`Color` object.
"""
rgb = hsv_to_rgb(h,s,v)
self.r = rgb[0]
self.g = rgb[1]
self.b = rgb[2]
return self
#Sets a new alpha value
def set_alpha(self, newAlpha):
"""
Overwrite the current alpha value.
:param newAlpha: A 0-1 float with the desired alpha.
:rtype: The :py:class:`Color` object.
"""
self.a = newAlpha
return self
#Gets the alpha value
def get_alpha(self):
"""
Gets the current alpha value.
:rtype: A 0-1 float.
"""
return self.a
#Tweak the hue
def tweak_Hue(self, ammount):
"""
Change the current hue value by a certain ammount.
:param ammount: A 0-1 float with the ammount to sum to the current hue.
:rtype: The :py:class:`Color` object.
"""
hsv = self.get_HSV()
hsv[0] = (hsv[0] + ammount) % 1
self.set_HSV(hsv[0], hsv[1], hsv[2])
return self
#Tweak the saturation
def tweak_Sat(self, ammount):
"""
Change the current saturation value by a certain ammount.
:param ammount: A 0-1 float with the ammount to sum to the current saturation.
:rtype: The :py:class:`Color` object.
"""
hsv = self.get_HSV()
hsv[1] = min(max(hsv[1] + ammount, 0), 1)
self.set_HSV(hsv[0], hsv[1], hsv[2])
return self
#Tweak the value
def tweak_Val(self, ammount):
"""
Change the current value value by a certain ammount.
:param ammount: A 0-1 float with the ammount to sum to the current value.
:rtype: The :py:class:`Color` object.
"""
hsv = self.get_HSV()
hsv[2] = min(max(hsv[2] + ammount, 0), 1)
self.set_HSV(hsv[0], hsv[1], hsv[2])
return self
def copy(self):
"""
Creates a copy of this Color.
:rtype: The new :py:class:`Color` object.
"""
return Color(self.r, self.g, self.b, self.a)
|
elkiwy/paynter | paynter/image.py | Image.newLayer | python | def newLayer(self, effect=''):
self.layers.append(Layer(effect = effect))
self.activeLayer = len(self.layers)-1 | Creates a new :py:class:`Layer` and set that as the active.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/image.py#L54-L62 | null | class Image:
"""
The :py:class:`Image` class is structured as an array of :py:class:`Layer`.
An Image class is created when you create a :py:class:`Paynter`, so you can access this class as follows:
.. code-block:: python
from paynter import *
paynter = Paynter()
image = paynter.image
"""
layers = []
activeLayer = 0
#Init image
def __init__(self):
#Init by adding a new layer and selecting that as current layer
self.layers.append(Layer())
self.activeLayer = len(self.layers)-1
#Return the current active layer
def getActiveLayer(self):
"""
Returns the currently active :py:class:`Layer`.
:rtype: A :py:class:`Layer` object.
"""
return self.layers[self.activeLayer]
#Create a new layer and select it
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` and set that as the active.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.layers.append(Layer(effect = effect))
self.activeLayer = len(self.layers)-1
#Duplicate the current layer
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
activeLayer = self.layers[self.activeLayer]
newLayer = Layer(data=activeLayer.data, effect=activeLayer.effect)
self.layers.append(newLayer)
self.activeLayer = len(self.layers)-1
#Merge all the layers together to render the final image
def mergeAllLayers(self):
"""
Merge all the layers together.
:rtype: The result :py:class:`Layer` object.
"""
start = time.time()
while(len(self.layers)>1):
self.mergeBottomLayers()
print('merge time:'+str(time.time()-start))
return self.layers[0]
#Merge the bottom layer with the layer above that
def mergeBottomLayers(self):
#Debug show the two layer being merged
print('merging layers with:'+str(self.layers[1].effect))
#Normal paste on top
if self.layers[1].effect=='':
baseImage = PIL.Image.fromarray(self.layers[0].data, 'RGBA')
overImage = PIL.Image.fromarray(self.layers[1].data, 'RGBA')
baseImage = PIL.Image.alpha_composite(baseImage, overImage)
newImage = N.array(baseImage)
#Apply blend mode
else:
baseImage = self.layers[0].data.astype(N.float32)
overImage = self.layers[1].data.astype(N.float32)
newImage = mergeImagesWithBlendMode(baseImage, overImage, self.layers[1].effect).astype(N.uint8)
#Remove one layer and replace the last one
del self.layers[0]
self.layers[0] = Layer(data = newImage)
|
elkiwy/paynter | paynter/image.py | Image.duplicateActiveLayer | python | def duplicateActiveLayer(self):
activeLayer = self.layers[self.activeLayer]
newLayer = Layer(data=activeLayer.data, effect=activeLayer.effect)
self.layers.append(newLayer)
self.activeLayer = len(self.layers)-1 | Duplicates the current active :py:class:`Layer`.
:rtype: Nothing. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/image.py#L65-L74 | null | class Image:
"""
The :py:class:`Image` class is structured as an array of :py:class:`Layer`.
An Image class is created when you create a :py:class:`Paynter`, so you can access this class as follows:
.. code-block:: python
from paynter import *
paynter = Paynter()
image = paynter.image
"""
layers = []
activeLayer = 0
#Init image
def __init__(self):
#Init by adding a new layer and selecting that as current layer
self.layers.append(Layer())
self.activeLayer = len(self.layers)-1
#Return the current active layer
def getActiveLayer(self):
"""
Returns the currently active :py:class:`Layer`.
:rtype: A :py:class:`Layer` object.
"""
return self.layers[self.activeLayer]
#Create a new layer and select it
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` and set that as the active.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.layers.append(Layer(effect = effect))
self.activeLayer = len(self.layers)-1
#Duplicate the current layer
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
activeLayer = self.layers[self.activeLayer]
newLayer = Layer(data=activeLayer.data, effect=activeLayer.effect)
self.layers.append(newLayer)
self.activeLayer = len(self.layers)-1
#Merge all the layers together to render the final image
def mergeAllLayers(self):
"""
Merge all the layers together.
:rtype: The result :py:class:`Layer` object.
"""
start = time.time()
while(len(self.layers)>1):
self.mergeBottomLayers()
print('merge time:'+str(time.time()-start))
return self.layers[0]
#Merge the bottom layer with the layer above that
def mergeBottomLayers(self):
#Debug show the two layer being merged
print('merging layers with:'+str(self.layers[1].effect))
#Normal paste on top
if self.layers[1].effect=='':
baseImage = PIL.Image.fromarray(self.layers[0].data, 'RGBA')
overImage = PIL.Image.fromarray(self.layers[1].data, 'RGBA')
baseImage = PIL.Image.alpha_composite(baseImage, overImage)
newImage = N.array(baseImage)
#Apply blend mode
else:
baseImage = self.layers[0].data.astype(N.float32)
overImage = self.layers[1].data.astype(N.float32)
newImage = mergeImagesWithBlendMode(baseImage, overImage, self.layers[1].effect).astype(N.uint8)
#Remove one layer and replace the last one
del self.layers[0]
self.layers[0] = Layer(data = newImage)
|
elkiwy/paynter | paynter/image.py | Image.mergeAllLayers | python | def mergeAllLayers(self):
start = time.time()
while(len(self.layers)>1):
self.mergeBottomLayers()
print('merge time:'+str(time.time()-start))
return self.layers[0] | Merge all the layers together.
:rtype: The result :py:class:`Layer` object. | train | https://github.com/elkiwy/paynter/blob/f73cb5bb010a6b32ee41640a50396ed0bae8d496/paynter/image.py#L77-L87 | [
"def mergeBottomLayers(self):\n\t#Debug show the two layer being merged\n\tprint('merging layers with:'+str(self.layers[1].effect))\n\t#Normal paste on top\n\tif self.layers[1].effect=='':\n\t\tbaseImage = PIL.Image.fromarray(self.layers[0].data, 'RGBA')\n\t\toverImage = PIL.Image.fromarray(self.layers[1].data, 'RGBA')\n\t\tbaseImage = PIL.Image.alpha_composite(baseImage, overImage)\n\t\tnewImage = N.array(baseImage)\n\t#Apply blend mode\n\telse:\n\t\tbaseImage = self.layers[0].data.astype(N.float32)\n\t\toverImage = self.layers[1].data.astype(N.float32)\n\t\tnewImage = mergeImagesWithBlendMode(baseImage, overImage, self.layers[1].effect).astype(N.uint8)\n\t#Remove one layer and replace the last one\n\tdel self.layers[0]\t\t\t\n\tself.layers[0] = Layer(data = newImage)\n"
] | class Image:
"""
The :py:class:`Image` class is structured as an array of :py:class:`Layer`.
An Image class is created when you create a :py:class:`Paynter`, so you can access this class as follows:
.. code-block:: python
from paynter import *
paynter = Paynter()
image = paynter.image
"""
layers = []
activeLayer = 0
#Init image
def __init__(self):
#Init by adding a new layer and selecting that as current layer
self.layers.append(Layer())
self.activeLayer = len(self.layers)-1
#Return the current active layer
def getActiveLayer(self):
"""
Returns the currently active :py:class:`Layer`.
:rtype: A :py:class:`Layer` object.
"""
return self.layers[self.activeLayer]
#Create a new layer and select it
def newLayer(self, effect=''):
"""
Creates a new :py:class:`Layer` and set that as the active.
:param effect: A string with the blend mode for that layer that will be used when during the rendering process. The accepted values are: :code:`'soft_light','lighten','screen','dodge','addition','darken','multiply','hard_light','difference','subtract','grain_extract','grain_merge','divide','overlay'`.
:rtype: Nothing.
"""
self.layers.append(Layer(effect = effect))
self.activeLayer = len(self.layers)-1
#Duplicate the current layer
def duplicateActiveLayer(self):
"""
Duplicates the current active :py:class:`Layer`.
:rtype: Nothing.
"""
activeLayer = self.layers[self.activeLayer]
newLayer = Layer(data=activeLayer.data, effect=activeLayer.effect)
self.layers.append(newLayer)
self.activeLayer = len(self.layers)-1
#Merge all the layers together to render the final image
#Merge the bottom layer with the layer above that
def mergeBottomLayers(self):
#Debug show the two layer being merged
print('merging layers with:'+str(self.layers[1].effect))
#Normal paste on top
if self.layers[1].effect=='':
baseImage = PIL.Image.fromarray(self.layers[0].data, 'RGBA')
overImage = PIL.Image.fromarray(self.layers[1].data, 'RGBA')
baseImage = PIL.Image.alpha_composite(baseImage, overImage)
newImage = N.array(baseImage)
#Apply blend mode
else:
baseImage = self.layers[0].data.astype(N.float32)
overImage = self.layers[1].data.astype(N.float32)
newImage = mergeImagesWithBlendMode(baseImage, overImage, self.layers[1].effect).astype(N.uint8)
#Remove one layer and replace the last one
del self.layers[0]
self.layers[0] = Layer(data = newImage)
|
thiezn/iperf3-python | iperf3/iperf3.py | more_data | python | def more_data(pipe_out):
r, _, _ = select.select([pipe_out], [], [], 0)
return bool(r) | Check if there is more data left on the pipe
:param pipe_out: The os pipe_out
:rtype: bool | train | https://github.com/thiezn/iperf3-python/blob/094a6e043f44fb154988348603661b1473c23a50/iperf3/iperf3.py#L40-L47 | null | # -*- coding: utf-8 -*-
"""
Python wrapper for the iperf3 libiperf.so.0 library. The module consists of two
classes, :class:`Client` and :class:`Server`, that inherit from the base class
:class:`IPerf3`. They provide a nice (if i say so myself) and pythonic way to
interact with the iperf3 utility.
At the moment the module redirects stdout and stderr to a pipe and returns the
received data back after each ``client.run()`` or ``server.run()`` call. In
later releases there will be an option to toggle this on or off.
A user should never have to utilise the :class:`IPerf3` class directly, this
class provides common settings for the :class:`Client` and :class:`Server`
classes.
To get started quickly see the :ref:`examples` page.
.. moduleauthor:: Mathijs Mortimer <mathijs@mortimer.nl>
"""
from ctypes import util, cdll, c_char_p, c_int, c_char, c_void_p, c_uint64
import os
import select
import json
import threading
from socket import SOCK_DGRAM, SOCK_STREAM
try:
from queue import Queue
except ImportError:
from Queue import Queue # Python2 compatibility
__version__ = '0.1.11'
MAX_UDP_BULKSIZE = (65535 - 8 - 20)
def read_pipe(pipe_out):
"""Read data on a pipe
Used to capture stdout data produced by libiperf
:param pipe_out: The os pipe_out
:rtype: unicode string
"""
out = b''
while more_data(pipe_out):
out += os.read(pipe_out, 1024)
return out.decode('utf-8')
def output_to_pipe(pipe_in):
"""Redirects stdout and stderr to a pipe
:param pipe_out: The pipe to redirect stdout and stderr to
"""
os.dup2(pipe_in, 1) # stdout
# os.dup2(pipe_in, 2) # stderr
def output_to_screen(stdout_fd, stderr_fd):
"""Redirects stdout and stderr to a pipe
:param stdout_fd: The stdout file descriptor
:param stderr_fd: The stderr file descriptor
"""
os.dup2(stdout_fd, 1)
# os.dup2(stderr_fd, 2)
class IPerf3(object):
"""The base class used by both the iperf3 :class:`Server` and :class:`Client`
.. note:: You should not use this class directly
"""
def __init__(self,
role,
verbose=True,
lib_name=None):
"""Initialise the iperf shared library
:param role: 'c' = client; 's' = server
:param verbose: enable verbose output
:param lib_name: optional name and path for libiperf.so.0 library
"""
if lib_name is None:
lib_name = util.find_library('libiperf')
if lib_name is None:
# If we still couldn't find it lets try the manual approach
lib_name = 'libiperf.so.0'
try:
self.lib = cdll.LoadLibrary(lib_name)
except OSError:
raise OSError(
"Couldn't find shared library {}, is iperf3 installed?".format(
lib_name
)
)
# Set the appropriate C types.
self.lib.iperf_client_end.restype = c_int
self.lib.iperf_client_end.argtypes = (c_void_p,)
self.lib.iperf_free_test.restxpe = None
self.lib.iperf_free_test.argtypes = (c_void_p,)
self.lib.iperf_new_test.restype = c_void_p
self.lib.iperf_new_test.argtypes = None
self.lib.iperf_defaults.restype = c_int
self.lib.iperf_defaults.argtypes = (c_void_p,)
self.lib.iperf_get_test_role.restype = c_char
self.lib.iperf_get_test_role.argtypes = (c_void_p,)
self.lib.iperf_set_test_role.restype = None
self.lib.iperf_set_test_role.argtypes = (c_void_p, c_char,)
self.lib.iperf_get_test_bind_address.restype = c_char_p
self.lib.iperf_get_test_bind_address.argtypes = (c_void_p,)
self.lib.iperf_set_test_bind_address.restype = None
self.lib.iperf_set_test_bind_address.argtypes = (c_void_p, c_char_p,)
self.lib.iperf_get_test_server_port.restype = c_int
self.lib.iperf_get_test_server_port.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_port.restype = None
self.lib.iperf_set_test_server_port.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_json_output.restype = c_int
self.lib.iperf_get_test_json_output.argtypes = (c_void_p,)
self.lib.iperf_set_test_json_output.restype = None
self.lib.iperf_set_test_json_output.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_verbose.restype = c_int
self.lib.iperf_get_verbose.argtypes = (c_void_p,)
self.lib.iperf_set_verbose.restype = None
self.lib.iperf_set_verbose.argtypes = (c_void_p, c_int)
self.lib.iperf_strerror.restype = c_char_p
self.lib.iperf_strerror.argtypes = (c_int,)
self.lib.iperf_get_test_server_hostname.restype = c_char_p
self.lib.iperf_get_test_server_hostname.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_hostname.restype = None
self.lib.iperf_set_test_server_hostname.argtypes = (
c_void_p, c_char_p,
)
self.lib.iperf_get_test_protocol_id.restype = c_int
self.lib.iperf_get_test_protocol_id.argtypes = (c_void_p,)
self.lib.set_protocol.restype = c_int
self.lib.set_protocol.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_omit.restype = c_int
self.lib.iperf_get_test_omit.argtypes = (c_void_p,)
self.lib.iperf_set_test_omit.restype = None
self.lib.iperf_set_test_omit.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_duration.restype = c_int
self.lib.iperf_get_test_duration.argtypes = (c_void_p,)
self.lib.iperf_set_test_duration.restype = None
self.lib.iperf_set_test_duration.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_rate.restype = c_uint64
self.lib.iperf_get_test_rate.argtypes = (c_void_p,)
self.lib.iperf_set_test_rate.restype = None
self.lib.iperf_set_test_rate.argtypes = (c_void_p, c_uint64,)
self.lib.iperf_get_test_blksize.restype = c_int
self.lib.iperf_get_test_blksize.argtypes = (c_void_p,)
self.lib.iperf_set_test_blksize.restype = None
self.lib.iperf_set_test_blksize.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_num_streams.restype = c_int
self.lib.iperf_get_test_num_streams.argtypes = (c_void_p,)
self.lib.iperf_set_test_num_streams.restype = None
self.lib.iperf_set_test_num_streams.argtypes = (c_void_p, c_int,)
self.lib.iperf_has_zerocopy.restype = c_int
self.lib.iperf_has_zerocopy.argtypes = None
self.lib.iperf_set_test_zerocopy.restype = None
self.lib.iperf_set_test_zerocopy.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_reverse.restype = c_int
self.lib.iperf_get_test_reverse.argtypes = (c_void_p,)
self.lib.iperf_set_test_reverse.restype = None
self.lib.iperf_set_test_reverse.argtypes = (c_void_p, c_int,)
self.lib.iperf_run_client.restype = c_int
self.lib.iperf_run_client.argtypes = (c_void_p,)
self.lib.iperf_run_server.restype = c_int
self.lib.iperf_run_server.argtypes = (c_void_p,)
self.lib.iperf_reset_test.restype = None
self.lib.iperf_reset_test.argtypes = (c_void_p,)
try:
# Only available from iperf v3.1 and onwards
self.lib.iperf_get_test_json_output_string.restype = c_char_p
self.lib.iperf_get_test_json_output_string.argtypes = (c_void_p,)
except AttributeError:
pass
# The test C struct iperf_test
self._test = self._new()
self.defaults()
# stdout/strerr redirection variables
self._stdout_fd = os.dup(1)
self._stderr_fd = os.dup(2)
self._pipe_out, self._pipe_in = os.pipe() # no need for pipe write
# Generic test settings
self.role = role
self.json_output = True
self.verbose = verbose
def __del__(self):
"""Cleanup the test after the :class:`IPerf3` class is terminated"""
os.close(self._stdout_fd)
os.close(self._stderr_fd)
os.close(self._pipe_out)
os.close(self._pipe_in)
try:
# In the current version of libiperf, the control socket isn't
# closed on iperf_client_end(), see proposed pull request:
# https://github.com/esnet/iperf/pull/597
# Workaround for testing, don't ever do this..:
#
# sck=self.lib.iperf_get_control_socket(self._test)
# os.close(sck)
self.lib.iperf_client_end(self._test)
self.lib.iperf_free_test(self._test)
except AttributeError:
# self.lib doesn't exist, likely because iperf3 wasn't installed or
# the shared library libiperf.so.0 could not be found
pass
def _new(self):
"""Initialise a new iperf test
struct iperf_test *iperf_new_test()
"""
return self.lib.iperf_new_test()
def defaults(self):
"""Set/reset iperf test defaults."""
self.lib.iperf_defaults(self._test)
@property
def role(self):
"""The iperf3 instance role
valid roles are 'c'=client and 's'=server
:rtype: 'c' or 's'
"""
try:
self._role = c_char(
self.lib.iperf_get_test_role(self._test)
).value.decode('utf-8')
except TypeError:
self._role = c_char(
chr(self.lib.iperf_get_test_role(self._test))
).value.decode('utf-8')
return self._role
@role.setter
def role(self, role):
if role.lower() in ['c', 's']:
self.lib.iperf_set_test_role(
self._test,
c_char(role.lower().encode('utf-8'))
)
self._role = role
else:
raise ValueError("Unknown role, accepted values are 'c' and 's'")
@property
def bind_address(self):
"""The bind address the iperf3 instance will listen on
use * to listen on all available IPs
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_bind_address(self._test)
).value
if result:
self._bind_address = result.decode('utf-8')
else:
self._bind_address = '*'
return self._bind_address
@bind_address.setter
def bind_address(self, address):
self.lib.iperf_set_test_bind_address(
self._test,
c_char_p(address.encode('utf-8'))
)
self._bind_address = address
@property
def port(self):
"""The port the iperf3 server is listening on"""
self._port = self.lib.iperf_get_test_server_port(self._test)
return self._port
@port.setter
def port(self, port):
self.lib.iperf_set_test_server_port(self._test, int(port))
self._port = port
@property
def json_output(self):
"""Toggles json output of libiperf
Turning this off will output the iperf3 instance results to
stdout/stderr
:rtype: bool
"""
enabled = self.lib.iperf_get_test_json_output(self._test)
if enabled:
self._json_output = True
else:
self._json_output = False
return self._json_output
@json_output.setter
def json_output(self, enabled):
if enabled:
self.lib.iperf_set_test_json_output(self._test, 1)
else:
self.lib.iperf_set_test_json_output(self._test, 0)
self._json_output = enabled
@property
def verbose(self):
"""Toggles verbose output for the iperf3 instance
:rtype: bool
"""
enabled = self.lib.iperf_get_verbose(self._test)
if enabled:
self._verbose = True
else:
self._verbose = False
return self._verbose
@verbose.setter
def verbose(self, enabled):
if enabled:
self.lib.iperf_set_verbose(self._test, 1)
else:
self.lib.iperf_set_verbose(self._test, 0)
self._verbose = enabled
@property
def _errno(self):
"""Returns the last error ID
:rtype: int
"""
return c_int.in_dll(self.lib, "i_errno").value
@property
def iperf_version(self):
"""Returns the version of the libiperf library
:rtype: string
"""
# TODO: Is there a better way to get the const char than allocating 30?
VersionType = c_char * 30
return VersionType.in_dll(self.lib, "version").value.decode('utf-8')
def _error_to_string(self, error_id):
"""Returns an error string from libiperf
:param error_id: The error_id produced by libiperf
:rtype: string
"""
strerror = self.lib.iperf_strerror
strerror.restype = c_char_p
return strerror(error_id).decode('utf-8')
def run(self):
"""Runs the iperf3 instance.
This function has to be instantiated by the Client and Server
instances
:rtype: NotImplementedError
"""
raise NotImplementedError
class Client(IPerf3):
"""An iperf3 client connection.
This opens up a connection to a running iperf3 server
Basic Usage::
>>> import iperf3
>>> client = iperf3.Client()
>>> client.duration = 1
>>> client.server_hostname = '127.0.0.1'
>>> client.port = 5201
>>> client.run()
{'intervals': [{'sum': {...
"""
def __init__(self, *args, **kwargs):
"""Initialise the iperf shared library"""
super(Client, self).__init__(role='c', *args, **kwargs)
# Internal variables
self._blksize = None
self._server_hostname = None
self._port = None
self._num_streams = None
self._zerocopy = False
self._omit = None
self._duration = None
self._bandwidth = None
self._protocol = None
@property
def server_hostname(self):
"""The server hostname to connect to.
Accepts DNS entries or IP addresses.
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_server_hostname(self._test)
).value
if result:
self._server_hostname = result.decode('utf-8')
else:
self._server_hostname = None
return self._server_hostname
@server_hostname.setter
def server_hostname(self, hostname):
self.lib.iperf_set_test_server_hostname(
self._test,
c_char_p(hostname.encode('utf-8'))
)
self._server_hostname = hostname
@property
def protocol(self):
"""The iperf3 instance protocol
valid protocols are 'tcp' and 'udp'
:rtype: str
"""
proto_id = self.lib.iperf_get_test_protocol_id(self._test)
if proto_id == SOCK_STREAM:
self._protocol = 'tcp'
elif proto_id == SOCK_DGRAM:
self._protocol = 'udp'
return self._protocol
@protocol.setter
def protocol(self, protocol):
if protocol == 'tcp':
self.lib.set_protocol(self._test, int(SOCK_STREAM))
elif protocol == 'udp':
self.lib.set_protocol(self._test, int(SOCK_DGRAM))
if self.blksize > MAX_UDP_BULKSIZE:
self.blksize = MAX_UDP_BULKSIZE
self._protocol = protocol
@property
def omit(self):
"""The test startup duration to omit in seconds."""
self._omit = self.lib.iperf_get_test_omit(self._test)
return self._omit
@omit.setter
def omit(self, omit):
self.lib.iperf_set_test_omit(self._test, omit)
self._omit = omit
@property
def duration(self):
"""The test duration in seconds."""
self._duration = self.lib.iperf_get_test_duration(self._test)
return self._duration
@duration.setter
def duration(self, duration):
self.lib.iperf_set_test_duration(self._test, duration)
self._duration = duration
@property
def bandwidth(self):
"""Target bandwidth in bits/sec"""
self._bandwidth = self.lib.iperf_get_test_rate(self._test)
return self._bandwidth
@bandwidth.setter
def bandwidth(self, bandwidth):
self.lib.iperf_set_test_rate(self._test, bandwidth)
self._bandwidth = bandwidth
@property
def blksize(self):
"""The test blksize."""
self._blksize = self.lib.iperf_get_test_blksize(self._test)
return self._blksize
@blksize.setter
def blksize(self, bulksize):
# iperf version < 3.1.3 has some weird bugs when bulksize is
# larger than MAX_UDP_BULKSIZE
if self.protocol == 'udp' and bulksize > MAX_UDP_BULKSIZE:
bulksize = MAX_UDP_BULKSIZE
self.lib.iperf_set_test_blksize(self._test, bulksize)
self._blksize = bulksize
@property
def bulksize(self):
"""The test bulksize.
Deprecated argument, use blksize instead to ensure consistency
with iperf3 C libary
"""
# Keeping bulksize argument for backwards compatibility with
# iperf3-python < 0.1.7
return self.blksize
@bulksize.setter
def bulksize(self, bulksize):
# Keeping bulksize argument for backwards compatibility with
# iperf3-python < 0.1.7
self.blksize = bulksize
@property
def num_streams(self):
"""The number of streams to use."""
self._num_streams = self.lib.iperf_get_test_num_streams(self._test)
return self._num_streams
@num_streams.setter
def num_streams(self, number):
self.lib.iperf_set_test_num_streams(self._test, number)
self._num_streams = number
@property
def zerocopy(self):
"""Toggle zerocopy.
Use the sendfile() system call for "Zero Copy" mode. This uses much
less CPU. This is not supported on all systems.
**Note** there isn't a hook in the libiperf library for getting the
current configured value. Relying on zerocopy.setter function
:rtype: bool
"""
return self._zerocopy
@zerocopy.setter
def zerocopy(self, enabled):
if enabled and self.lib.iperf_has_zerocopy():
self.lib.iperf_set_test_zerocopy(self._test, 1)
self._zerocopy = True
else:
self.lib.iperf_set_test_zerocopy(self._test, 0)
self._zerocopy = False
@property
def reverse(self):
"""Toggles direction of test
:rtype: bool
"""
enabled = self.lib.iperf_get_test_reverse(self._test)
if enabled:
self._reverse = True
else:
self._reverse = False
return self._reverse
@reverse.setter
def reverse(self, enabled):
if enabled:
self.lib.iperf_set_test_reverse(self._test, 1)
else:
self.lib.iperf_set_test_reverse(self._test, 0)
self._reverse = enabled
def run(self):
"""Run the current test client.
:rtype: instance of :class:`TestResult`
"""
if self.json_output:
output_to_pipe(self._pipe_in) # Disable stdout
error = self.lib.iperf_run_client(self._test)
if not self.iperf_version.startswith('iperf 3.1'):
data = read_pipe(self._pipe_out)
if data.startswith('Control connection'):
data = '{' + data.split('{', 1)[1]
else:
data = c_char_p(
self.lib.iperf_get_test_json_output_string(self._test)
).value
if data:
data = data.decode('utf-8')
output_to_screen(self._stdout_fd, self._stderr_fd) # enable stdout
if not data or error:
data = '{"error": "%s"}' % self._error_to_string(self._errno)
return TestResult(data)
class Server(IPerf3):
"""An iperf3 server connection.
This starts an iperf3 server session. The server terminates after each
succesful client connection so it might be useful to run Server.run()
in a loop.
The C function iperf_run_server is called in a seperate thread to make
sure KeyboardInterrupt(aka ctrl+c) can still be captured
Basic Usage::
>>> import iperf3
>>> server = iperf3.Server()
>>> server.run()
{'start': {...
"""
def __init__(self, *args, **kwargs):
"""Initialise the iperf3 server instance"""
super(Server, self).__init__(role='s', *args, **kwargs)
def run(self):
"""Run the iperf3 server instance.
:rtype: instance of :class:`TestResult`
"""
def _run_in_thread(self, data_queue):
"""Runs the iperf_run_server
:param data_queue: thread-safe queue
"""
output_to_pipe(self._pipe_in) # disable stdout
error = self.lib.iperf_run_server(self._test)
output_to_screen(self._stdout_fd, self._stderr_fd) # enable stdout
# TODO json_output_string not available on earlier iperf3 builds
# have to build in a version check using self.iperf_version
# The following line should work on later versions:
# data = c_char_p(
# self.lib.iperf_get_test_json_output_string(self._test)
# ).value
data = read_pipe(self._pipe_out)
if not data or error:
data = '{"error": "%s"}' % self._error_to_string(self._errno)
self.lib.iperf_reset_test(self._test)
data_queue.put(data)
if self.json_output:
data_queue = Queue()
t = threading.Thread(
target=_run_in_thread, args=[self, data_queue]
)
t.daemon = True
t.start()
while t.is_alive():
t.join(.1)
return TestResult(data_queue.get())
else:
# setting json_output to False will output test to screen only
self.lib.iperf_run_server(self._test)
self.lib.iperf_reset_test(self._test)
return None
class TestResult(object):
"""Class containing iperf3 test results.
:param text: The raw result from libiperf as text
:param json: The raw result from libiperf asjson/dict
:param error: Error captured during test, None if all ok
:param time: Start time
:param timesecs: Start time in seconds
:param system_info: System info
:param version: Iperf Version
:param local_host: Local host ip
:param local_port: Local port number
:param remote_host: Remote host ip
:param remote_port: Remote port number
:param reverse: Test ran in reverse direction
:param protocol: 'TCP' or 'UDP'
:param num_streams: Number of test streams
:param blksize:
:param omit: Test duration to omit in the beginning in seconds
:param duration: Test duration (following omit duration) in seconds
:param local_cpu_total: The local total CPU load
:param local_cpu_user: The local user CPU load
:param local_cpu_system: The local system CPU load
:param remote_cpu_total: The remote total CPU load
:param remote_cpu_user: The remote user CPU load
:param remote_cpu_system: The remote system CPU load
TCP test specific
:param tcp_mss_default:
:param retransmits: amount of retransmits (Only returned from client)
:param sent_bytes: Sent bytes
:param sent_bps: Sent bits per second
:param sent_kbps: sent kilobits per second
:param sent_Mbps: Sent Megabits per second
:param sent_kB_s: Sent kiloBytes per second
:param sent_MB_s: Sent MegaBytes per second
:param received_bytes: Received bytes
:param received_bps: Received bits per second
:param received_kbps: Received kilobits per second
:param received_Mbps: Received Megabits per second
:param received_kB_s: Received kiloBytes per second
:param received_MB_s: Received MegaBytes per second
UDP test specific
:param bytes:
:param bps:
:param jitter_ms:
:param kbps:
:param Mbps:
:param kB_s:
:param MB_s:
:param packets:
:param lost_packets:
:param lost_percent:
:param seconds:
"""
def __init__(self, result):
"""Initialise TestResult
:param result: raw json output from :class:`Client` and :class:`Server`
"""
# The full result data
self.text = result
self.json = json.loads(result)
if 'error' in self.json:
self.error = self.json['error']
else:
self.error = None
# start time
self.time = self.json['start']['timestamp']['time']
self.timesecs = self.json['start']['timestamp']['timesecs']
# generic info
self.system_info = self.json['start']['system_info']
self.version = self.json['start']['version']
# connection details
connection_details = self.json['start']['connected'][0]
self.local_host = connection_details['local_host']
self.local_port = connection_details['local_port']
self.remote_host = connection_details['remote_host']
self.remote_port = connection_details['remote_port']
# test setup
self.tcp_mss_default = self.json['start'].get('tcp_mss_default')
self.protocol = self.json['start']['test_start']['protocol']
self.num_streams = self.json['start']['test_start']['num_streams']
self.blksize = self.json['start']['test_start']['blksize']
self.omit = self.json['start']['test_start']['omit']
self.duration = self.json['start']['test_start']['duration']
# system performance
cpu_utilization_perc = self.json['end']['cpu_utilization_percent']
self.local_cpu_total = cpu_utilization_perc['host_total']
self.local_cpu_user = cpu_utilization_perc['host_user']
self.local_cpu_system = cpu_utilization_perc['host_system']
self.remote_cpu_total = cpu_utilization_perc['remote_total']
self.remote_cpu_user = cpu_utilization_perc['remote_user']
self.remote_cpu_system = cpu_utilization_perc['remote_system']
# TCP specific test results
if self.protocol == 'TCP':
sent_json = self.json['end']['sum_sent']
self.sent_bytes = sent_json['bytes']
self.sent_bps = sent_json['bits_per_second']
recv_json = self.json['end']['sum_received']
self.received_bytes = recv_json['bytes']
self.received_bps = recv_json['bits_per_second']
# Bits are measured in 10**3 terms
# Bytes are measured in 2**10 terms
# kbps = Kilobits per second
# Mbps = Megabits per second
# kB_s = kiloBytes per second
# MB_s = MegaBytes per second
self.sent_kbps = self.sent_bps / 1000
self.sent_Mbps = self.sent_kbps / 1000
self.sent_kB_s = self.sent_bps / (8 * 1024)
self.sent_MB_s = self.sent_kB_s / 1024
self.received_kbps = self.received_bps / 1000
self.received_Mbps = self.received_kbps / 1000
self.received_kB_s = self.received_bps / (8 * 1024)
self.received_MB_s = self.received_kB_s / 1024
# retransmits only returned from client
self.retransmits = sent_json.get('retransmits')
# UDP specific test results
elif self.protocol == 'UDP':
self.bytes = self.json['end']['sum']['bytes']
self.bps = self.json['end']['sum']['bits_per_second']
self.jitter_ms = self.json['end']['sum']['jitter_ms']
self.kbps = self.bps / 1000
self.Mbps = self.kbps / 1000
self.kB_s = self.bps / (8 * 1024)
self.MB_s = self.kB_s / 1024
self.packets = self.json['end']['sum']['packets']
self.lost_packets = self.json['end']['sum']['lost_packets']
self.lost_percent = self.json['end']['sum']['lost_percent']
self.seconds = self.json['end']['sum']['seconds']
@property
def reverse(self):
if self.json['start']['test_start']['reverse']:
return True
else:
return False
@property
def type(self):
if 'connecting_to' in self.json['start']:
return 'client'
else:
return 'server'
def __repr__(self):
"""Print the result as received from iperf3"""
return self.text
|
thiezn/iperf3-python | iperf3/iperf3.py | read_pipe | python | def read_pipe(pipe_out):
out = b''
while more_data(pipe_out):
out += os.read(pipe_out, 1024)
return out.decode('utf-8') | Read data on a pipe
Used to capture stdout data produced by libiperf
:param pipe_out: The os pipe_out
:rtype: unicode string | train | https://github.com/thiezn/iperf3-python/blob/094a6e043f44fb154988348603661b1473c23a50/iperf3/iperf3.py#L50-L62 | [
"def more_data(pipe_out):\n \"\"\"Check if there is more data left on the pipe\n\n :param pipe_out: The os pipe_out\n :rtype: bool\n \"\"\"\n r, _, _ = select.select([pipe_out], [], [], 0)\n return bool(r)\n"
] | # -*- coding: utf-8 -*-
"""
Python wrapper for the iperf3 libiperf.so.0 library. The module consists of two
classes, :class:`Client` and :class:`Server`, that inherit from the base class
:class:`IPerf3`. They provide a nice (if i say so myself) and pythonic way to
interact with the iperf3 utility.
At the moment the module redirects stdout and stderr to a pipe and returns the
received data back after each ``client.run()`` or ``server.run()`` call. In
later releases there will be an option to toggle this on or off.
A user should never have to utilise the :class:`IPerf3` class directly, this
class provides common settings for the :class:`Client` and :class:`Server`
classes.
To get started quickly see the :ref:`examples` page.
.. moduleauthor:: Mathijs Mortimer <mathijs@mortimer.nl>
"""
from ctypes import util, cdll, c_char_p, c_int, c_char, c_void_p, c_uint64
import os
import select
import json
import threading
from socket import SOCK_DGRAM, SOCK_STREAM
try:
from queue import Queue
except ImportError:
from Queue import Queue # Python2 compatibility
__version__ = '0.1.11'
MAX_UDP_BULKSIZE = (65535 - 8 - 20)
def more_data(pipe_out):
"""Check if there is more data left on the pipe
:param pipe_out: The os pipe_out
:rtype: bool
"""
r, _, _ = select.select([pipe_out], [], [], 0)
return bool(r)
def output_to_pipe(pipe_in):
"""Redirects stdout and stderr to a pipe
:param pipe_out: The pipe to redirect stdout and stderr to
"""
os.dup2(pipe_in, 1) # stdout
# os.dup2(pipe_in, 2) # stderr
def output_to_screen(stdout_fd, stderr_fd):
"""Redirects stdout and stderr to a pipe
:param stdout_fd: The stdout file descriptor
:param stderr_fd: The stderr file descriptor
"""
os.dup2(stdout_fd, 1)
# os.dup2(stderr_fd, 2)
class IPerf3(object):
"""The base class used by both the iperf3 :class:`Server` and :class:`Client`
.. note:: You should not use this class directly
"""
def __init__(self,
role,
verbose=True,
lib_name=None):
"""Initialise the iperf shared library
:param role: 'c' = client; 's' = server
:param verbose: enable verbose output
:param lib_name: optional name and path for libiperf.so.0 library
"""
if lib_name is None:
lib_name = util.find_library('libiperf')
if lib_name is None:
# If we still couldn't find it lets try the manual approach
lib_name = 'libiperf.so.0'
try:
self.lib = cdll.LoadLibrary(lib_name)
except OSError:
raise OSError(
"Couldn't find shared library {}, is iperf3 installed?".format(
lib_name
)
)
# Set the appropriate C types.
self.lib.iperf_client_end.restype = c_int
self.lib.iperf_client_end.argtypes = (c_void_p,)
self.lib.iperf_free_test.restxpe = None
self.lib.iperf_free_test.argtypes = (c_void_p,)
self.lib.iperf_new_test.restype = c_void_p
self.lib.iperf_new_test.argtypes = None
self.lib.iperf_defaults.restype = c_int
self.lib.iperf_defaults.argtypes = (c_void_p,)
self.lib.iperf_get_test_role.restype = c_char
self.lib.iperf_get_test_role.argtypes = (c_void_p,)
self.lib.iperf_set_test_role.restype = None
self.lib.iperf_set_test_role.argtypes = (c_void_p, c_char,)
self.lib.iperf_get_test_bind_address.restype = c_char_p
self.lib.iperf_get_test_bind_address.argtypes = (c_void_p,)
self.lib.iperf_set_test_bind_address.restype = None
self.lib.iperf_set_test_bind_address.argtypes = (c_void_p, c_char_p,)
self.lib.iperf_get_test_server_port.restype = c_int
self.lib.iperf_get_test_server_port.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_port.restype = None
self.lib.iperf_set_test_server_port.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_json_output.restype = c_int
self.lib.iperf_get_test_json_output.argtypes = (c_void_p,)
self.lib.iperf_set_test_json_output.restype = None
self.lib.iperf_set_test_json_output.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_verbose.restype = c_int
self.lib.iperf_get_verbose.argtypes = (c_void_p,)
self.lib.iperf_set_verbose.restype = None
self.lib.iperf_set_verbose.argtypes = (c_void_p, c_int)
self.lib.iperf_strerror.restype = c_char_p
self.lib.iperf_strerror.argtypes = (c_int,)
self.lib.iperf_get_test_server_hostname.restype = c_char_p
self.lib.iperf_get_test_server_hostname.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_hostname.restype = None
self.lib.iperf_set_test_server_hostname.argtypes = (
c_void_p, c_char_p,
)
self.lib.iperf_get_test_protocol_id.restype = c_int
self.lib.iperf_get_test_protocol_id.argtypes = (c_void_p,)
self.lib.set_protocol.restype = c_int
self.lib.set_protocol.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_omit.restype = c_int
self.lib.iperf_get_test_omit.argtypes = (c_void_p,)
self.lib.iperf_set_test_omit.restype = None
self.lib.iperf_set_test_omit.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_duration.restype = c_int
self.lib.iperf_get_test_duration.argtypes = (c_void_p,)
self.lib.iperf_set_test_duration.restype = None
self.lib.iperf_set_test_duration.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_rate.restype = c_uint64
self.lib.iperf_get_test_rate.argtypes = (c_void_p,)
self.lib.iperf_set_test_rate.restype = None
self.lib.iperf_set_test_rate.argtypes = (c_void_p, c_uint64,)
self.lib.iperf_get_test_blksize.restype = c_int
self.lib.iperf_get_test_blksize.argtypes = (c_void_p,)
self.lib.iperf_set_test_blksize.restype = None
self.lib.iperf_set_test_blksize.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_num_streams.restype = c_int
self.lib.iperf_get_test_num_streams.argtypes = (c_void_p,)
self.lib.iperf_set_test_num_streams.restype = None
self.lib.iperf_set_test_num_streams.argtypes = (c_void_p, c_int,)
self.lib.iperf_has_zerocopy.restype = c_int
self.lib.iperf_has_zerocopy.argtypes = None
self.lib.iperf_set_test_zerocopy.restype = None
self.lib.iperf_set_test_zerocopy.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_reverse.restype = c_int
self.lib.iperf_get_test_reverse.argtypes = (c_void_p,)
self.lib.iperf_set_test_reverse.restype = None
self.lib.iperf_set_test_reverse.argtypes = (c_void_p, c_int,)
self.lib.iperf_run_client.restype = c_int
self.lib.iperf_run_client.argtypes = (c_void_p,)
self.lib.iperf_run_server.restype = c_int
self.lib.iperf_run_server.argtypes = (c_void_p,)
self.lib.iperf_reset_test.restype = None
self.lib.iperf_reset_test.argtypes = (c_void_p,)
try:
# Only available from iperf v3.1 and onwards
self.lib.iperf_get_test_json_output_string.restype = c_char_p
self.lib.iperf_get_test_json_output_string.argtypes = (c_void_p,)
except AttributeError:
pass
# The test C struct iperf_test
self._test = self._new()
self.defaults()
# stdout/strerr redirection variables
self._stdout_fd = os.dup(1)
self._stderr_fd = os.dup(2)
self._pipe_out, self._pipe_in = os.pipe() # no need for pipe write
# Generic test settings
self.role = role
self.json_output = True
self.verbose = verbose
def __del__(self):
"""Cleanup the test after the :class:`IPerf3` class is terminated"""
os.close(self._stdout_fd)
os.close(self._stderr_fd)
os.close(self._pipe_out)
os.close(self._pipe_in)
try:
# In the current version of libiperf, the control socket isn't
# closed on iperf_client_end(), see proposed pull request:
# https://github.com/esnet/iperf/pull/597
# Workaround for testing, don't ever do this..:
#
# sck=self.lib.iperf_get_control_socket(self._test)
# os.close(sck)
self.lib.iperf_client_end(self._test)
self.lib.iperf_free_test(self._test)
except AttributeError:
# self.lib doesn't exist, likely because iperf3 wasn't installed or
# the shared library libiperf.so.0 could not be found
pass
def _new(self):
"""Initialise a new iperf test
struct iperf_test *iperf_new_test()
"""
return self.lib.iperf_new_test()
def defaults(self):
"""Set/reset iperf test defaults."""
self.lib.iperf_defaults(self._test)
@property
def role(self):
"""The iperf3 instance role
valid roles are 'c'=client and 's'=server
:rtype: 'c' or 's'
"""
try:
self._role = c_char(
self.lib.iperf_get_test_role(self._test)
).value.decode('utf-8')
except TypeError:
self._role = c_char(
chr(self.lib.iperf_get_test_role(self._test))
).value.decode('utf-8')
return self._role
@role.setter
def role(self, role):
if role.lower() in ['c', 's']:
self.lib.iperf_set_test_role(
self._test,
c_char(role.lower().encode('utf-8'))
)
self._role = role
else:
raise ValueError("Unknown role, accepted values are 'c' and 's'")
@property
def bind_address(self):
"""The bind address the iperf3 instance will listen on
use * to listen on all available IPs
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_bind_address(self._test)
).value
if result:
self._bind_address = result.decode('utf-8')
else:
self._bind_address = '*'
return self._bind_address
@bind_address.setter
def bind_address(self, address):
self.lib.iperf_set_test_bind_address(
self._test,
c_char_p(address.encode('utf-8'))
)
self._bind_address = address
@property
def port(self):
"""The port the iperf3 server is listening on"""
self._port = self.lib.iperf_get_test_server_port(self._test)
return self._port
@port.setter
def port(self, port):
self.lib.iperf_set_test_server_port(self._test, int(port))
self._port = port
@property
def json_output(self):
"""Toggles json output of libiperf
Turning this off will output the iperf3 instance results to
stdout/stderr
:rtype: bool
"""
enabled = self.lib.iperf_get_test_json_output(self._test)
if enabled:
self._json_output = True
else:
self._json_output = False
return self._json_output
@json_output.setter
def json_output(self, enabled):
if enabled:
self.lib.iperf_set_test_json_output(self._test, 1)
else:
self.lib.iperf_set_test_json_output(self._test, 0)
self._json_output = enabled
@property
def verbose(self):
"""Toggles verbose output for the iperf3 instance
:rtype: bool
"""
enabled = self.lib.iperf_get_verbose(self._test)
if enabled:
self._verbose = True
else:
self._verbose = False
return self._verbose
@verbose.setter
def verbose(self, enabled):
if enabled:
self.lib.iperf_set_verbose(self._test, 1)
else:
self.lib.iperf_set_verbose(self._test, 0)
self._verbose = enabled
@property
def _errno(self):
"""Returns the last error ID
:rtype: int
"""
return c_int.in_dll(self.lib, "i_errno").value
@property
def iperf_version(self):
"""Returns the version of the libiperf library
:rtype: string
"""
# TODO: Is there a better way to get the const char than allocating 30?
VersionType = c_char * 30
return VersionType.in_dll(self.lib, "version").value.decode('utf-8')
def _error_to_string(self, error_id):
"""Returns an error string from libiperf
:param error_id: The error_id produced by libiperf
:rtype: string
"""
strerror = self.lib.iperf_strerror
strerror.restype = c_char_p
return strerror(error_id).decode('utf-8')
def run(self):
"""Runs the iperf3 instance.
This function has to be instantiated by the Client and Server
instances
:rtype: NotImplementedError
"""
raise NotImplementedError
class Client(IPerf3):
"""An iperf3 client connection.
This opens up a connection to a running iperf3 server
Basic Usage::
>>> import iperf3
>>> client = iperf3.Client()
>>> client.duration = 1
>>> client.server_hostname = '127.0.0.1'
>>> client.port = 5201
>>> client.run()
{'intervals': [{'sum': {...
"""
def __init__(self, *args, **kwargs):
"""Initialise the iperf shared library"""
super(Client, self).__init__(role='c', *args, **kwargs)
# Internal variables
self._blksize = None
self._server_hostname = None
self._port = None
self._num_streams = None
self._zerocopy = False
self._omit = None
self._duration = None
self._bandwidth = None
self._protocol = None
@property
def server_hostname(self):
"""The server hostname to connect to.
Accepts DNS entries or IP addresses.
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_server_hostname(self._test)
).value
if result:
self._server_hostname = result.decode('utf-8')
else:
self._server_hostname = None
return self._server_hostname
@server_hostname.setter
def server_hostname(self, hostname):
self.lib.iperf_set_test_server_hostname(
self._test,
c_char_p(hostname.encode('utf-8'))
)
self._server_hostname = hostname
@property
def protocol(self):
"""The iperf3 instance protocol
valid protocols are 'tcp' and 'udp'
:rtype: str
"""
proto_id = self.lib.iperf_get_test_protocol_id(self._test)
if proto_id == SOCK_STREAM:
self._protocol = 'tcp'
elif proto_id == SOCK_DGRAM:
self._protocol = 'udp'
return self._protocol
@protocol.setter
def protocol(self, protocol):
if protocol == 'tcp':
self.lib.set_protocol(self._test, int(SOCK_STREAM))
elif protocol == 'udp':
self.lib.set_protocol(self._test, int(SOCK_DGRAM))
if self.blksize > MAX_UDP_BULKSIZE:
self.blksize = MAX_UDP_BULKSIZE
self._protocol = protocol
@property
def omit(self):
"""The test startup duration to omit in seconds."""
self._omit = self.lib.iperf_get_test_omit(self._test)
return self._omit
@omit.setter
def omit(self, omit):
self.lib.iperf_set_test_omit(self._test, omit)
self._omit = omit
@property
def duration(self):
"""The test duration in seconds."""
self._duration = self.lib.iperf_get_test_duration(self._test)
return self._duration
@duration.setter
def duration(self, duration):
self.lib.iperf_set_test_duration(self._test, duration)
self._duration = duration
@property
def bandwidth(self):
"""Target bandwidth in bits/sec"""
self._bandwidth = self.lib.iperf_get_test_rate(self._test)
return self._bandwidth
@bandwidth.setter
def bandwidth(self, bandwidth):
self.lib.iperf_set_test_rate(self._test, bandwidth)
self._bandwidth = bandwidth
@property
def blksize(self):
"""The test blksize."""
self._blksize = self.lib.iperf_get_test_blksize(self._test)
return self._blksize
@blksize.setter
def blksize(self, bulksize):
# iperf version < 3.1.3 has some weird bugs when bulksize is
# larger than MAX_UDP_BULKSIZE
if self.protocol == 'udp' and bulksize > MAX_UDP_BULKSIZE:
bulksize = MAX_UDP_BULKSIZE
self.lib.iperf_set_test_blksize(self._test, bulksize)
self._blksize = bulksize
@property
def bulksize(self):
"""The test bulksize.
Deprecated argument, use blksize instead to ensure consistency
with iperf3 C libary
"""
# Keeping bulksize argument for backwards compatibility with
# iperf3-python < 0.1.7
return self.blksize
@bulksize.setter
def bulksize(self, bulksize):
# Keeping bulksize argument for backwards compatibility with
# iperf3-python < 0.1.7
self.blksize = bulksize
@property
def num_streams(self):
"""The number of streams to use."""
self._num_streams = self.lib.iperf_get_test_num_streams(self._test)
return self._num_streams
@num_streams.setter
def num_streams(self, number):
self.lib.iperf_set_test_num_streams(self._test, number)
self._num_streams = number
@property
def zerocopy(self):
"""Toggle zerocopy.
Use the sendfile() system call for "Zero Copy" mode. This uses much
less CPU. This is not supported on all systems.
**Note** there isn't a hook in the libiperf library for getting the
current configured value. Relying on zerocopy.setter function
:rtype: bool
"""
return self._zerocopy
@zerocopy.setter
def zerocopy(self, enabled):
if enabled and self.lib.iperf_has_zerocopy():
self.lib.iperf_set_test_zerocopy(self._test, 1)
self._zerocopy = True
else:
self.lib.iperf_set_test_zerocopy(self._test, 0)
self._zerocopy = False
@property
def reverse(self):
"""Toggles direction of test
:rtype: bool
"""
enabled = self.lib.iperf_get_test_reverse(self._test)
if enabled:
self._reverse = True
else:
self._reverse = False
return self._reverse
@reverse.setter
def reverse(self, enabled):
if enabled:
self.lib.iperf_set_test_reverse(self._test, 1)
else:
self.lib.iperf_set_test_reverse(self._test, 0)
self._reverse = enabled
def run(self):
"""Run the current test client.
:rtype: instance of :class:`TestResult`
"""
if self.json_output:
output_to_pipe(self._pipe_in) # Disable stdout
error = self.lib.iperf_run_client(self._test)
if not self.iperf_version.startswith('iperf 3.1'):
data = read_pipe(self._pipe_out)
if data.startswith('Control connection'):
data = '{' + data.split('{', 1)[1]
else:
data = c_char_p(
self.lib.iperf_get_test_json_output_string(self._test)
).value
if data:
data = data.decode('utf-8')
output_to_screen(self._stdout_fd, self._stderr_fd) # enable stdout
if not data or error:
data = '{"error": "%s"}' % self._error_to_string(self._errno)
return TestResult(data)
class Server(IPerf3):
"""An iperf3 server connection.
This starts an iperf3 server session. The server terminates after each
succesful client connection so it might be useful to run Server.run()
in a loop.
The C function iperf_run_server is called in a seperate thread to make
sure KeyboardInterrupt(aka ctrl+c) can still be captured
Basic Usage::
>>> import iperf3
>>> server = iperf3.Server()
>>> server.run()
{'start': {...
"""
def __init__(self, *args, **kwargs):
"""Initialise the iperf3 server instance"""
super(Server, self).__init__(role='s', *args, **kwargs)
def run(self):
"""Run the iperf3 server instance.
:rtype: instance of :class:`TestResult`
"""
def _run_in_thread(self, data_queue):
"""Runs the iperf_run_server
:param data_queue: thread-safe queue
"""
output_to_pipe(self._pipe_in) # disable stdout
error = self.lib.iperf_run_server(self._test)
output_to_screen(self._stdout_fd, self._stderr_fd) # enable stdout
# TODO json_output_string not available on earlier iperf3 builds
# have to build in a version check using self.iperf_version
# The following line should work on later versions:
# data = c_char_p(
# self.lib.iperf_get_test_json_output_string(self._test)
# ).value
data = read_pipe(self._pipe_out)
if not data or error:
data = '{"error": "%s"}' % self._error_to_string(self._errno)
self.lib.iperf_reset_test(self._test)
data_queue.put(data)
if self.json_output:
data_queue = Queue()
t = threading.Thread(
target=_run_in_thread, args=[self, data_queue]
)
t.daemon = True
t.start()
while t.is_alive():
t.join(.1)
return TestResult(data_queue.get())
else:
# setting json_output to False will output test to screen only
self.lib.iperf_run_server(self._test)
self.lib.iperf_reset_test(self._test)
return None
class TestResult(object):
"""Class containing iperf3 test results.
:param text: The raw result from libiperf as text
:param json: The raw result from libiperf asjson/dict
:param error: Error captured during test, None if all ok
:param time: Start time
:param timesecs: Start time in seconds
:param system_info: System info
:param version: Iperf Version
:param local_host: Local host ip
:param local_port: Local port number
:param remote_host: Remote host ip
:param remote_port: Remote port number
:param reverse: Test ran in reverse direction
:param protocol: 'TCP' or 'UDP'
:param num_streams: Number of test streams
:param blksize:
:param omit: Test duration to omit in the beginning in seconds
:param duration: Test duration (following omit duration) in seconds
:param local_cpu_total: The local total CPU load
:param local_cpu_user: The local user CPU load
:param local_cpu_system: The local system CPU load
:param remote_cpu_total: The remote total CPU load
:param remote_cpu_user: The remote user CPU load
:param remote_cpu_system: The remote system CPU load
TCP test specific
:param tcp_mss_default:
:param retransmits: amount of retransmits (Only returned from client)
:param sent_bytes: Sent bytes
:param sent_bps: Sent bits per second
:param sent_kbps: sent kilobits per second
:param sent_Mbps: Sent Megabits per second
:param sent_kB_s: Sent kiloBytes per second
:param sent_MB_s: Sent MegaBytes per second
:param received_bytes: Received bytes
:param received_bps: Received bits per second
:param received_kbps: Received kilobits per second
:param received_Mbps: Received Megabits per second
:param received_kB_s: Received kiloBytes per second
:param received_MB_s: Received MegaBytes per second
UDP test specific
:param bytes:
:param bps:
:param jitter_ms:
:param kbps:
:param Mbps:
:param kB_s:
:param MB_s:
:param packets:
:param lost_packets:
:param lost_percent:
:param seconds:
"""
def __init__(self, result):
"""Initialise TestResult
:param result: raw json output from :class:`Client` and :class:`Server`
"""
# The full result data
self.text = result
self.json = json.loads(result)
if 'error' in self.json:
self.error = self.json['error']
else:
self.error = None
# start time
self.time = self.json['start']['timestamp']['time']
self.timesecs = self.json['start']['timestamp']['timesecs']
# generic info
self.system_info = self.json['start']['system_info']
self.version = self.json['start']['version']
# connection details
connection_details = self.json['start']['connected'][0]
self.local_host = connection_details['local_host']
self.local_port = connection_details['local_port']
self.remote_host = connection_details['remote_host']
self.remote_port = connection_details['remote_port']
# test setup
self.tcp_mss_default = self.json['start'].get('tcp_mss_default')
self.protocol = self.json['start']['test_start']['protocol']
self.num_streams = self.json['start']['test_start']['num_streams']
self.blksize = self.json['start']['test_start']['blksize']
self.omit = self.json['start']['test_start']['omit']
self.duration = self.json['start']['test_start']['duration']
# system performance
cpu_utilization_perc = self.json['end']['cpu_utilization_percent']
self.local_cpu_total = cpu_utilization_perc['host_total']
self.local_cpu_user = cpu_utilization_perc['host_user']
self.local_cpu_system = cpu_utilization_perc['host_system']
self.remote_cpu_total = cpu_utilization_perc['remote_total']
self.remote_cpu_user = cpu_utilization_perc['remote_user']
self.remote_cpu_system = cpu_utilization_perc['remote_system']
# TCP specific test results
if self.protocol == 'TCP':
sent_json = self.json['end']['sum_sent']
self.sent_bytes = sent_json['bytes']
self.sent_bps = sent_json['bits_per_second']
recv_json = self.json['end']['sum_received']
self.received_bytes = recv_json['bytes']
self.received_bps = recv_json['bits_per_second']
# Bits are measured in 10**3 terms
# Bytes are measured in 2**10 terms
# kbps = Kilobits per second
# Mbps = Megabits per second
# kB_s = kiloBytes per second
# MB_s = MegaBytes per second
self.sent_kbps = self.sent_bps / 1000
self.sent_Mbps = self.sent_kbps / 1000
self.sent_kB_s = self.sent_bps / (8 * 1024)
self.sent_MB_s = self.sent_kB_s / 1024
self.received_kbps = self.received_bps / 1000
self.received_Mbps = self.received_kbps / 1000
self.received_kB_s = self.received_bps / (8 * 1024)
self.received_MB_s = self.received_kB_s / 1024
# retransmits only returned from client
self.retransmits = sent_json.get('retransmits')
# UDP specific test results
elif self.protocol == 'UDP':
self.bytes = self.json['end']['sum']['bytes']
self.bps = self.json['end']['sum']['bits_per_second']
self.jitter_ms = self.json['end']['sum']['jitter_ms']
self.kbps = self.bps / 1000
self.Mbps = self.kbps / 1000
self.kB_s = self.bps / (8 * 1024)
self.MB_s = self.kB_s / 1024
self.packets = self.json['end']['sum']['packets']
self.lost_packets = self.json['end']['sum']['lost_packets']
self.lost_percent = self.json['end']['sum']['lost_percent']
self.seconds = self.json['end']['sum']['seconds']
@property
def reverse(self):
if self.json['start']['test_start']['reverse']:
return True
else:
return False
@property
def type(self):
if 'connecting_to' in self.json['start']:
return 'client'
else:
return 'server'
def __repr__(self):
"""Print the result as received from iperf3"""
return self.text
|
thiezn/iperf3-python | iperf3/iperf3.py | IPerf3.role | python | def role(self):
try:
self._role = c_char(
self.lib.iperf_get_test_role(self._test)
).value.decode('utf-8')
except TypeError:
self._role = c_char(
chr(self.lib.iperf_get_test_role(self._test))
).value.decode('utf-8')
return self._role | The iperf3 instance role
valid roles are 'c'=client and 's'=server
:rtype: 'c' or 's' | train | https://github.com/thiezn/iperf3-python/blob/094a6e043f44fb154988348603661b1473c23a50/iperf3/iperf3.py#L246-L261 | null | class IPerf3(object):
"""The base class used by both the iperf3 :class:`Server` and :class:`Client`
.. note:: You should not use this class directly
"""
def __init__(self,
role,
verbose=True,
lib_name=None):
"""Initialise the iperf shared library
:param role: 'c' = client; 's' = server
:param verbose: enable verbose output
:param lib_name: optional name and path for libiperf.so.0 library
"""
if lib_name is None:
lib_name = util.find_library('libiperf')
if lib_name is None:
# If we still couldn't find it lets try the manual approach
lib_name = 'libiperf.so.0'
try:
self.lib = cdll.LoadLibrary(lib_name)
except OSError:
raise OSError(
"Couldn't find shared library {}, is iperf3 installed?".format(
lib_name
)
)
# Set the appropriate C types.
self.lib.iperf_client_end.restype = c_int
self.lib.iperf_client_end.argtypes = (c_void_p,)
self.lib.iperf_free_test.restxpe = None
self.lib.iperf_free_test.argtypes = (c_void_p,)
self.lib.iperf_new_test.restype = c_void_p
self.lib.iperf_new_test.argtypes = None
self.lib.iperf_defaults.restype = c_int
self.lib.iperf_defaults.argtypes = (c_void_p,)
self.lib.iperf_get_test_role.restype = c_char
self.lib.iperf_get_test_role.argtypes = (c_void_p,)
self.lib.iperf_set_test_role.restype = None
self.lib.iperf_set_test_role.argtypes = (c_void_p, c_char,)
self.lib.iperf_get_test_bind_address.restype = c_char_p
self.lib.iperf_get_test_bind_address.argtypes = (c_void_p,)
self.lib.iperf_set_test_bind_address.restype = None
self.lib.iperf_set_test_bind_address.argtypes = (c_void_p, c_char_p,)
self.lib.iperf_get_test_server_port.restype = c_int
self.lib.iperf_get_test_server_port.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_port.restype = None
self.lib.iperf_set_test_server_port.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_json_output.restype = c_int
self.lib.iperf_get_test_json_output.argtypes = (c_void_p,)
self.lib.iperf_set_test_json_output.restype = None
self.lib.iperf_set_test_json_output.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_verbose.restype = c_int
self.lib.iperf_get_verbose.argtypes = (c_void_p,)
self.lib.iperf_set_verbose.restype = None
self.lib.iperf_set_verbose.argtypes = (c_void_p, c_int)
self.lib.iperf_strerror.restype = c_char_p
self.lib.iperf_strerror.argtypes = (c_int,)
self.lib.iperf_get_test_server_hostname.restype = c_char_p
self.lib.iperf_get_test_server_hostname.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_hostname.restype = None
self.lib.iperf_set_test_server_hostname.argtypes = (
c_void_p, c_char_p,
)
self.lib.iperf_get_test_protocol_id.restype = c_int
self.lib.iperf_get_test_protocol_id.argtypes = (c_void_p,)
self.lib.set_protocol.restype = c_int
self.lib.set_protocol.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_omit.restype = c_int
self.lib.iperf_get_test_omit.argtypes = (c_void_p,)
self.lib.iperf_set_test_omit.restype = None
self.lib.iperf_set_test_omit.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_duration.restype = c_int
self.lib.iperf_get_test_duration.argtypes = (c_void_p,)
self.lib.iperf_set_test_duration.restype = None
self.lib.iperf_set_test_duration.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_rate.restype = c_uint64
self.lib.iperf_get_test_rate.argtypes = (c_void_p,)
self.lib.iperf_set_test_rate.restype = None
self.lib.iperf_set_test_rate.argtypes = (c_void_p, c_uint64,)
self.lib.iperf_get_test_blksize.restype = c_int
self.lib.iperf_get_test_blksize.argtypes = (c_void_p,)
self.lib.iperf_set_test_blksize.restype = None
self.lib.iperf_set_test_blksize.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_num_streams.restype = c_int
self.lib.iperf_get_test_num_streams.argtypes = (c_void_p,)
self.lib.iperf_set_test_num_streams.restype = None
self.lib.iperf_set_test_num_streams.argtypes = (c_void_p, c_int,)
self.lib.iperf_has_zerocopy.restype = c_int
self.lib.iperf_has_zerocopy.argtypes = None
self.lib.iperf_set_test_zerocopy.restype = None
self.lib.iperf_set_test_zerocopy.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_reverse.restype = c_int
self.lib.iperf_get_test_reverse.argtypes = (c_void_p,)
self.lib.iperf_set_test_reverse.restype = None
self.lib.iperf_set_test_reverse.argtypes = (c_void_p, c_int,)
self.lib.iperf_run_client.restype = c_int
self.lib.iperf_run_client.argtypes = (c_void_p,)
self.lib.iperf_run_server.restype = c_int
self.lib.iperf_run_server.argtypes = (c_void_p,)
self.lib.iperf_reset_test.restype = None
self.lib.iperf_reset_test.argtypes = (c_void_p,)
try:
# Only available from iperf v3.1 and onwards
self.lib.iperf_get_test_json_output_string.restype = c_char_p
self.lib.iperf_get_test_json_output_string.argtypes = (c_void_p,)
except AttributeError:
pass
# The test C struct iperf_test
self._test = self._new()
self.defaults()
# stdout/strerr redirection variables
self._stdout_fd = os.dup(1)
self._stderr_fd = os.dup(2)
self._pipe_out, self._pipe_in = os.pipe() # no need for pipe write
# Generic test settings
self.role = role
self.json_output = True
self.verbose = verbose
def __del__(self):
"""Cleanup the test after the :class:`IPerf3` class is terminated"""
os.close(self._stdout_fd)
os.close(self._stderr_fd)
os.close(self._pipe_out)
os.close(self._pipe_in)
try:
# In the current version of libiperf, the control socket isn't
# closed on iperf_client_end(), see proposed pull request:
# https://github.com/esnet/iperf/pull/597
# Workaround for testing, don't ever do this..:
#
# sck=self.lib.iperf_get_control_socket(self._test)
# os.close(sck)
self.lib.iperf_client_end(self._test)
self.lib.iperf_free_test(self._test)
except AttributeError:
# self.lib doesn't exist, likely because iperf3 wasn't installed or
# the shared library libiperf.so.0 could not be found
pass
def _new(self):
"""Initialise a new iperf test
struct iperf_test *iperf_new_test()
"""
return self.lib.iperf_new_test()
def defaults(self):
"""Set/reset iperf test defaults."""
self.lib.iperf_defaults(self._test)
@property
@role.setter
def role(self, role):
if role.lower() in ['c', 's']:
self.lib.iperf_set_test_role(
self._test,
c_char(role.lower().encode('utf-8'))
)
self._role = role
else:
raise ValueError("Unknown role, accepted values are 'c' and 's'")
@property
def bind_address(self):
"""The bind address the iperf3 instance will listen on
use * to listen on all available IPs
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_bind_address(self._test)
).value
if result:
self._bind_address = result.decode('utf-8')
else:
self._bind_address = '*'
return self._bind_address
@bind_address.setter
def bind_address(self, address):
self.lib.iperf_set_test_bind_address(
self._test,
c_char_p(address.encode('utf-8'))
)
self._bind_address = address
@property
def port(self):
"""The port the iperf3 server is listening on"""
self._port = self.lib.iperf_get_test_server_port(self._test)
return self._port
@port.setter
def port(self, port):
self.lib.iperf_set_test_server_port(self._test, int(port))
self._port = port
@property
def json_output(self):
"""Toggles json output of libiperf
Turning this off will output the iperf3 instance results to
stdout/stderr
:rtype: bool
"""
enabled = self.lib.iperf_get_test_json_output(self._test)
if enabled:
self._json_output = True
else:
self._json_output = False
return self._json_output
@json_output.setter
def json_output(self, enabled):
if enabled:
self.lib.iperf_set_test_json_output(self._test, 1)
else:
self.lib.iperf_set_test_json_output(self._test, 0)
self._json_output = enabled
@property
def verbose(self):
"""Toggles verbose output for the iperf3 instance
:rtype: bool
"""
enabled = self.lib.iperf_get_verbose(self._test)
if enabled:
self._verbose = True
else:
self._verbose = False
return self._verbose
@verbose.setter
def verbose(self, enabled):
if enabled:
self.lib.iperf_set_verbose(self._test, 1)
else:
self.lib.iperf_set_verbose(self._test, 0)
self._verbose = enabled
@property
def _errno(self):
"""Returns the last error ID
:rtype: int
"""
return c_int.in_dll(self.lib, "i_errno").value
@property
def iperf_version(self):
"""Returns the version of the libiperf library
:rtype: string
"""
# TODO: Is there a better way to get the const char than allocating 30?
VersionType = c_char * 30
return VersionType.in_dll(self.lib, "version").value.decode('utf-8')
def _error_to_string(self, error_id):
"""Returns an error string from libiperf
:param error_id: The error_id produced by libiperf
:rtype: string
"""
strerror = self.lib.iperf_strerror
strerror.restype = c_char_p
return strerror(error_id).decode('utf-8')
def run(self):
"""Runs the iperf3 instance.
This function has to be instantiated by the Client and Server
instances
:rtype: NotImplementedError
"""
raise NotImplementedError
|
thiezn/iperf3-python | iperf3/iperf3.py | IPerf3.bind_address | python | def bind_address(self):
result = c_char_p(
self.lib.iperf_get_test_bind_address(self._test)
).value
if result:
self._bind_address = result.decode('utf-8')
else:
self._bind_address = '*'
return self._bind_address | The bind address the iperf3 instance will listen on
use * to listen on all available IPs
:rtype: string | train | https://github.com/thiezn/iperf3-python/blob/094a6e043f44fb154988348603661b1473c23a50/iperf3/iperf3.py#L275-L289 | null | class IPerf3(object):
"""The base class used by both the iperf3 :class:`Server` and :class:`Client`
.. note:: You should not use this class directly
"""
def __init__(self,
role,
verbose=True,
lib_name=None):
"""Initialise the iperf shared library
:param role: 'c' = client; 's' = server
:param verbose: enable verbose output
:param lib_name: optional name and path for libiperf.so.0 library
"""
if lib_name is None:
lib_name = util.find_library('libiperf')
if lib_name is None:
# If we still couldn't find it lets try the manual approach
lib_name = 'libiperf.so.0'
try:
self.lib = cdll.LoadLibrary(lib_name)
except OSError:
raise OSError(
"Couldn't find shared library {}, is iperf3 installed?".format(
lib_name
)
)
# Set the appropriate C types.
self.lib.iperf_client_end.restype = c_int
self.lib.iperf_client_end.argtypes = (c_void_p,)
self.lib.iperf_free_test.restxpe = None
self.lib.iperf_free_test.argtypes = (c_void_p,)
self.lib.iperf_new_test.restype = c_void_p
self.lib.iperf_new_test.argtypes = None
self.lib.iperf_defaults.restype = c_int
self.lib.iperf_defaults.argtypes = (c_void_p,)
self.lib.iperf_get_test_role.restype = c_char
self.lib.iperf_get_test_role.argtypes = (c_void_p,)
self.lib.iperf_set_test_role.restype = None
self.lib.iperf_set_test_role.argtypes = (c_void_p, c_char,)
self.lib.iperf_get_test_bind_address.restype = c_char_p
self.lib.iperf_get_test_bind_address.argtypes = (c_void_p,)
self.lib.iperf_set_test_bind_address.restype = None
self.lib.iperf_set_test_bind_address.argtypes = (c_void_p, c_char_p,)
self.lib.iperf_get_test_server_port.restype = c_int
self.lib.iperf_get_test_server_port.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_port.restype = None
self.lib.iperf_set_test_server_port.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_json_output.restype = c_int
self.lib.iperf_get_test_json_output.argtypes = (c_void_p,)
self.lib.iperf_set_test_json_output.restype = None
self.lib.iperf_set_test_json_output.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_verbose.restype = c_int
self.lib.iperf_get_verbose.argtypes = (c_void_p,)
self.lib.iperf_set_verbose.restype = None
self.lib.iperf_set_verbose.argtypes = (c_void_p, c_int)
self.lib.iperf_strerror.restype = c_char_p
self.lib.iperf_strerror.argtypes = (c_int,)
self.lib.iperf_get_test_server_hostname.restype = c_char_p
self.lib.iperf_get_test_server_hostname.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_hostname.restype = None
self.lib.iperf_set_test_server_hostname.argtypes = (
c_void_p, c_char_p,
)
self.lib.iperf_get_test_protocol_id.restype = c_int
self.lib.iperf_get_test_protocol_id.argtypes = (c_void_p,)
self.lib.set_protocol.restype = c_int
self.lib.set_protocol.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_omit.restype = c_int
self.lib.iperf_get_test_omit.argtypes = (c_void_p,)
self.lib.iperf_set_test_omit.restype = None
self.lib.iperf_set_test_omit.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_duration.restype = c_int
self.lib.iperf_get_test_duration.argtypes = (c_void_p,)
self.lib.iperf_set_test_duration.restype = None
self.lib.iperf_set_test_duration.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_rate.restype = c_uint64
self.lib.iperf_get_test_rate.argtypes = (c_void_p,)
self.lib.iperf_set_test_rate.restype = None
self.lib.iperf_set_test_rate.argtypes = (c_void_p, c_uint64,)
self.lib.iperf_get_test_blksize.restype = c_int
self.lib.iperf_get_test_blksize.argtypes = (c_void_p,)
self.lib.iperf_set_test_blksize.restype = None
self.lib.iperf_set_test_blksize.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_num_streams.restype = c_int
self.lib.iperf_get_test_num_streams.argtypes = (c_void_p,)
self.lib.iperf_set_test_num_streams.restype = None
self.lib.iperf_set_test_num_streams.argtypes = (c_void_p, c_int,)
self.lib.iperf_has_zerocopy.restype = c_int
self.lib.iperf_has_zerocopy.argtypes = None
self.lib.iperf_set_test_zerocopy.restype = None
self.lib.iperf_set_test_zerocopy.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_reverse.restype = c_int
self.lib.iperf_get_test_reverse.argtypes = (c_void_p,)
self.lib.iperf_set_test_reverse.restype = None
self.lib.iperf_set_test_reverse.argtypes = (c_void_p, c_int,)
self.lib.iperf_run_client.restype = c_int
self.lib.iperf_run_client.argtypes = (c_void_p,)
self.lib.iperf_run_server.restype = c_int
self.lib.iperf_run_server.argtypes = (c_void_p,)
self.lib.iperf_reset_test.restype = None
self.lib.iperf_reset_test.argtypes = (c_void_p,)
try:
# Only available from iperf v3.1 and onwards
self.lib.iperf_get_test_json_output_string.restype = c_char_p
self.lib.iperf_get_test_json_output_string.argtypes = (c_void_p,)
except AttributeError:
pass
# The test C struct iperf_test
self._test = self._new()
self.defaults()
# stdout/strerr redirection variables
self._stdout_fd = os.dup(1)
self._stderr_fd = os.dup(2)
self._pipe_out, self._pipe_in = os.pipe() # no need for pipe write
# Generic test settings
self.role = role
self.json_output = True
self.verbose = verbose
def __del__(self):
"""Cleanup the test after the :class:`IPerf3` class is terminated"""
os.close(self._stdout_fd)
os.close(self._stderr_fd)
os.close(self._pipe_out)
os.close(self._pipe_in)
try:
# In the current version of libiperf, the control socket isn't
# closed on iperf_client_end(), see proposed pull request:
# https://github.com/esnet/iperf/pull/597
# Workaround for testing, don't ever do this..:
#
# sck=self.lib.iperf_get_control_socket(self._test)
# os.close(sck)
self.lib.iperf_client_end(self._test)
self.lib.iperf_free_test(self._test)
except AttributeError:
# self.lib doesn't exist, likely because iperf3 wasn't installed or
# the shared library libiperf.so.0 could not be found
pass
def _new(self):
"""Initialise a new iperf test
struct iperf_test *iperf_new_test()
"""
return self.lib.iperf_new_test()
def defaults(self):
"""Set/reset iperf test defaults."""
self.lib.iperf_defaults(self._test)
@property
def role(self):
"""The iperf3 instance role
valid roles are 'c'=client and 's'=server
:rtype: 'c' or 's'
"""
try:
self._role = c_char(
self.lib.iperf_get_test_role(self._test)
).value.decode('utf-8')
except TypeError:
self._role = c_char(
chr(self.lib.iperf_get_test_role(self._test))
).value.decode('utf-8')
return self._role
@role.setter
def role(self, role):
if role.lower() in ['c', 's']:
self.lib.iperf_set_test_role(
self._test,
c_char(role.lower().encode('utf-8'))
)
self._role = role
else:
raise ValueError("Unknown role, accepted values are 'c' and 's'")
@property
@bind_address.setter
def bind_address(self, address):
self.lib.iperf_set_test_bind_address(
self._test,
c_char_p(address.encode('utf-8'))
)
self._bind_address = address
@property
def port(self):
"""The port the iperf3 server is listening on"""
self._port = self.lib.iperf_get_test_server_port(self._test)
return self._port
@port.setter
def port(self, port):
self.lib.iperf_set_test_server_port(self._test, int(port))
self._port = port
@property
def json_output(self):
"""Toggles json output of libiperf
Turning this off will output the iperf3 instance results to
stdout/stderr
:rtype: bool
"""
enabled = self.lib.iperf_get_test_json_output(self._test)
if enabled:
self._json_output = True
else:
self._json_output = False
return self._json_output
@json_output.setter
def json_output(self, enabled):
if enabled:
self.lib.iperf_set_test_json_output(self._test, 1)
else:
self.lib.iperf_set_test_json_output(self._test, 0)
self._json_output = enabled
@property
def verbose(self):
"""Toggles verbose output for the iperf3 instance
:rtype: bool
"""
enabled = self.lib.iperf_get_verbose(self._test)
if enabled:
self._verbose = True
else:
self._verbose = False
return self._verbose
@verbose.setter
def verbose(self, enabled):
if enabled:
self.lib.iperf_set_verbose(self._test, 1)
else:
self.lib.iperf_set_verbose(self._test, 0)
self._verbose = enabled
@property
def _errno(self):
"""Returns the last error ID
:rtype: int
"""
return c_int.in_dll(self.lib, "i_errno").value
@property
def iperf_version(self):
"""Returns the version of the libiperf library
:rtype: string
"""
# TODO: Is there a better way to get the const char than allocating 30?
VersionType = c_char * 30
return VersionType.in_dll(self.lib, "version").value.decode('utf-8')
def _error_to_string(self, error_id):
"""Returns an error string from libiperf
:param error_id: The error_id produced by libiperf
:rtype: string
"""
strerror = self.lib.iperf_strerror
strerror.restype = c_char_p
return strerror(error_id).decode('utf-8')
def run(self):
"""Runs the iperf3 instance.
This function has to be instantiated by the Client and Server
instances
:rtype: NotImplementedError
"""
raise NotImplementedError
|
thiezn/iperf3-python | iperf3/iperf3.py | IPerf3.port | python | def port(self):
self._port = self.lib.iperf_get_test_server_port(self._test)
return self._port | The port the iperf3 server is listening on | train | https://github.com/thiezn/iperf3-python/blob/094a6e043f44fb154988348603661b1473c23a50/iperf3/iperf3.py#L300-L303 | null | class IPerf3(object):
"""The base class used by both the iperf3 :class:`Server` and :class:`Client`
.. note:: You should not use this class directly
"""
def __init__(self,
role,
verbose=True,
lib_name=None):
"""Initialise the iperf shared library
:param role: 'c' = client; 's' = server
:param verbose: enable verbose output
:param lib_name: optional name and path for libiperf.so.0 library
"""
if lib_name is None:
lib_name = util.find_library('libiperf')
if lib_name is None:
# If we still couldn't find it lets try the manual approach
lib_name = 'libiperf.so.0'
try:
self.lib = cdll.LoadLibrary(lib_name)
except OSError:
raise OSError(
"Couldn't find shared library {}, is iperf3 installed?".format(
lib_name
)
)
# Set the appropriate C types.
self.lib.iperf_client_end.restype = c_int
self.lib.iperf_client_end.argtypes = (c_void_p,)
self.lib.iperf_free_test.restxpe = None
self.lib.iperf_free_test.argtypes = (c_void_p,)
self.lib.iperf_new_test.restype = c_void_p
self.lib.iperf_new_test.argtypes = None
self.lib.iperf_defaults.restype = c_int
self.lib.iperf_defaults.argtypes = (c_void_p,)
self.lib.iperf_get_test_role.restype = c_char
self.lib.iperf_get_test_role.argtypes = (c_void_p,)
self.lib.iperf_set_test_role.restype = None
self.lib.iperf_set_test_role.argtypes = (c_void_p, c_char,)
self.lib.iperf_get_test_bind_address.restype = c_char_p
self.lib.iperf_get_test_bind_address.argtypes = (c_void_p,)
self.lib.iperf_set_test_bind_address.restype = None
self.lib.iperf_set_test_bind_address.argtypes = (c_void_p, c_char_p,)
self.lib.iperf_get_test_server_port.restype = c_int
self.lib.iperf_get_test_server_port.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_port.restype = None
self.lib.iperf_set_test_server_port.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_json_output.restype = c_int
self.lib.iperf_get_test_json_output.argtypes = (c_void_p,)
self.lib.iperf_set_test_json_output.restype = None
self.lib.iperf_set_test_json_output.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_verbose.restype = c_int
self.lib.iperf_get_verbose.argtypes = (c_void_p,)
self.lib.iperf_set_verbose.restype = None
self.lib.iperf_set_verbose.argtypes = (c_void_p, c_int)
self.lib.iperf_strerror.restype = c_char_p
self.lib.iperf_strerror.argtypes = (c_int,)
self.lib.iperf_get_test_server_hostname.restype = c_char_p
self.lib.iperf_get_test_server_hostname.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_hostname.restype = None
self.lib.iperf_set_test_server_hostname.argtypes = (
c_void_p, c_char_p,
)
self.lib.iperf_get_test_protocol_id.restype = c_int
self.lib.iperf_get_test_protocol_id.argtypes = (c_void_p,)
self.lib.set_protocol.restype = c_int
self.lib.set_protocol.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_omit.restype = c_int
self.lib.iperf_get_test_omit.argtypes = (c_void_p,)
self.lib.iperf_set_test_omit.restype = None
self.lib.iperf_set_test_omit.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_duration.restype = c_int
self.lib.iperf_get_test_duration.argtypes = (c_void_p,)
self.lib.iperf_set_test_duration.restype = None
self.lib.iperf_set_test_duration.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_rate.restype = c_uint64
self.lib.iperf_get_test_rate.argtypes = (c_void_p,)
self.lib.iperf_set_test_rate.restype = None
self.lib.iperf_set_test_rate.argtypes = (c_void_p, c_uint64,)
self.lib.iperf_get_test_blksize.restype = c_int
self.lib.iperf_get_test_blksize.argtypes = (c_void_p,)
self.lib.iperf_set_test_blksize.restype = None
self.lib.iperf_set_test_blksize.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_num_streams.restype = c_int
self.lib.iperf_get_test_num_streams.argtypes = (c_void_p,)
self.lib.iperf_set_test_num_streams.restype = None
self.lib.iperf_set_test_num_streams.argtypes = (c_void_p, c_int,)
self.lib.iperf_has_zerocopy.restype = c_int
self.lib.iperf_has_zerocopy.argtypes = None
self.lib.iperf_set_test_zerocopy.restype = None
self.lib.iperf_set_test_zerocopy.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_reverse.restype = c_int
self.lib.iperf_get_test_reverse.argtypes = (c_void_p,)
self.lib.iperf_set_test_reverse.restype = None
self.lib.iperf_set_test_reverse.argtypes = (c_void_p, c_int,)
self.lib.iperf_run_client.restype = c_int
self.lib.iperf_run_client.argtypes = (c_void_p,)
self.lib.iperf_run_server.restype = c_int
self.lib.iperf_run_server.argtypes = (c_void_p,)
self.lib.iperf_reset_test.restype = None
self.lib.iperf_reset_test.argtypes = (c_void_p,)
try:
# Only available from iperf v3.1 and onwards
self.lib.iperf_get_test_json_output_string.restype = c_char_p
self.lib.iperf_get_test_json_output_string.argtypes = (c_void_p,)
except AttributeError:
pass
# The test C struct iperf_test
self._test = self._new()
self.defaults()
# stdout/strerr redirection variables
self._stdout_fd = os.dup(1)
self._stderr_fd = os.dup(2)
self._pipe_out, self._pipe_in = os.pipe() # no need for pipe write
# Generic test settings
self.role = role
self.json_output = True
self.verbose = verbose
def __del__(self):
"""Cleanup the test after the :class:`IPerf3` class is terminated"""
os.close(self._stdout_fd)
os.close(self._stderr_fd)
os.close(self._pipe_out)
os.close(self._pipe_in)
try:
# In the current version of libiperf, the control socket isn't
# closed on iperf_client_end(), see proposed pull request:
# https://github.com/esnet/iperf/pull/597
# Workaround for testing, don't ever do this..:
#
# sck=self.lib.iperf_get_control_socket(self._test)
# os.close(sck)
self.lib.iperf_client_end(self._test)
self.lib.iperf_free_test(self._test)
except AttributeError:
# self.lib doesn't exist, likely because iperf3 wasn't installed or
# the shared library libiperf.so.0 could not be found
pass
def _new(self):
"""Initialise a new iperf test
struct iperf_test *iperf_new_test()
"""
return self.lib.iperf_new_test()
def defaults(self):
"""Set/reset iperf test defaults."""
self.lib.iperf_defaults(self._test)
@property
def role(self):
"""The iperf3 instance role
valid roles are 'c'=client and 's'=server
:rtype: 'c' or 's'
"""
try:
self._role = c_char(
self.lib.iperf_get_test_role(self._test)
).value.decode('utf-8')
except TypeError:
self._role = c_char(
chr(self.lib.iperf_get_test_role(self._test))
).value.decode('utf-8')
return self._role
@role.setter
def role(self, role):
if role.lower() in ['c', 's']:
self.lib.iperf_set_test_role(
self._test,
c_char(role.lower().encode('utf-8'))
)
self._role = role
else:
raise ValueError("Unknown role, accepted values are 'c' and 's'")
@property
def bind_address(self):
"""The bind address the iperf3 instance will listen on
use * to listen on all available IPs
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_bind_address(self._test)
).value
if result:
self._bind_address = result.decode('utf-8')
else:
self._bind_address = '*'
return self._bind_address
@bind_address.setter
def bind_address(self, address):
self.lib.iperf_set_test_bind_address(
self._test,
c_char_p(address.encode('utf-8'))
)
self._bind_address = address
@property
@port.setter
def port(self, port):
self.lib.iperf_set_test_server_port(self._test, int(port))
self._port = port
@property
def json_output(self):
"""Toggles json output of libiperf
Turning this off will output the iperf3 instance results to
stdout/stderr
:rtype: bool
"""
enabled = self.lib.iperf_get_test_json_output(self._test)
if enabled:
self._json_output = True
else:
self._json_output = False
return self._json_output
@json_output.setter
def json_output(self, enabled):
if enabled:
self.lib.iperf_set_test_json_output(self._test, 1)
else:
self.lib.iperf_set_test_json_output(self._test, 0)
self._json_output = enabled
@property
def verbose(self):
"""Toggles verbose output for the iperf3 instance
:rtype: bool
"""
enabled = self.lib.iperf_get_verbose(self._test)
if enabled:
self._verbose = True
else:
self._verbose = False
return self._verbose
@verbose.setter
def verbose(self, enabled):
if enabled:
self.lib.iperf_set_verbose(self._test, 1)
else:
self.lib.iperf_set_verbose(self._test, 0)
self._verbose = enabled
@property
def _errno(self):
"""Returns the last error ID
:rtype: int
"""
return c_int.in_dll(self.lib, "i_errno").value
@property
def iperf_version(self):
"""Returns the version of the libiperf library
:rtype: string
"""
# TODO: Is there a better way to get the const char than allocating 30?
VersionType = c_char * 30
return VersionType.in_dll(self.lib, "version").value.decode('utf-8')
def _error_to_string(self, error_id):
"""Returns an error string from libiperf
:param error_id: The error_id produced by libiperf
:rtype: string
"""
strerror = self.lib.iperf_strerror
strerror.restype = c_char_p
return strerror(error_id).decode('utf-8')
def run(self):
"""Runs the iperf3 instance.
This function has to be instantiated by the Client and Server
instances
:rtype: NotImplementedError
"""
raise NotImplementedError
|
thiezn/iperf3-python | iperf3/iperf3.py | IPerf3.json_output | python | def json_output(self):
enabled = self.lib.iperf_get_test_json_output(self._test)
if enabled:
self._json_output = True
else:
self._json_output = False
return self._json_output | Toggles json output of libiperf
Turning this off will output the iperf3 instance results to
stdout/stderr
:rtype: bool | train | https://github.com/thiezn/iperf3-python/blob/094a6e043f44fb154988348603661b1473c23a50/iperf3/iperf3.py#L311-L326 | null | class IPerf3(object):
"""The base class used by both the iperf3 :class:`Server` and :class:`Client`
.. note:: You should not use this class directly
"""
def __init__(self,
role,
verbose=True,
lib_name=None):
"""Initialise the iperf shared library
:param role: 'c' = client; 's' = server
:param verbose: enable verbose output
:param lib_name: optional name and path for libiperf.so.0 library
"""
if lib_name is None:
lib_name = util.find_library('libiperf')
if lib_name is None:
# If we still couldn't find it lets try the manual approach
lib_name = 'libiperf.so.0'
try:
self.lib = cdll.LoadLibrary(lib_name)
except OSError:
raise OSError(
"Couldn't find shared library {}, is iperf3 installed?".format(
lib_name
)
)
# Set the appropriate C types.
self.lib.iperf_client_end.restype = c_int
self.lib.iperf_client_end.argtypes = (c_void_p,)
self.lib.iperf_free_test.restxpe = None
self.lib.iperf_free_test.argtypes = (c_void_p,)
self.lib.iperf_new_test.restype = c_void_p
self.lib.iperf_new_test.argtypes = None
self.lib.iperf_defaults.restype = c_int
self.lib.iperf_defaults.argtypes = (c_void_p,)
self.lib.iperf_get_test_role.restype = c_char
self.lib.iperf_get_test_role.argtypes = (c_void_p,)
self.lib.iperf_set_test_role.restype = None
self.lib.iperf_set_test_role.argtypes = (c_void_p, c_char,)
self.lib.iperf_get_test_bind_address.restype = c_char_p
self.lib.iperf_get_test_bind_address.argtypes = (c_void_p,)
self.lib.iperf_set_test_bind_address.restype = None
self.lib.iperf_set_test_bind_address.argtypes = (c_void_p, c_char_p,)
self.lib.iperf_get_test_server_port.restype = c_int
self.lib.iperf_get_test_server_port.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_port.restype = None
self.lib.iperf_set_test_server_port.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_json_output.restype = c_int
self.lib.iperf_get_test_json_output.argtypes = (c_void_p,)
self.lib.iperf_set_test_json_output.restype = None
self.lib.iperf_set_test_json_output.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_verbose.restype = c_int
self.lib.iperf_get_verbose.argtypes = (c_void_p,)
self.lib.iperf_set_verbose.restype = None
self.lib.iperf_set_verbose.argtypes = (c_void_p, c_int)
self.lib.iperf_strerror.restype = c_char_p
self.lib.iperf_strerror.argtypes = (c_int,)
self.lib.iperf_get_test_server_hostname.restype = c_char_p
self.lib.iperf_get_test_server_hostname.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_hostname.restype = None
self.lib.iperf_set_test_server_hostname.argtypes = (
c_void_p, c_char_p,
)
self.lib.iperf_get_test_protocol_id.restype = c_int
self.lib.iperf_get_test_protocol_id.argtypes = (c_void_p,)
self.lib.set_protocol.restype = c_int
self.lib.set_protocol.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_omit.restype = c_int
self.lib.iperf_get_test_omit.argtypes = (c_void_p,)
self.lib.iperf_set_test_omit.restype = None
self.lib.iperf_set_test_omit.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_duration.restype = c_int
self.lib.iperf_get_test_duration.argtypes = (c_void_p,)
self.lib.iperf_set_test_duration.restype = None
self.lib.iperf_set_test_duration.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_rate.restype = c_uint64
self.lib.iperf_get_test_rate.argtypes = (c_void_p,)
self.lib.iperf_set_test_rate.restype = None
self.lib.iperf_set_test_rate.argtypes = (c_void_p, c_uint64,)
self.lib.iperf_get_test_blksize.restype = c_int
self.lib.iperf_get_test_blksize.argtypes = (c_void_p,)
self.lib.iperf_set_test_blksize.restype = None
self.lib.iperf_set_test_blksize.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_num_streams.restype = c_int
self.lib.iperf_get_test_num_streams.argtypes = (c_void_p,)
self.lib.iperf_set_test_num_streams.restype = None
self.lib.iperf_set_test_num_streams.argtypes = (c_void_p, c_int,)
self.lib.iperf_has_zerocopy.restype = c_int
self.lib.iperf_has_zerocopy.argtypes = None
self.lib.iperf_set_test_zerocopy.restype = None
self.lib.iperf_set_test_zerocopy.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_reverse.restype = c_int
self.lib.iperf_get_test_reverse.argtypes = (c_void_p,)
self.lib.iperf_set_test_reverse.restype = None
self.lib.iperf_set_test_reverse.argtypes = (c_void_p, c_int,)
self.lib.iperf_run_client.restype = c_int
self.lib.iperf_run_client.argtypes = (c_void_p,)
self.lib.iperf_run_server.restype = c_int
self.lib.iperf_run_server.argtypes = (c_void_p,)
self.lib.iperf_reset_test.restype = None
self.lib.iperf_reset_test.argtypes = (c_void_p,)
try:
# Only available from iperf v3.1 and onwards
self.lib.iperf_get_test_json_output_string.restype = c_char_p
self.lib.iperf_get_test_json_output_string.argtypes = (c_void_p,)
except AttributeError:
pass
# The test C struct iperf_test
self._test = self._new()
self.defaults()
# stdout/strerr redirection variables
self._stdout_fd = os.dup(1)
self._stderr_fd = os.dup(2)
self._pipe_out, self._pipe_in = os.pipe() # no need for pipe write
# Generic test settings
self.role = role
self.json_output = True
self.verbose = verbose
def __del__(self):
"""Cleanup the test after the :class:`IPerf3` class is terminated"""
os.close(self._stdout_fd)
os.close(self._stderr_fd)
os.close(self._pipe_out)
os.close(self._pipe_in)
try:
# In the current version of libiperf, the control socket isn't
# closed on iperf_client_end(), see proposed pull request:
# https://github.com/esnet/iperf/pull/597
# Workaround for testing, don't ever do this..:
#
# sck=self.lib.iperf_get_control_socket(self._test)
# os.close(sck)
self.lib.iperf_client_end(self._test)
self.lib.iperf_free_test(self._test)
except AttributeError:
# self.lib doesn't exist, likely because iperf3 wasn't installed or
# the shared library libiperf.so.0 could not be found
pass
def _new(self):
"""Initialise a new iperf test
struct iperf_test *iperf_new_test()
"""
return self.lib.iperf_new_test()
def defaults(self):
"""Set/reset iperf test defaults."""
self.lib.iperf_defaults(self._test)
@property
def role(self):
"""The iperf3 instance role
valid roles are 'c'=client and 's'=server
:rtype: 'c' or 's'
"""
try:
self._role = c_char(
self.lib.iperf_get_test_role(self._test)
).value.decode('utf-8')
except TypeError:
self._role = c_char(
chr(self.lib.iperf_get_test_role(self._test))
).value.decode('utf-8')
return self._role
@role.setter
def role(self, role):
if role.lower() in ['c', 's']:
self.lib.iperf_set_test_role(
self._test,
c_char(role.lower().encode('utf-8'))
)
self._role = role
else:
raise ValueError("Unknown role, accepted values are 'c' and 's'")
@property
def bind_address(self):
"""The bind address the iperf3 instance will listen on
use * to listen on all available IPs
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_bind_address(self._test)
).value
if result:
self._bind_address = result.decode('utf-8')
else:
self._bind_address = '*'
return self._bind_address
@bind_address.setter
def bind_address(self, address):
self.lib.iperf_set_test_bind_address(
self._test,
c_char_p(address.encode('utf-8'))
)
self._bind_address = address
@property
def port(self):
"""The port the iperf3 server is listening on"""
self._port = self.lib.iperf_get_test_server_port(self._test)
return self._port
@port.setter
def port(self, port):
self.lib.iperf_set_test_server_port(self._test, int(port))
self._port = port
@property
@json_output.setter
def json_output(self, enabled):
if enabled:
self.lib.iperf_set_test_json_output(self._test, 1)
else:
self.lib.iperf_set_test_json_output(self._test, 0)
self._json_output = enabled
@property
def verbose(self):
"""Toggles verbose output for the iperf3 instance
:rtype: bool
"""
enabled = self.lib.iperf_get_verbose(self._test)
if enabled:
self._verbose = True
else:
self._verbose = False
return self._verbose
@verbose.setter
def verbose(self, enabled):
if enabled:
self.lib.iperf_set_verbose(self._test, 1)
else:
self.lib.iperf_set_verbose(self._test, 0)
self._verbose = enabled
@property
def _errno(self):
"""Returns the last error ID
:rtype: int
"""
return c_int.in_dll(self.lib, "i_errno").value
@property
def iperf_version(self):
"""Returns the version of the libiperf library
:rtype: string
"""
# TODO: Is there a better way to get the const char than allocating 30?
VersionType = c_char * 30
return VersionType.in_dll(self.lib, "version").value.decode('utf-8')
def _error_to_string(self, error_id):
"""Returns an error string from libiperf
:param error_id: The error_id produced by libiperf
:rtype: string
"""
strerror = self.lib.iperf_strerror
strerror.restype = c_char_p
return strerror(error_id).decode('utf-8')
def run(self):
"""Runs the iperf3 instance.
This function has to be instantiated by the Client and Server
instances
:rtype: NotImplementedError
"""
raise NotImplementedError
|
thiezn/iperf3-python | iperf3/iperf3.py | IPerf3.verbose | python | def verbose(self):
enabled = self.lib.iperf_get_verbose(self._test)
if enabled:
self._verbose = True
else:
self._verbose = False
return self._verbose | Toggles verbose output for the iperf3 instance
:rtype: bool | train | https://github.com/thiezn/iperf3-python/blob/094a6e043f44fb154988348603661b1473c23a50/iperf3/iperf3.py#L338-L350 | null | class IPerf3(object):
"""The base class used by both the iperf3 :class:`Server` and :class:`Client`
.. note:: You should not use this class directly
"""
def __init__(self,
role,
verbose=True,
lib_name=None):
"""Initialise the iperf shared library
:param role: 'c' = client; 's' = server
:param verbose: enable verbose output
:param lib_name: optional name and path for libiperf.so.0 library
"""
if lib_name is None:
lib_name = util.find_library('libiperf')
if lib_name is None:
# If we still couldn't find it lets try the manual approach
lib_name = 'libiperf.so.0'
try:
self.lib = cdll.LoadLibrary(lib_name)
except OSError:
raise OSError(
"Couldn't find shared library {}, is iperf3 installed?".format(
lib_name
)
)
# Set the appropriate C types.
self.lib.iperf_client_end.restype = c_int
self.lib.iperf_client_end.argtypes = (c_void_p,)
self.lib.iperf_free_test.restxpe = None
self.lib.iperf_free_test.argtypes = (c_void_p,)
self.lib.iperf_new_test.restype = c_void_p
self.lib.iperf_new_test.argtypes = None
self.lib.iperf_defaults.restype = c_int
self.lib.iperf_defaults.argtypes = (c_void_p,)
self.lib.iperf_get_test_role.restype = c_char
self.lib.iperf_get_test_role.argtypes = (c_void_p,)
self.lib.iperf_set_test_role.restype = None
self.lib.iperf_set_test_role.argtypes = (c_void_p, c_char,)
self.lib.iperf_get_test_bind_address.restype = c_char_p
self.lib.iperf_get_test_bind_address.argtypes = (c_void_p,)
self.lib.iperf_set_test_bind_address.restype = None
self.lib.iperf_set_test_bind_address.argtypes = (c_void_p, c_char_p,)
self.lib.iperf_get_test_server_port.restype = c_int
self.lib.iperf_get_test_server_port.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_port.restype = None
self.lib.iperf_set_test_server_port.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_json_output.restype = c_int
self.lib.iperf_get_test_json_output.argtypes = (c_void_p,)
self.lib.iperf_set_test_json_output.restype = None
self.lib.iperf_set_test_json_output.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_verbose.restype = c_int
self.lib.iperf_get_verbose.argtypes = (c_void_p,)
self.lib.iperf_set_verbose.restype = None
self.lib.iperf_set_verbose.argtypes = (c_void_p, c_int)
self.lib.iperf_strerror.restype = c_char_p
self.lib.iperf_strerror.argtypes = (c_int,)
self.lib.iperf_get_test_server_hostname.restype = c_char_p
self.lib.iperf_get_test_server_hostname.argtypes = (c_void_p,)
self.lib.iperf_set_test_server_hostname.restype = None
self.lib.iperf_set_test_server_hostname.argtypes = (
c_void_p, c_char_p,
)
self.lib.iperf_get_test_protocol_id.restype = c_int
self.lib.iperf_get_test_protocol_id.argtypes = (c_void_p,)
self.lib.set_protocol.restype = c_int
self.lib.set_protocol.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_omit.restype = c_int
self.lib.iperf_get_test_omit.argtypes = (c_void_p,)
self.lib.iperf_set_test_omit.restype = None
self.lib.iperf_set_test_omit.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_duration.restype = c_int
self.lib.iperf_get_test_duration.argtypes = (c_void_p,)
self.lib.iperf_set_test_duration.restype = None
self.lib.iperf_set_test_duration.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_rate.restype = c_uint64
self.lib.iperf_get_test_rate.argtypes = (c_void_p,)
self.lib.iperf_set_test_rate.restype = None
self.lib.iperf_set_test_rate.argtypes = (c_void_p, c_uint64,)
self.lib.iperf_get_test_blksize.restype = c_int
self.lib.iperf_get_test_blksize.argtypes = (c_void_p,)
self.lib.iperf_set_test_blksize.restype = None
self.lib.iperf_set_test_blksize.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_num_streams.restype = c_int
self.lib.iperf_get_test_num_streams.argtypes = (c_void_p,)
self.lib.iperf_set_test_num_streams.restype = None
self.lib.iperf_set_test_num_streams.argtypes = (c_void_p, c_int,)
self.lib.iperf_has_zerocopy.restype = c_int
self.lib.iperf_has_zerocopy.argtypes = None
self.lib.iperf_set_test_zerocopy.restype = None
self.lib.iperf_set_test_zerocopy.argtypes = (c_void_p, c_int,)
self.lib.iperf_get_test_reverse.restype = c_int
self.lib.iperf_get_test_reverse.argtypes = (c_void_p,)
self.lib.iperf_set_test_reverse.restype = None
self.lib.iperf_set_test_reverse.argtypes = (c_void_p, c_int,)
self.lib.iperf_run_client.restype = c_int
self.lib.iperf_run_client.argtypes = (c_void_p,)
self.lib.iperf_run_server.restype = c_int
self.lib.iperf_run_server.argtypes = (c_void_p,)
self.lib.iperf_reset_test.restype = None
self.lib.iperf_reset_test.argtypes = (c_void_p,)
try:
# Only available from iperf v3.1 and onwards
self.lib.iperf_get_test_json_output_string.restype = c_char_p
self.lib.iperf_get_test_json_output_string.argtypes = (c_void_p,)
except AttributeError:
pass
# The test C struct iperf_test
self._test = self._new()
self.defaults()
# stdout/strerr redirection variables
self._stdout_fd = os.dup(1)
self._stderr_fd = os.dup(2)
self._pipe_out, self._pipe_in = os.pipe() # no need for pipe write
# Generic test settings
self.role = role
self.json_output = True
self.verbose = verbose
def __del__(self):
"""Cleanup the test after the :class:`IPerf3` class is terminated"""
os.close(self._stdout_fd)
os.close(self._stderr_fd)
os.close(self._pipe_out)
os.close(self._pipe_in)
try:
# In the current version of libiperf, the control socket isn't
# closed on iperf_client_end(), see proposed pull request:
# https://github.com/esnet/iperf/pull/597
# Workaround for testing, don't ever do this..:
#
# sck=self.lib.iperf_get_control_socket(self._test)
# os.close(sck)
self.lib.iperf_client_end(self._test)
self.lib.iperf_free_test(self._test)
except AttributeError:
# self.lib doesn't exist, likely because iperf3 wasn't installed or
# the shared library libiperf.so.0 could not be found
pass
def _new(self):
"""Initialise a new iperf test
struct iperf_test *iperf_new_test()
"""
return self.lib.iperf_new_test()
def defaults(self):
"""Set/reset iperf test defaults."""
self.lib.iperf_defaults(self._test)
@property
def role(self):
"""The iperf3 instance role
valid roles are 'c'=client and 's'=server
:rtype: 'c' or 's'
"""
try:
self._role = c_char(
self.lib.iperf_get_test_role(self._test)
).value.decode('utf-8')
except TypeError:
self._role = c_char(
chr(self.lib.iperf_get_test_role(self._test))
).value.decode('utf-8')
return self._role
@role.setter
def role(self, role):
if role.lower() in ['c', 's']:
self.lib.iperf_set_test_role(
self._test,
c_char(role.lower().encode('utf-8'))
)
self._role = role
else:
raise ValueError("Unknown role, accepted values are 'c' and 's'")
@property
def bind_address(self):
"""The bind address the iperf3 instance will listen on
use * to listen on all available IPs
:rtype: string
"""
result = c_char_p(
self.lib.iperf_get_test_bind_address(self._test)
).value
if result:
self._bind_address = result.decode('utf-8')
else:
self._bind_address = '*'
return self._bind_address
@bind_address.setter
def bind_address(self, address):
self.lib.iperf_set_test_bind_address(
self._test,
c_char_p(address.encode('utf-8'))
)
self._bind_address = address
@property
def port(self):
"""The port the iperf3 server is listening on"""
self._port = self.lib.iperf_get_test_server_port(self._test)
return self._port
@port.setter
def port(self, port):
self.lib.iperf_set_test_server_port(self._test, int(port))
self._port = port
@property
def json_output(self):
"""Toggles json output of libiperf
Turning this off will output the iperf3 instance results to
stdout/stderr
:rtype: bool
"""
enabled = self.lib.iperf_get_test_json_output(self._test)
if enabled:
self._json_output = True
else:
self._json_output = False
return self._json_output
@json_output.setter
def json_output(self, enabled):
if enabled:
self.lib.iperf_set_test_json_output(self._test, 1)
else:
self.lib.iperf_set_test_json_output(self._test, 0)
self._json_output = enabled
@property
@verbose.setter
def verbose(self, enabled):
if enabled:
self.lib.iperf_set_verbose(self._test, 1)
else:
self.lib.iperf_set_verbose(self._test, 0)
self._verbose = enabled
@property
def _errno(self):
"""Returns the last error ID
:rtype: int
"""
return c_int.in_dll(self.lib, "i_errno").value
@property
def iperf_version(self):
"""Returns the version of the libiperf library
:rtype: string
"""
# TODO: Is there a better way to get the const char than allocating 30?
VersionType = c_char * 30
return VersionType.in_dll(self.lib, "version").value.decode('utf-8')
def _error_to_string(self, error_id):
"""Returns an error string from libiperf
:param error_id: The error_id produced by libiperf
:rtype: string
"""
strerror = self.lib.iperf_strerror
strerror.restype = c_char_p
return strerror(error_id).decode('utf-8')
def run(self):
"""Runs the iperf3 instance.
This function has to be instantiated by the Client and Server
instances
:rtype: NotImplementedError
"""
raise NotImplementedError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.