repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
wishtack/pysynthetic | synthetic/synthetic_class_controller.py | SyntheticClassController._updateConstructorAndMembers | python | def _updateConstructorAndMembers(self):
syntheticMetaData = self._syntheticMetaData()
constructor = self._constructorFactory.makeConstructor(syntheticMetaData.originalConstructor(),
syntheticMetaData.syntheticMemberList(),
syntheticMetaData.doesConsumeArguments())
self._class.__init__ = constructor
for syntheticMember in syntheticMetaData.syntheticMemberList():
syntheticMember.apply(self._class,
syntheticMetaData.originalMemberNameList(),
syntheticMetaData.namingConvention())
if syntheticMetaData.hasEqualityGeneration():
eq = self._comparisonFactory.makeEqualFunction(syntheticMetaData.originalEqualFunction(),
syntheticMetaData.syntheticMemberList())
ne = self._comparisonFactory.makeNotEqualFunction(syntheticMetaData.originalNotEqualFunction(),
syntheticMetaData.syntheticMemberList())
hashFunc = self._comparisonFactory.makeHashFunction(syntheticMetaData.originalHashFunction(),
syntheticMetaData.syntheticMemberList())
self._class.__eq__ = eq
self._class.__ne__ = ne
self._class.__hash__ = hashFunc | We overwrite constructor and accessors every time because the constructor might have to consume all
members even if their decorator is below the "synthesizeConstructor" decorator and it also might need to update
the getters and setters because the naming convention has changed. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/synthetic_class_controller.py#L90-L115 | [
"def _syntheticMetaData(self):\n # SyntheticMetaData does not exist...\n syntheticMetaDataName = '__syntheticMetaData__{className}'.format(className=self._class.__name__)\n if not hasattr(self._class, syntheticMetaDataName):\n # ...we create it.\n originalConstructor = getattr(self._class, '_... | class SyntheticClassController:
def __init__(self, cls):
self._constructorFactory = SyntheticConstructorFactory()
self._comparisonFactory = SyntheticComparisonFactory()
self._class = cls
@contract
def addSyntheticMember(self, syntheticMember):
"""
:type syntheticMember: SyntheticMember
"""
# Inserting this member at the beginning of the member list of synthesization data attribute
# because decorators are called in reversed order.
self._syntheticMetaData().insertSyntheticMemberAtBegin(syntheticMember)
# Update constructor and recreate accessors.
self._updateConstructorAndMembers()
def synthesizeConstructor(self):
self._syntheticMetaData().setConsumeArguments(True)
# Update constructor and recreate accessors.
self._updateConstructorAndMembers()
def synthesizeEquality(self):
self._syntheticMetaData().setEqualityGeneration(True)
# Update constructor and recreate accessors.
self._updateConstructorAndMembers()
@contract
def setNamingConvention(self, namingConvention):
"""
:type namingConvention: INamingConvention
"""
# Remove getters and setters with old naming convention.
self._removeSyntheticMembers()
# Set new naming convention.
self._syntheticMetaData().setNamingConvention(namingConvention)
# Update constructor and recreate accessors.
self._updateConstructorAndMembers()
def _syntheticMetaData(self):
# SyntheticMetaData does not exist...
syntheticMetaDataName = '__syntheticMetaData__{className}'.format(className=self._class.__name__)
if not hasattr(self._class, syntheticMetaDataName):
# ...we create it.
originalConstructor = getattr(self._class, '__init__', None)
originalEqualFunction = getattr(self._class, '__eq__', None)
originalNotEqualFunction = getattr(self._class, '__ne__', None)
originalHashFunction = getattr(self._class, '__hash__', None)
# List of existing methods (Python2: ismethod, Python3: isfunction).
originalMemberList = inspect.getmembers(self._class)
originalMemberNameList = [method[0] for method in originalMemberList]
# Making the synthetic meta data.
syntheticMetaData = SyntheticMetaData(cls = self._class,
originalConstructor = originalConstructor,
originalEqualFunction= originalEqualFunction,
originalNotEqualFunction= originalNotEqualFunction,
originalHashFuction= originalHashFunction,
originalMemberNameList = originalMemberNameList)
setattr(self._class, syntheticMetaDataName, syntheticMetaData)
return getattr(self._class, syntheticMetaDataName)
def _removeSyntheticMembers(self):
syntheticMetaData = self._syntheticMetaData()
for syntheticMember in syntheticMetaData.syntheticMemberList():
syntheticMember.remove(self._class,
syntheticMetaData.originalMemberNameList(),
syntheticMetaData.namingConvention())
|
wishtack/pysynthetic | synthetic/synthetic_comparison_factory.py | SyntheticComparisonFactory.makeEqualFunction | python | def makeEqualFunction(self, originalEqualFunction, syntheticMemberList):
def equal(instance, other):
if instance.__class__ is not other.__class__:
return NotImplemented
for m in syntheticMemberList:
if getattr(instance, m.privateMemberName()) != getattr(other, m.privateMemberName()):
return False
if inspect.isfunction(originalEqualFunction) or inspect.ismethod(originalEqualFunction):
return originalEqualFunction(instance, other)
return True
return equal | :param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/synthetic_comparison_factory.py#L16-L33 | null | class SyntheticComparisonFactory(object):
@contract
@contract
def makeNotEqualFunction(self, originalNotEqualFunction, syntheticMemberList):
"""
:param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members.
"""
def not_equal(instance, other):
if instance.__class__ is not other.__class__:
return NotImplemented
for m in syntheticMemberList:
if getattr(instance, m.privateMemberName()) != getattr(other, m.privateMemberName()):
return True
if inspect.isfunction(originalNotEqualFunction) or inspect.ismethod(originalNotEqualFunction):
return originalNotEqualFunction(instance, other)
return False
return not_equal
@contract
def makeHashFunction(self, originalHashFunction, syntheticMemberList):
"""
:param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members.
"""
if originalHashFunction is None:
return None
for member in syntheticMemberList:
if not member.readOnly():
return None
# All synthetic members are read-only: generate a hash function.
def hash_function(instance):
values = [getattr(instance, m.privateMemberName()) for m in syntheticMemberList]
if inspect.isfunction(originalHashFunction) or inspect.ismethod(originalHashFunction):
values.append(originalHashFunction(instance))
return hash(tuple(values))
return hash_function
|
wishtack/pysynthetic | synthetic/synthetic_comparison_factory.py | SyntheticComparisonFactory.makeNotEqualFunction | python | def makeNotEqualFunction(self, originalNotEqualFunction, syntheticMemberList):
def not_equal(instance, other):
if instance.__class__ is not other.__class__:
return NotImplemented
for m in syntheticMemberList:
if getattr(instance, m.privateMemberName()) != getattr(other, m.privateMemberName()):
return True
if inspect.isfunction(originalNotEqualFunction) or inspect.ismethod(originalNotEqualFunction):
return originalNotEqualFunction(instance, other)
return False
return not_equal | :param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/synthetic_comparison_factory.py#L36-L54 | null | class SyntheticComparisonFactory(object):
@contract
def makeEqualFunction(self, originalEqualFunction, syntheticMemberList):
"""
:param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members.
"""
def equal(instance, other):
if instance.__class__ is not other.__class__:
return NotImplemented
for m in syntheticMemberList:
if getattr(instance, m.privateMemberName()) != getattr(other, m.privateMemberName()):
return False
if inspect.isfunction(originalEqualFunction) or inspect.ismethod(originalEqualFunction):
return originalEqualFunction(instance, other)
return True
return equal
@contract
@contract
def makeHashFunction(self, originalHashFunction, syntheticMemberList):
"""
:param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members.
"""
if originalHashFunction is None:
return None
for member in syntheticMemberList:
if not member.readOnly():
return None
# All synthetic members are read-only: generate a hash function.
def hash_function(instance):
values = [getattr(instance, m.privateMemberName()) for m in syntheticMemberList]
if inspect.isfunction(originalHashFunction) or inspect.ismethod(originalHashFunction):
values.append(originalHashFunction(instance))
return hash(tuple(values))
return hash_function
|
wishtack/pysynthetic | synthetic/synthetic_comparison_factory.py | SyntheticComparisonFactory.makeHashFunction | python | def makeHashFunction(self, originalHashFunction, syntheticMemberList):
if originalHashFunction is None:
return None
for member in syntheticMemberList:
if not member.readOnly():
return None
# All synthetic members are read-only: generate a hash function.
def hash_function(instance):
values = [getattr(instance, m.privateMemberName()) for m in syntheticMemberList]
if inspect.isfunction(originalHashFunction) or inspect.ismethod(originalHashFunction):
values.append(originalHashFunction(instance))
return hash(tuple(values))
return hash_function | :param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/synthetic_comparison_factory.py#L57-L75 | null | class SyntheticComparisonFactory(object):
@contract
def makeEqualFunction(self, originalEqualFunction, syntheticMemberList):
"""
:param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members.
"""
def equal(instance, other):
if instance.__class__ is not other.__class__:
return NotImplemented
for m in syntheticMemberList:
if getattr(instance, m.privateMemberName()) != getattr(other, m.privateMemberName()):
return False
if inspect.isfunction(originalEqualFunction) or inspect.ismethod(originalEqualFunction):
return originalEqualFunction(instance, other)
return True
return equal
@contract
def makeNotEqualFunction(self, originalNotEqualFunction, syntheticMemberList):
"""
:param list(SyntheticMember) syntheticMemberList: a list of the class' synthetic members.
"""
def not_equal(instance, other):
if instance.__class__ is not other.__class__:
return NotImplemented
for m in syntheticMemberList:
if getattr(instance, m.privateMemberName()) != getattr(other, m.privateMemberName()):
return True
if inspect.isfunction(originalNotEqualFunction) or inspect.ismethod(originalNotEqualFunction):
return originalNotEqualFunction(instance, other)
return False
return not_equal
@contract
|
wishtack/pysynthetic | synthetic/synthetic_meta_data.py | SyntheticMetaData.insertSyntheticMemberAtBegin | python | def insertSyntheticMemberAtBegin(self, synthesizedMember):
memberName = synthesizedMember.memberName()
if memberName in [m.memberName() for m in self._syntheticMemberList]:
raise DuplicateMemberNameError(memberName, self._class.__name__)
self._syntheticMemberList.insert(0, synthesizedMember) | :type synthesizedMember: SyntheticMember
:raises DuplicateMemberNameError | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/synthetic_meta_data.py#L64-L73 | null | class SyntheticMetaData:
def __init__(self, cls, originalConstructor, originalEqualFunction, originalNotEqualFunction, originalHashFuction,
originalMemberNameList):
"""
:type originalMemberNameList: list(str)
:type namingConvention: INamingConvention|None
"""
self._class = cls
self._originalConstructor = originalConstructor
self._originalEqualFunction = originalEqualFunction
self._originalNotEqualFunction = originalNotEqualFunction
self._originalHashFunction = originalHashFuction
self._originalMemberNameList = originalMemberNameList
self._syntheticMemberList = []
self._doesConsumeArguments = False
self._hasEqualityGeneration = False
self._namingConvention = None
def originalConstructor(self):
return self._originalConstructor
def originalEqualFunction(self):
return self._originalEqualFunction
def originalNotEqualFunction(self):
return self._originalNotEqualFunction
def originalHashFunction(self):
return self._originalHashFunction
def originalMemberNameList(self):
return self._originalMemberNameList
@contract
def insertSyntheticMemberAtBegin(self, synthesizedMember):
"""
:type synthesizedMember: SyntheticMember
:raises DuplicateMemberNameError
"""
memberName = synthesizedMember.memberName()
if memberName in [m.memberName() for m in self._syntheticMemberList]:
raise DuplicateMemberNameError(memberName, self._class.__name__)
self._syntheticMemberList.insert(0, synthesizedMember)
def syntheticMemberList(self):
return self._syntheticMemberList
def doesConsumeArguments(self):
"""Tells if the generated constructor must consume parameters or just use the default values."""
return self._doesConsumeArguments
def setConsumeArguments(self, _consumeArguments):
self._doesConsumeArguments = _consumeArguments
def hasEqualityGeneration(self):
"""Tells if __eq__ and __neq__ functions should be generated"""
return self._hasEqualityGeneration
def setEqualityGeneration(self, equalityGeneration):
self._hasEqualityGeneration = equalityGeneration
def namingConvention(self):
return self._namingConvention
def setNamingConvention(self, namingConvention):
"""
:type namingConvention: INamingConvention
"""
self._namingConvention = namingConvention
|
wishtack/pysynthetic | synthetic/property_delegate.py | PropertyDelegate.apply | python | def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
# The new property.
originalProperty = None
if memberName in originalMemberNameList:
member = getattr(cls, memberName)
# There's already a member with that name and it's not a property
if not isinstance(member, property):
raise InvalidPropertyOverrideError(memberName, cls.__name__)
# If property already exists, we will just modify it.
originalProperty = member
kwargs = {self._KEY_PROPERTY_GET: getattr(originalProperty, self._KEY_PROPERTY_GET, None) or getter,
self._KEY_PROPERTY_SET: getattr(originalProperty, self._KEY_PROPERTY_SET, None) or setter,
self._KEY_PROPERTY_DEL: getattr(originalProperty, self._KEY_PROPERTY_DEL, None) or None,
self._KEY_PROPERTY_DOC: getattr(originalProperty, self._KEY_PROPERTY_DOC, None) or None}
setattr(cls, memberName, property(**kwargs)) | :type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/property_delegate.py#L34-L57 | null | class PropertyDelegate(IMemberDelegate):
_KEY_PROPERTY_GET = 'fget'
_KEY_PROPERTY_SET = 'fset'
_KEY_PROPERTY_DEL = 'fdel'
_KEY_PROPERTY_DOC = 'doc'
def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
# The new property.
originalProperty = None
if memberName in originalMemberNameList:
member = getattr(cls, memberName)
# There's already a member with that name and it's not a property
if not isinstance(member, property):
raise InvalidPropertyOverrideError(memberName, cls.__name__)
# If property already exists, we will just modify it.
originalProperty = member
kwargs = {self._KEY_PROPERTY_GET: getattr(originalProperty, self._KEY_PROPERTY_GET, None) or getter,
self._KEY_PROPERTY_SET: getattr(originalProperty, self._KEY_PROPERTY_SET, None) or setter,
self._KEY_PROPERTY_DEL: getattr(originalProperty, self._KEY_PROPERTY_DEL, None) or None,
self._KEY_PROPERTY_DOC: getattr(originalProperty, self._KEY_PROPERTY_DOC, None) or None}
setattr(cls, memberName, property(**kwargs))
def remove(self, cls, originalMemberNameList, memberName, classNamingConvention):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type classNamingConvention: INamingConvention|None
"""
if memberName not in originalMemberNameList:
delattr(cls, memberName)
|
wishtack/pysynthetic | synthetic/property_delegate.py | PropertyDelegate.remove | python | def remove(self, cls, originalMemberNameList, memberName, classNamingConvention):
if memberName not in originalMemberNameList:
delattr(cls, memberName) | :type cls: type
:type originalMemberNameList: list(str)
:type classNamingConvention: INamingConvention|None | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/property_delegate.py#L59-L66 | null | class PropertyDelegate(IMemberDelegate):
_KEY_PROPERTY_GET = 'fget'
_KEY_PROPERTY_SET = 'fset'
_KEY_PROPERTY_DEL = 'fdel'
_KEY_PROPERTY_DOC = 'doc'
def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
# The new property.
originalProperty = None
if memberName in originalMemberNameList:
member = getattr(cls, memberName)
# There's already a member with that name and it's not a property
if not isinstance(member, property):
raise InvalidPropertyOverrideError(memberName, cls.__name__)
# If property already exists, we will just modify it.
originalProperty = member
kwargs = {self._KEY_PROPERTY_GET: getattr(originalProperty, self._KEY_PROPERTY_GET, None) or getter,
self._KEY_PROPERTY_SET: getattr(originalProperty, self._KEY_PROPERTY_SET, None) or setter,
self._KEY_PROPERTY_DEL: getattr(originalProperty, self._KEY_PROPERTY_DEL, None) or None,
self._KEY_PROPERTY_DOC: getattr(originalProperty, self._KEY_PROPERTY_DOC, None) or None}
setattr(cls, memberName, property(**kwargs))
|
wishtack/pysynthetic | synthetic/decorators.py | synthesizeMember | python | def synthesizeMember(memberName,
default = None,
contract = None,
readOnly = False,
getterName = None,
setterName = None,
privateMemberName = None):
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionCamelCase(),
getterName = getterName,
setterName = setterName)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = memberName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = accessorDelegate) | When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``memberName``. (Ex.: ``memberName = 'member' => instance.member()``)
By default, the setter will be named ``memberName`` with the first letter capitalized and 'set' prepended it to it.
(Ex.: ``memberName = "member" => instance.setMember(...)``)
By default, the private attribute containing the member's value will be named ``memberName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param memberName: Name of the member to synthesize.
:type memberName: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the setter will not be added to the class.
:type readOnly: bool
:param getterName: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``isAlive``)
:type getterName: str|None
:param setterName: Custom setter name.
:type setterName: str|None
:param privateMemberName: Custom name for the private attribute that contains the member's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/decorators.py#L21-L65 | null | #-*- coding: utf-8 -*-
#
# Created on Dec 17, 2012
#
# @author: Younes JAAIDI
#
# $Id: 76fcd589c5fc821e2552c3ff75b23833f8a18cb6 $
#
from .accessor_delegate import AccessorDelegate
from .i_naming_convention import INamingConvention
from .property_delegate import PropertyDelegate
from .naming_convention_camel_case import NamingConventionCamelCase
from .naming_convention_underscore import NamingConventionUnderscore
from .synthetic_decorator_factory import SyntheticDecoratorFactory
from contracts import contract, new_contract
new_contract('INamingConvention', INamingConvention)
@contract
def synthesizeMember(memberName,
default = None,
contract = None,
readOnly = False,
getterName = None,
setterName = None,
privateMemberName = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``memberName``. (Ex.: ``memberName = 'member' => instance.member()``)
By default, the setter will be named ``memberName`` with the first letter capitalized and 'set' prepended it to it.
(Ex.: ``memberName = "member" => instance.setMember(...)``)
By default, the private attribute containing the member's value will be named ``memberName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param memberName: Name of the member to synthesize.
:type memberName: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the setter will not be added to the class.
:type readOnly: bool
:param getterName: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``isAlive``)
:type getterName: str|None
:param setterName: Custom setter name.
:type setterName: str|None
:param privateMemberName: Custom name for the private attribute that contains the member's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionCamelCase(),
getterName = getterName,
setterName = setterName)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = memberName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = accessorDelegate)
@contract
def synthesize_member(member_name,
default = None,
contract = None,
read_only = False,
getter_name = None,
setter_name = None,
private_member_name = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``member_name``. (Ex.: ``member_name = 'member' => instance.member()``)
By default, the setter will be named ``member_name`` with 'set\_' prepended it to it.
(Ex.: ``member_name = 'member' => instance.set_member(...)``)
By default, the private attribute containing the member's value will be named ``member_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param member_name: Name of the member to synthesize.
:type member_name: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the setter will not be added to the class.
:type read_only: bool
:param getter_name: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``is_alive``)
:type getter_name: str|None
:param setter_name: Custom setter name.
:type setter_name: str|None
:param private_member_name: Custom name for the private attribute that contains the member's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionUnderscore(),
getterName = getter_name,
setterName = setter_name)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = member_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = accessorDelegate)
@contract
def synthesizeProperty(propertyName,
default = None,
contract = None,
readOnly = False,
privateMemberName = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``propertyName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param propertyName: Name of the property to synthesize.
:type propertyName: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the property will not a have a setter.
:type readOnly: bool
:param privateMemberName: Custom name for the private attribute that contains the property's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = propertyName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = PropertyDelegate())
@contract
def synthesize_property(property_name,
default = None,
contract = None,
read_only = False,
private_member_name = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``property_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param property_name: Name of the property to synthesize.
:type property_name: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the property will not a have a setter.
:type read_only: bool
:param private_member_name: Custom name for the private attribute that contains the property's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = property_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = PropertyDelegate())
def synthesizeConstructor():
"""
This class decorator will override the class's constructor by making it\
implicitly consume values for synthesized members and properties.
"""
return SyntheticDecoratorFactory().syntheticConstructorDecorator()
def synthesizeEquality():
"""
This class decorator will override the class's __eq__ and __neq__ operations
to be based on comparing the values of the synthetic members.
"""
return SyntheticDecoratorFactory().syntheticEqualityDecorator()
def namingConvention(namingConvention):
"""
When applied to a class, this decorator will override the CamelCase naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``namingConvention``.
:param namingConvention: The new naming convention.
:type namingConvention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(namingConvention)
def naming_convention(naming_convention):
"""
When applied to a class, this decorator will override the underscore naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``naming_convention``.
:param naming_convention: The new naming convention.
:type naming_convention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(naming_convention)
synthesize_constructor = synthesizeConstructor
synthesize_equality = synthesizeEquality
|
wishtack/pysynthetic | synthetic/decorators.py | synthesize_member | python | def synthesize_member(member_name,
default = None,
contract = None,
read_only = False,
getter_name = None,
setter_name = None,
private_member_name = None):
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionUnderscore(),
getterName = getter_name,
setterName = setter_name)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = member_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = accessorDelegate) | When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``member_name``. (Ex.: ``member_name = 'member' => instance.member()``)
By default, the setter will be named ``member_name`` with 'set\_' prepended it to it.
(Ex.: ``member_name = 'member' => instance.set_member(...)``)
By default, the private attribute containing the member's value will be named ``member_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param member_name: Name of the member to synthesize.
:type member_name: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the setter will not be added to the class.
:type read_only: bool
:param getter_name: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``is_alive``)
:type getter_name: str|None
:param setter_name: Custom setter name.
:type setter_name: str|None
:param private_member_name: Custom name for the private attribute that contains the member's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/decorators.py#L68-L112 | null | #-*- coding: utf-8 -*-
#
# Created on Dec 17, 2012
#
# @author: Younes JAAIDI
#
# $Id: 76fcd589c5fc821e2552c3ff75b23833f8a18cb6 $
#
from .accessor_delegate import AccessorDelegate
from .i_naming_convention import INamingConvention
from .property_delegate import PropertyDelegate
from .naming_convention_camel_case import NamingConventionCamelCase
from .naming_convention_underscore import NamingConventionUnderscore
from .synthetic_decorator_factory import SyntheticDecoratorFactory
from contracts import contract, new_contract
new_contract('INamingConvention', INamingConvention)
@contract
def synthesizeMember(memberName,
default = None,
contract = None,
readOnly = False,
getterName = None,
setterName = None,
privateMemberName = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``memberName``. (Ex.: ``memberName = 'member' => instance.member()``)
By default, the setter will be named ``memberName`` with the first letter capitalized and 'set' prepended it to it.
(Ex.: ``memberName = "member" => instance.setMember(...)``)
By default, the private attribute containing the member's value will be named ``memberName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param memberName: Name of the member to synthesize.
:type memberName: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the setter will not be added to the class.
:type readOnly: bool
:param getterName: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``isAlive``)
:type getterName: str|None
:param setterName: Custom setter name.
:type setterName: str|None
:param privateMemberName: Custom name for the private attribute that contains the member's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionCamelCase(),
getterName = getterName,
setterName = setterName)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = memberName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = accessorDelegate)
@contract
def synthesize_member(member_name,
default = None,
contract = None,
read_only = False,
getter_name = None,
setter_name = None,
private_member_name = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``member_name``. (Ex.: ``member_name = 'member' => instance.member()``)
By default, the setter will be named ``member_name`` with 'set\_' prepended it to it.
(Ex.: ``member_name = 'member' => instance.set_member(...)``)
By default, the private attribute containing the member's value will be named ``member_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param member_name: Name of the member to synthesize.
:type member_name: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the setter will not be added to the class.
:type read_only: bool
:param getter_name: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``is_alive``)
:type getter_name: str|None
:param setter_name: Custom setter name.
:type setter_name: str|None
:param private_member_name: Custom name for the private attribute that contains the member's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionUnderscore(),
getterName = getter_name,
setterName = setter_name)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = member_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = accessorDelegate)
@contract
def synthesizeProperty(propertyName,
default = None,
contract = None,
readOnly = False,
privateMemberName = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``propertyName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param propertyName: Name of the property to synthesize.
:type propertyName: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the property will not a have a setter.
:type readOnly: bool
:param privateMemberName: Custom name for the private attribute that contains the property's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = propertyName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = PropertyDelegate())
@contract
def synthesize_property(property_name,
default = None,
contract = None,
read_only = False,
private_member_name = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``property_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param property_name: Name of the property to synthesize.
:type property_name: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the property will not a have a setter.
:type read_only: bool
:param private_member_name: Custom name for the private attribute that contains the property's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = property_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = PropertyDelegate())
def synthesizeConstructor():
"""
This class decorator will override the class's constructor by making it\
implicitly consume values for synthesized members and properties.
"""
return SyntheticDecoratorFactory().syntheticConstructorDecorator()
def synthesizeEquality():
"""
This class decorator will override the class's __eq__ and __neq__ operations
to be based on comparing the values of the synthetic members.
"""
return SyntheticDecoratorFactory().syntheticEqualityDecorator()
def namingConvention(namingConvention):
"""
When applied to a class, this decorator will override the CamelCase naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``namingConvention``.
:param namingConvention: The new naming convention.
:type namingConvention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(namingConvention)
def naming_convention(naming_convention):
"""
When applied to a class, this decorator will override the underscore naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``naming_convention``.
:param naming_convention: The new naming convention.
:type naming_convention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(naming_convention)
synthesize_constructor = synthesizeConstructor
synthesize_equality = synthesizeEquality
|
wishtack/pysynthetic | synthetic/decorators.py | synthesizeProperty | python | def synthesizeProperty(propertyName,
default = None,
contract = None,
readOnly = False,
privateMemberName = None):
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = propertyName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = PropertyDelegate()) | When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``propertyName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param propertyName: Name of the property to synthesize.
:type propertyName: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the property will not a have a setter.
:type readOnly: bool
:param privateMemberName: Custom name for the private attribute that contains the property's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/decorators.py#L115-L149 | null | #-*- coding: utf-8 -*-
#
# Created on Dec 17, 2012
#
# @author: Younes JAAIDI
#
# $Id: 76fcd589c5fc821e2552c3ff75b23833f8a18cb6 $
#
from .accessor_delegate import AccessorDelegate
from .i_naming_convention import INamingConvention
from .property_delegate import PropertyDelegate
from .naming_convention_camel_case import NamingConventionCamelCase
from .naming_convention_underscore import NamingConventionUnderscore
from .synthetic_decorator_factory import SyntheticDecoratorFactory
from contracts import contract, new_contract
new_contract('INamingConvention', INamingConvention)
@contract
def synthesizeMember(memberName,
default = None,
contract = None,
readOnly = False,
getterName = None,
setterName = None,
privateMemberName = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``memberName``. (Ex.: ``memberName = 'member' => instance.member()``)
By default, the setter will be named ``memberName`` with the first letter capitalized and 'set' prepended it to it.
(Ex.: ``memberName = "member" => instance.setMember(...)``)
By default, the private attribute containing the member's value will be named ``memberName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param memberName: Name of the member to synthesize.
:type memberName: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the setter will not be added to the class.
:type readOnly: bool
:param getterName: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``isAlive``)
:type getterName: str|None
:param setterName: Custom setter name.
:type setterName: str|None
:param privateMemberName: Custom name for the private attribute that contains the member's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionCamelCase(),
getterName = getterName,
setterName = setterName)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = memberName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = accessorDelegate)
@contract
def synthesize_member(member_name,
default = None,
contract = None,
read_only = False,
getter_name = None,
setter_name = None,
private_member_name = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``member_name``. (Ex.: ``member_name = 'member' => instance.member()``)
By default, the setter will be named ``member_name`` with 'set\_' prepended it to it.
(Ex.: ``member_name = 'member' => instance.set_member(...)``)
By default, the private attribute containing the member's value will be named ``member_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param member_name: Name of the member to synthesize.
:type member_name: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the setter will not be added to the class.
:type read_only: bool
:param getter_name: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``is_alive``)
:type getter_name: str|None
:param setter_name: Custom setter name.
:type setter_name: str|None
:param private_member_name: Custom name for the private attribute that contains the member's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionUnderscore(),
getterName = getter_name,
setterName = setter_name)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = member_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = accessorDelegate)
@contract
@contract
def synthesize_property(property_name,
default = None,
contract = None,
read_only = False,
private_member_name = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``property_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param property_name: Name of the property to synthesize.
:type property_name: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the property will not a have a setter.
:type read_only: bool
:param private_member_name: Custom name for the private attribute that contains the property's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = property_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = PropertyDelegate())
def synthesizeConstructor():
"""
This class decorator will override the class's constructor by making it\
implicitly consume values for synthesized members and properties.
"""
return SyntheticDecoratorFactory().syntheticConstructorDecorator()
def synthesizeEquality():
"""
This class decorator will override the class's __eq__ and __neq__ operations
to be based on comparing the values of the synthetic members.
"""
return SyntheticDecoratorFactory().syntheticEqualityDecorator()
def namingConvention(namingConvention):
"""
When applied to a class, this decorator will override the CamelCase naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``namingConvention``.
:param namingConvention: The new naming convention.
:type namingConvention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(namingConvention)
def naming_convention(naming_convention):
"""
When applied to a class, this decorator will override the underscore naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``naming_convention``.
:param naming_convention: The new naming convention.
:type naming_convention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(naming_convention)
synthesize_constructor = synthesizeConstructor
synthesize_equality = synthesizeEquality
|
wishtack/pysynthetic | synthetic/decorators.py | synthesize_property | python | def synthesize_property(property_name,
default = None,
contract = None,
read_only = False,
private_member_name = None):
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = property_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = PropertyDelegate()) | When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``property_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param property_name: Name of the property to synthesize.
:type property_name: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the property will not a have a setter.
:type read_only: bool
:param private_member_name: Custom name for the private attribute that contains the property's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property. | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/decorators.py#L151-L185 | null | #-*- coding: utf-8 -*-
#
# Created on Dec 17, 2012
#
# @author: Younes JAAIDI
#
# $Id: 76fcd589c5fc821e2552c3ff75b23833f8a18cb6 $
#
from .accessor_delegate import AccessorDelegate
from .i_naming_convention import INamingConvention
from .property_delegate import PropertyDelegate
from .naming_convention_camel_case import NamingConventionCamelCase
from .naming_convention_underscore import NamingConventionUnderscore
from .synthetic_decorator_factory import SyntheticDecoratorFactory
from contracts import contract, new_contract
new_contract('INamingConvention', INamingConvention)
@contract
def synthesizeMember(memberName,
default = None,
contract = None,
readOnly = False,
getterName = None,
setterName = None,
privateMemberName = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``memberName``. (Ex.: ``memberName = 'member' => instance.member()``)
By default, the setter will be named ``memberName`` with the first letter capitalized and 'set' prepended it to it.
(Ex.: ``memberName = "member" => instance.setMember(...)``)
By default, the private attribute containing the member's value will be named ``memberName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param memberName: Name of the member to synthesize.
:type memberName: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the setter will not be added to the class.
:type readOnly: bool
:param getterName: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``isAlive``)
:type getterName: str|None
:param setterName: Custom setter name.
:type setterName: str|None
:param privateMemberName: Custom name for the private attribute that contains the member's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionCamelCase(),
getterName = getterName,
setterName = setterName)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = memberName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = accessorDelegate)
@contract
def synthesize_member(member_name,
default = None,
contract = None,
read_only = False,
getter_name = None,
setter_name = None,
private_member_name = None):
"""
When applied to a class, this decorator adds getter/setter methods to it and overrides the constructor in order to set\
the default value of the member.
By default, the getter will be named ``member_name``. (Ex.: ``member_name = 'member' => instance.member()``)
By default, the setter will be named ``member_name`` with 'set\_' prepended it to it.
(Ex.: ``member_name = 'member' => instance.set_member(...)``)
By default, the private attribute containing the member's value will be named ``member_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param member_name: Name of the member to synthesize.
:type member_name: str
:param default: Member's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the setter will not be added to the class.
:type read_only: bool
:param getter_name: Custom getter name. This can be useful when the member is a boolean. (Ex.: ``is_alive``)
:type getter_name: str|None
:param setter_name: Custom setter name.
:type setter_name: str|None
:param private_member_name: Custom name for the private attribute that contains the member's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
"""
accessorDelegate = AccessorDelegate(namingConvention = NamingConventionUnderscore(),
getterName = getter_name,
setterName = setter_name)
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = member_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = accessorDelegate)
@contract
def synthesizeProperty(propertyName,
default = None,
contract = None,
readOnly = False,
privateMemberName = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``propertyName`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`namingConvention <namingConvention>` decorator.
:param propertyName: Name of the property to synthesize.
:type propertyName: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param readOnly: If set to ``True``, the property will not a have a setter.
:type readOnly: bool
:param privateMemberName: Custom name for the private attribute that contains the property's value.
:type privateMemberName: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = propertyName,
defaultValue = default,
contract = contract,
readOnly = readOnly,
privateMemberName = privateMemberName,
memberDelegate = PropertyDelegate())
@contract
def synthesize_property(property_name,
default = None,
contract = None,
read_only = False,
private_member_name = None):
"""
When applied to a class, this decorator adds a property to it and overrides the constructor in order to set\
the default value of the property.
:IMPORTANT: In order for this to work on python 2, you must use new objects that is to say that the class must inherit from object.
By default, the private attribute containing the property's value will be named ``property_name`` with '_' prepended to it.
Naming convention can be overridden with a custom one using :meth:`naming_convention <naming_convention>` decorator.
:param property_name: Name of the property to synthesize.
:type property_name: str
:param default: Property's default value.
:type default: *
:param contract: Type constraint. See `PyContracts <http://andreacensi.github.com/contracts/>`_
:type contract: *
:param read_only: If set to ``True``, the property will not a have a setter.
:type read_only: bool
:param private_member_name: Custom name for the private attribute that contains the property's value.
:type private_member_name: str|None
:raises: :class:`DuplicateMemberNameError` when two synthetic members have the same name.
:raises: :class:`InvalidPropertyOverrideError` when there's already a member with that name and which is not a property.
"""
return SyntheticDecoratorFactory().syntheticMemberDecorator(memberName = property_name,
defaultValue = default,
contract = contract,
readOnly = read_only,
privateMemberName = private_member_name,
memberDelegate = PropertyDelegate())
def synthesizeConstructor():
"""
This class decorator will override the class's constructor by making it\
implicitly consume values for synthesized members and properties.
"""
return SyntheticDecoratorFactory().syntheticConstructorDecorator()
def synthesizeEquality():
"""
This class decorator will override the class's __eq__ and __neq__ operations
to be based on comparing the values of the synthetic members.
"""
return SyntheticDecoratorFactory().syntheticEqualityDecorator()
def namingConvention(namingConvention):
"""
When applied to a class, this decorator will override the CamelCase naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``namingConvention``.
:param namingConvention: The new naming convention.
:type namingConvention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(namingConvention)
def naming_convention(naming_convention):
"""
When applied to a class, this decorator will override the underscore naming convention of all (previous and following)
:meth:`synthesizeMember` calls on the class to ``naming_convention``.
:param naming_convention: The new naming convention.
:type naming_convention: INamingConvention
"""
return SyntheticDecoratorFactory().namingConventionDecorator(naming_convention)
synthesize_constructor = synthesizeConstructor
synthesize_equality = synthesizeEquality
|
wishtack/pysynthetic | synthetic/synthetic_member.py | SyntheticMember.apply | python | def apply(self, cls, originalMemberNameList, classNamingConvention):
self._memberDelegate.apply(cls = cls,
originalMemberNameList = originalMemberNameList,
memberName = self._memberName,
classNamingConvention = classNamingConvention,
getter = self._makeGetter(),
setter = self._makeSetter()) | :type cls: type
:type originalMemberNameList: list(str)
:type classNamingConvention: INamingConvention | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/synthetic_member.py#L73-L84 | [
"def _makeGetter(self):\n def getter(instance):\n return getattr(instance, self._privateMemberName)\n\n return getter\n",
"def _makeSetter(self):\n if self._readOnly:\n return None\n\n def setter(instance, value):\n if self._contract is not None:\n self.checkContract(se... | class SyntheticMember:
@contract
def __init__(self,
memberName,
default,
contract,
readOnly,
privateMemberName,
memberDelegate):
"""
:type memberName: str
:type readOnly: bool
:type privateMemberName: str|None
:type memberDelegate: IMemberDelegate
"""
if privateMemberName is None:
privateMemberName = '_%s' % memberName
if contract is not None:
contract = parse(contract)
self._memberName = memberName
self._default = default
self._contract = contract
self._readOnly = readOnly
self._privateMemberName = privateMemberName
self._memberDelegate = memberDelegate
def memberName(self):
return self._memberName
def default(self):
return self._default
def privateMemberName(self):
return self._privateMemberName
def readOnly(self):
return self._readOnly
def checkContract(self, argumentName, value):
# No contract to check.
if self._contract is None:
return
# Contracts are disabled.
if contracts.all_disabled():
return
self._contract._check_contract(value=value, context={argumentName: value}, silent=False)
def remove(self, cls, originalMemberNameList, classNamingConvention):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type classNamingConvention: INamingConvention
"""
self._memberDelegate.remove(cls = cls,
originalMemberNameList = originalMemberNameList,
memberName = self._memberName,
classNamingConvention = classNamingConvention)
def _makeGetter(self):
def getter(instance):
return getattr(instance, self._privateMemberName)
return getter
def _makeSetter(self):
if self._readOnly:
return None
def setter(instance, value):
if self._contract is not None:
self.checkContract(self._memberName, value)
setattr(instance, self._privateMemberName, value)
return setter
|
wishtack/pysynthetic | synthetic/synthetic_member.py | SyntheticMember.remove | python | def remove(self, cls, originalMemberNameList, classNamingConvention):
self._memberDelegate.remove(cls = cls,
originalMemberNameList = originalMemberNameList,
memberName = self._memberName,
classNamingConvention = classNamingConvention) | :type cls: type
:type originalMemberNameList: list(str)
:type classNamingConvention: INamingConvention | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/synthetic_member.py#L86-L95 | null | class SyntheticMember:
@contract
def __init__(self,
memberName,
default,
contract,
readOnly,
privateMemberName,
memberDelegate):
"""
:type memberName: str
:type readOnly: bool
:type privateMemberName: str|None
:type memberDelegate: IMemberDelegate
"""
if privateMemberName is None:
privateMemberName = '_%s' % memberName
if contract is not None:
contract = parse(contract)
self._memberName = memberName
self._default = default
self._contract = contract
self._readOnly = readOnly
self._privateMemberName = privateMemberName
self._memberDelegate = memberDelegate
def memberName(self):
return self._memberName
def default(self):
return self._default
def privateMemberName(self):
return self._privateMemberName
def readOnly(self):
return self._readOnly
def checkContract(self, argumentName, value):
# No contract to check.
if self._contract is None:
return
# Contracts are disabled.
if contracts.all_disabled():
return
self._contract._check_contract(value=value, context={argumentName: value}, silent=False)
def apply(self, cls, originalMemberNameList, classNamingConvention):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type classNamingConvention: INamingConvention
"""
self._memberDelegate.apply(cls = cls,
originalMemberNameList = originalMemberNameList,
memberName = self._memberName,
classNamingConvention = classNamingConvention,
getter = self._makeGetter(),
setter = self._makeSetter())
def _makeGetter(self):
def getter(instance):
return getattr(instance, self._privateMemberName)
return getter
def _makeSetter(self):
if self._readOnly:
return None
def setter(instance, value):
if self._contract is not None:
self.checkContract(self._memberName, value)
setattr(instance, self._privateMemberName, value)
return setter
|
wishtack/pysynthetic | synthetic/accessor_delegate.py | AccessorDelegate.apply | python | def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
accessorDict = self._accessorDict(memberName, classNamingConvention, getter, setter)
for accessorName, accessor in accessorDict.items():
if accessorName not in originalMemberNameList and accessor is not None:
setattr(cls, accessorName, accessor) | :type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/accessor_delegate.py#L37-L47 | [
"def _accessorDict(self, memberName, classNamingConvention, getter = None, setter = None):\n resultDict = {}\n resultDict[self._accessorName(memberName, self._GETTER_KEY, classNamingConvention)] = getter\n resultDict[self._accessorName(memberName, self._SETTER_KEY, classNamingConvention)] = setter\n ret... | class AccessorDelegate(IMemberDelegate):
_GETTER_KEY = 'getter'
_SETTER_KEY = 'setter'
# Mappings between accessor types and their names and methods.
# @hack: I don't much like that.
_NAMING_CONVENTION_ACCESSOR_NAME_METHOD_DICT = {_GETTER_KEY: 'getterName',
_SETTER_KEY: 'setterName'}
def __init__(self, namingConvention, getterName, setterName):
"""
:type namingConvention: INamingConvention
:type getterName: str|None
:type setterName: str|None
"""
self._namingConvention = namingConvention
# Accessor names.
self._accessorNameDict = {self._GETTER_KEY: getterName,
self._SETTER_KEY: setterName}
def remove(self, cls, originalMemberNameList, memberName, classNamingConvention):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
accessorDict = self._accessorDict(memberName, classNamingConvention)
for accessorName, _ in accessorDict.items():
if accessorName not in originalMemberNameList and hasattr(cls, accessorName):
delattr(cls, accessorName)
def _accessorDict(self, memberName, classNamingConvention, getter = None, setter = None):
resultDict = {}
resultDict[self._accessorName(memberName, self._GETTER_KEY, classNamingConvention)] = getter
resultDict[self._accessorName(memberName, self._SETTER_KEY, classNamingConvention)] = setter
return resultDict
def _accessorName(self, memberName, accessorName, classNamingConvention):
"""
:type memberName: str
:type accessorName: str
:type classNamingConvention: INamingConvention|None
"""
# Using user's custom getter or setter name.
customAccessorName = self._accessorNameDict[accessorName]
if customAccessorName is not None:
return customAccessorName
# If the class has a custom naming convention, we use it. Cf. 'namingConvention' decorator.
# Otherwise, we use the member's naming convention, camelCase or underscore depending on the decorator that was used
# (respectively synthesizeMember or synthesize_member).
namingConvention = self._namingConvention
if classNamingConvention is not None:
namingConvention = classNamingConvention
# @hack: I don't much like that...
methodName = self._NAMING_CONVENTION_ACCESSOR_NAME_METHOD_DICT[accessorName]
# Using naming convention to transform member's name to an accessor name.
return getattr(namingConvention, methodName)(memberName)
|
wishtack/pysynthetic | synthetic/accessor_delegate.py | AccessorDelegate.remove | python | def remove(self, cls, originalMemberNameList, memberName, classNamingConvention):
accessorDict = self._accessorDict(memberName, classNamingConvention)
for accessorName, _ in accessorDict.items():
if accessorName not in originalMemberNameList and hasattr(cls, accessorName):
delattr(cls, accessorName) | :type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/accessor_delegate.py#L49-L59 | [
"def _accessorDict(self, memberName, classNamingConvention, getter = None, setter = None):\n resultDict = {}\n resultDict[self._accessorName(memberName, self._GETTER_KEY, classNamingConvention)] = getter\n resultDict[self._accessorName(memberName, self._SETTER_KEY, classNamingConvention)] = setter\n ret... | class AccessorDelegate(IMemberDelegate):
_GETTER_KEY = 'getter'
_SETTER_KEY = 'setter'
# Mappings between accessor types and their names and methods.
# @hack: I don't much like that.
_NAMING_CONVENTION_ACCESSOR_NAME_METHOD_DICT = {_GETTER_KEY: 'getterName',
_SETTER_KEY: 'setterName'}
def __init__(self, namingConvention, getterName, setterName):
"""
:type namingConvention: INamingConvention
:type getterName: str|None
:type setterName: str|None
"""
self._namingConvention = namingConvention
# Accessor names.
self._accessorNameDict = {self._GETTER_KEY: getterName,
self._SETTER_KEY: setterName}
def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
accessorDict = self._accessorDict(memberName, classNamingConvention, getter, setter)
for accessorName, accessor in accessorDict.items():
if accessorName not in originalMemberNameList and accessor is not None:
setattr(cls, accessorName, accessor)
def _accessorDict(self, memberName, classNamingConvention, getter = None, setter = None):
resultDict = {}
resultDict[self._accessorName(memberName, self._GETTER_KEY, classNamingConvention)] = getter
resultDict[self._accessorName(memberName, self._SETTER_KEY, classNamingConvention)] = setter
return resultDict
def _accessorName(self, memberName, accessorName, classNamingConvention):
"""
:type memberName: str
:type accessorName: str
:type classNamingConvention: INamingConvention|None
"""
# Using user's custom getter or setter name.
customAccessorName = self._accessorNameDict[accessorName]
if customAccessorName is not None:
return customAccessorName
# If the class has a custom naming convention, we use it. Cf. 'namingConvention' decorator.
# Otherwise, we use the member's naming convention, camelCase or underscore depending on the decorator that was used
# (respectively synthesizeMember or synthesize_member).
namingConvention = self._namingConvention
if classNamingConvention is not None:
namingConvention = classNamingConvention
# @hack: I don't much like that...
methodName = self._NAMING_CONVENTION_ACCESSOR_NAME_METHOD_DICT[accessorName]
# Using naming convention to transform member's name to an accessor name.
return getattr(namingConvention, methodName)(memberName)
|
wishtack/pysynthetic | synthetic/accessor_delegate.py | AccessorDelegate._accessorName | python | def _accessorName(self, memberName, accessorName, classNamingConvention):
"""
:type memberName: str
:type accessorName: str
:type classNamingConvention: INamingConvention|None
"""
# Using user's custom getter or setter name.
customAccessorName = self._accessorNameDict[accessorName]
if customAccessorName is not None:
return customAccessorName
# If the class has a custom naming convention, we use it. Cf. 'namingConvention' decorator.
# Otherwise, we use the member's naming convention, camelCase or underscore depending on the decorator that was used
# (respectively synthesizeMember or synthesize_member).
namingConvention = self._namingConvention
if classNamingConvention is not None:
namingConvention = classNamingConvention
# @hack: I don't much like that...
methodName = self._NAMING_CONVENTION_ACCESSOR_NAME_METHOD_DICT[accessorName]
# Using naming convention to transform member's name to an accessor name.
return getattr(namingConvention, methodName)(memberName) | :type memberName: str
:type accessorName: str
:type classNamingConvention: INamingConvention|None | train | https://github.com/wishtack/pysynthetic/blob/f37a4a2f1e0313b8c544f60d37c93726bc806ec6/synthetic/accessor_delegate.py#L67-L88 | null | class AccessorDelegate(IMemberDelegate):
_GETTER_KEY = 'getter'
_SETTER_KEY = 'setter'
# Mappings between accessor types and their names and methods.
# @hack: I don't much like that.
_NAMING_CONVENTION_ACCESSOR_NAME_METHOD_DICT = {_GETTER_KEY: 'getterName',
_SETTER_KEY: 'setterName'}
def __init__(self, namingConvention, getterName, setterName):
"""
:type namingConvention: INamingConvention
:type getterName: str|None
:type setterName: str|None
"""
self._namingConvention = namingConvention
# Accessor names.
self._accessorNameDict = {self._GETTER_KEY: getterName,
self._SETTER_KEY: setterName}
def apply(self, cls, originalMemberNameList, memberName, classNamingConvention, getter, setter):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
accessorDict = self._accessorDict(memberName, classNamingConvention, getter, setter)
for accessorName, accessor in accessorDict.items():
if accessorName not in originalMemberNameList and accessor is not None:
setattr(cls, accessorName, accessor)
def remove(self, cls, originalMemberNameList, memberName, classNamingConvention):
"""
:type cls: type
:type originalMemberNameList: list(str)
:type memberName: str
:type classNamingConvention: INamingConvention|None
"""
accessorDict = self._accessorDict(memberName, classNamingConvention)
for accessorName, _ in accessorDict.items():
if accessorName not in originalMemberNameList and hasattr(cls, accessorName):
delattr(cls, accessorName)
def _accessorDict(self, memberName, classNamingConvention, getter = None, setter = None):
resultDict = {}
resultDict[self._accessorName(memberName, self._GETTER_KEY, classNamingConvention)] = getter
resultDict[self._accessorName(memberName, self._SETTER_KEY, classNamingConvention)] = setter
return resultDict
def _accessorName(self, memberName, accessorName, classNamingConvention):
"""
:type memberName: str
:type accessorName: str
:type classNamingConvention: INamingConvention|None
"""
# Using user's custom getter or setter name.
customAccessorName = self._accessorNameDict[accessorName]
if customAccessorName is not None:
return customAccessorName
# If the class has a custom naming convention, we use it. Cf. 'namingConvention' decorator.
# Otherwise, we use the member's naming convention, camelCase or underscore depending on the decorator that was used
# (respectively synthesizeMember or synthesize_member).
namingConvention = self._namingConvention
if classNamingConvention is not None:
namingConvention = classNamingConvention
# @hack: I don't much like that...
methodName = self._NAMING_CONVENTION_ACCESSOR_NAME_METHOD_DICT[accessorName]
# Using naming convention to transform member's name to an accessor name.
return getattr(namingConvention, methodName)(memberName)
|
saschpe/rapport | rapport/timeframe.py | iso_year_start | python | def iso_year_start(iso_year):
"The gregorian calendar date of the first day of the given ISO year"
fourth_jan = datetime.date(iso_year, 1, 4)
delta = datetime.timedelta(fourth_jan.isoweekday() - 1)
return fourth_jan - delta | The gregorian calendar date of the first day of the given ISO year | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/timeframe.py#L24-L28 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import datetime
import sys
import rapport.config
# http://stackoverflow.com/questions/304256/whats-the-best-way-to-find-the-inverse-of-datetime-isocalendar
def iso_to_gregorian(iso_year, iso_week, iso_day):
"Gregorian calendar date for the given ISO year, week and day"
year_start = iso_year_start(iso_year)
return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)
def week_to_datetime(iso_year, iso_week):
"datetime instance for the start of the given ISO year and week"
gregorian = iso_to_gregorian(iso_year, iso_week, 0)
return datetime.datetime.combine(gregorian, datetime.time(0))
class Timeframe(object):
"""Represents a period of time between a start and end time.
:start: Start of timeframe (datetime object)
:end: End of timeframe (datetime object)
"""
def __init__(self, start, end):
self._name = "Generic"
self._start = start
self._end = end
@property
def start(self):
return self._start
@property
def end(self):
return self._end
def contains(self, date):
"""Checks if a date is within a timeframe.
:date: The date to check
"""
return self._start <= date and date < self._end
def __str__(self):
"""Returns a string representation of a timeframe.
"""
return "{0} [{1} - {2}]".format(self._name, self._start.isoformat(), self._end.isoformat())
class CurrentWeekTimeframe(Timeframe):
"""Current week timeframe (in UTC).
"""
def __init__(self):
self._name = "Current week"
self._end = datetime.datetime.utcnow()
# Compute the day but reset the hours/minutes/seconds to zero,
# we want the exact week's start:
week_start = self._end - datetime.timedelta(days=self._end.weekday())
self._start = datetime.datetime(year=week_start.year,
month=week_start.month,
day=week_start.day)
class WeekTimeframe(Timeframe):
"""N-th week of year timeframe (in UTC).
:week: Week number (starting from 1)
"""
def __init__(self, week=1):
self._name = "Week %d" % week
now = datetime.datetime.utcnow()
year = now.year
self._start = week_to_datetime(year, week)
if self._start > now:
self._start = week_to_datetime(year - 1, week)
self._end = self._start + datetime.timedelta(weeks=1)
class CurrentMonthTimeframe(Timeframe):
"""Current month timeframe (in UTC).
"""
def __init__(self):
self._name = "Current month"
self._end = datetime.datetime.utcnow()
self._start = datetime.datetime(year=self._end.year,
month=self._end.month, day=1)
class MonthTimeframe(Timeframe):
"""N-th month of year timeframe (in UTC).
:month: Month number (starting from 1)
"""
def __init__(self, month=1):
self._name = "Month"
raise NotImplementedError()
class RecentDaysTimeframe(Timeframe):
"""Recent days timeframe (in UTC).
"""
def __init__(self, days=14):
self._name = "Recent days ({0})".format(days)
self._end = datetime.datetime.utcnow()
self._start = self._end - datetime.timedelta(days=days)
self._days = days
_TIMEFRAME_CATALOG = {"current_month": CurrentMonthTimeframe,
"current_week": CurrentWeekTimeframe,
"month": MonthTimeframe,
"week": WeekTimeframe,
"recent_days": RecentDaysTimeframe}
def init(name, *args, **kwargs):
"""Instantiate a timeframe from the catalog.
"""
if name in _TIMEFRAME_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize timeframe {0}: {1} {2}".format(name, args, kwargs))
try:
return _TIMEFRAME_CATALOG[name](*args, **kwargs)
except ValueError as e:
print("Failed to initialize timeframe {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize timeframe {0}: Not in catalog!".format(name), file=sys.stderr)
sys.exit(1)
def init_from_config():
return init(rapport.config.get("timeframe", "default"))
def catalog():
"""Returns the list of registered timeframes.
"""
return _TIMEFRAME_CATALOG.keys()
|
saschpe/rapport | rapport/timeframe.py | iso_to_gregorian | python | def iso_to_gregorian(iso_year, iso_week, iso_day):
"Gregorian calendar date for the given ISO year, week and day"
year_start = iso_year_start(iso_year)
return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1) | Gregorian calendar date for the given ISO year, week and day | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/timeframe.py#L31-L34 | [
"def iso_year_start(iso_year):\n \"The gregorian calendar date of the first day of the given ISO year\"\n fourth_jan = datetime.date(iso_year, 1, 4)\n delta = datetime.timedelta(fourth_jan.isoweekday() - 1)\n return fourth_jan - delta\n"
] | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import datetime
import sys
import rapport.config
# http://stackoverflow.com/questions/304256/whats-the-best-way-to-find-the-inverse-of-datetime-isocalendar
def iso_year_start(iso_year):
"The gregorian calendar date of the first day of the given ISO year"
fourth_jan = datetime.date(iso_year, 1, 4)
delta = datetime.timedelta(fourth_jan.isoweekday() - 1)
return fourth_jan - delta
def week_to_datetime(iso_year, iso_week):
"datetime instance for the start of the given ISO year and week"
gregorian = iso_to_gregorian(iso_year, iso_week, 0)
return datetime.datetime.combine(gregorian, datetime.time(0))
class Timeframe(object):
"""Represents a period of time between a start and end time.
:start: Start of timeframe (datetime object)
:end: End of timeframe (datetime object)
"""
def __init__(self, start, end):
self._name = "Generic"
self._start = start
self._end = end
@property
def start(self):
return self._start
@property
def end(self):
return self._end
def contains(self, date):
"""Checks if a date is within a timeframe.
:date: The date to check
"""
return self._start <= date and date < self._end
def __str__(self):
"""Returns a string representation of a timeframe.
"""
return "{0} [{1} - {2}]".format(self._name, self._start.isoformat(), self._end.isoformat())
class CurrentWeekTimeframe(Timeframe):
"""Current week timeframe (in UTC).
"""
def __init__(self):
self._name = "Current week"
self._end = datetime.datetime.utcnow()
# Compute the day but reset the hours/minutes/seconds to zero,
# we want the exact week's start:
week_start = self._end - datetime.timedelta(days=self._end.weekday())
self._start = datetime.datetime(year=week_start.year,
month=week_start.month,
day=week_start.day)
class WeekTimeframe(Timeframe):
"""N-th week of year timeframe (in UTC).
:week: Week number (starting from 1)
"""
def __init__(self, week=1):
self._name = "Week %d" % week
now = datetime.datetime.utcnow()
year = now.year
self._start = week_to_datetime(year, week)
if self._start > now:
self._start = week_to_datetime(year - 1, week)
self._end = self._start + datetime.timedelta(weeks=1)
class CurrentMonthTimeframe(Timeframe):
"""Current month timeframe (in UTC).
"""
def __init__(self):
self._name = "Current month"
self._end = datetime.datetime.utcnow()
self._start = datetime.datetime(year=self._end.year,
month=self._end.month, day=1)
class MonthTimeframe(Timeframe):
"""N-th month of year timeframe (in UTC).
:month: Month number (starting from 1)
"""
def __init__(self, month=1):
self._name = "Month"
raise NotImplementedError()
class RecentDaysTimeframe(Timeframe):
"""Recent days timeframe (in UTC).
"""
def __init__(self, days=14):
self._name = "Recent days ({0})".format(days)
self._end = datetime.datetime.utcnow()
self._start = self._end - datetime.timedelta(days=days)
self._days = days
_TIMEFRAME_CATALOG = {"current_month": CurrentMonthTimeframe,
"current_week": CurrentWeekTimeframe,
"month": MonthTimeframe,
"week": WeekTimeframe,
"recent_days": RecentDaysTimeframe}
def init(name, *args, **kwargs):
"""Instantiate a timeframe from the catalog.
"""
if name in _TIMEFRAME_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize timeframe {0}: {1} {2}".format(name, args, kwargs))
try:
return _TIMEFRAME_CATALOG[name](*args, **kwargs)
except ValueError as e:
print("Failed to initialize timeframe {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize timeframe {0}: Not in catalog!".format(name), file=sys.stderr)
sys.exit(1)
def init_from_config():
return init(rapport.config.get("timeframe", "default"))
def catalog():
"""Returns the list of registered timeframes.
"""
return _TIMEFRAME_CATALOG.keys()
|
saschpe/rapport | rapport/timeframe.py | week_to_datetime | python | def week_to_datetime(iso_year, iso_week):
"datetime instance for the start of the given ISO year and week"
gregorian = iso_to_gregorian(iso_year, iso_week, 0)
return datetime.datetime.combine(gregorian, datetime.time(0)) | datetime instance for the start of the given ISO year and week | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/timeframe.py#L37-L40 | [
"def iso_to_gregorian(iso_year, iso_week, iso_day):\n \"Gregorian calendar date for the given ISO year, week and day\"\n year_start = iso_year_start(iso_year)\n return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)\n"
] | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import datetime
import sys
import rapport.config
# http://stackoverflow.com/questions/304256/whats-the-best-way-to-find-the-inverse-of-datetime-isocalendar
def iso_year_start(iso_year):
"The gregorian calendar date of the first day of the given ISO year"
fourth_jan = datetime.date(iso_year, 1, 4)
delta = datetime.timedelta(fourth_jan.isoweekday() - 1)
return fourth_jan - delta
def iso_to_gregorian(iso_year, iso_week, iso_day):
"Gregorian calendar date for the given ISO year, week and day"
year_start = iso_year_start(iso_year)
return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)
class Timeframe(object):
"""Represents a period of time between a start and end time.
:start: Start of timeframe (datetime object)
:end: End of timeframe (datetime object)
"""
def __init__(self, start, end):
self._name = "Generic"
self._start = start
self._end = end
@property
def start(self):
return self._start
@property
def end(self):
return self._end
def contains(self, date):
"""Checks if a date is within a timeframe.
:date: The date to check
"""
return self._start <= date and date < self._end
def __str__(self):
"""Returns a string representation of a timeframe.
"""
return "{0} [{1} - {2}]".format(self._name, self._start.isoformat(), self._end.isoformat())
class CurrentWeekTimeframe(Timeframe):
"""Current week timeframe (in UTC).
"""
def __init__(self):
self._name = "Current week"
self._end = datetime.datetime.utcnow()
# Compute the day but reset the hours/minutes/seconds to zero,
# we want the exact week's start:
week_start = self._end - datetime.timedelta(days=self._end.weekday())
self._start = datetime.datetime(year=week_start.year,
month=week_start.month,
day=week_start.day)
class WeekTimeframe(Timeframe):
"""N-th week of year timeframe (in UTC).
:week: Week number (starting from 1)
"""
def __init__(self, week=1):
self._name = "Week %d" % week
now = datetime.datetime.utcnow()
year = now.year
self._start = week_to_datetime(year, week)
if self._start > now:
self._start = week_to_datetime(year - 1, week)
self._end = self._start + datetime.timedelta(weeks=1)
class CurrentMonthTimeframe(Timeframe):
"""Current month timeframe (in UTC).
"""
def __init__(self):
self._name = "Current month"
self._end = datetime.datetime.utcnow()
self._start = datetime.datetime(year=self._end.year,
month=self._end.month, day=1)
class MonthTimeframe(Timeframe):
"""N-th month of year timeframe (in UTC).
:month: Month number (starting from 1)
"""
def __init__(self, month=1):
self._name = "Month"
raise NotImplementedError()
class RecentDaysTimeframe(Timeframe):
"""Recent days timeframe (in UTC).
"""
def __init__(self, days=14):
self._name = "Recent days ({0})".format(days)
self._end = datetime.datetime.utcnow()
self._start = self._end - datetime.timedelta(days=days)
self._days = days
_TIMEFRAME_CATALOG = {"current_month": CurrentMonthTimeframe,
"current_week": CurrentWeekTimeframe,
"month": MonthTimeframe,
"week": WeekTimeframe,
"recent_days": RecentDaysTimeframe}
def init(name, *args, **kwargs):
"""Instantiate a timeframe from the catalog.
"""
if name in _TIMEFRAME_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize timeframe {0}: {1} {2}".format(name, args, kwargs))
try:
return _TIMEFRAME_CATALOG[name](*args, **kwargs)
except ValueError as e:
print("Failed to initialize timeframe {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize timeframe {0}: Not in catalog!".format(name), file=sys.stderr)
sys.exit(1)
def init_from_config():
return init(rapport.config.get("timeframe", "default"))
def catalog():
"""Returns the list of registered timeframes.
"""
return _TIMEFRAME_CATALOG.keys()
|
saschpe/rapport | rapport/timeframe.py | init | python | def init(name, *args, **kwargs):
if name in _TIMEFRAME_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize timeframe {0}: {1} {2}".format(name, args, kwargs))
try:
return _TIMEFRAME_CATALOG[name](*args, **kwargs)
except ValueError as e:
print("Failed to initialize timeframe {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize timeframe {0}: Not in catalog!".format(name), file=sys.stderr)
sys.exit(1) | Instantiate a timeframe from the catalog. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/timeframe.py#L140-L152 | [
"def get_int(section, option, default=-1):\n return int(get(section, option, default))\n"
] | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import datetime
import sys
import rapport.config
# http://stackoverflow.com/questions/304256/whats-the-best-way-to-find-the-inverse-of-datetime-isocalendar
def iso_year_start(iso_year):
"The gregorian calendar date of the first day of the given ISO year"
fourth_jan = datetime.date(iso_year, 1, 4)
delta = datetime.timedelta(fourth_jan.isoweekday() - 1)
return fourth_jan - delta
def iso_to_gregorian(iso_year, iso_week, iso_day):
"Gregorian calendar date for the given ISO year, week and day"
year_start = iso_year_start(iso_year)
return year_start + datetime.timedelta(days=iso_day - 1, weeks=iso_week - 1)
def week_to_datetime(iso_year, iso_week):
"datetime instance for the start of the given ISO year and week"
gregorian = iso_to_gregorian(iso_year, iso_week, 0)
return datetime.datetime.combine(gregorian, datetime.time(0))
class Timeframe(object):
"""Represents a period of time between a start and end time.
:start: Start of timeframe (datetime object)
:end: End of timeframe (datetime object)
"""
def __init__(self, start, end):
self._name = "Generic"
self._start = start
self._end = end
@property
def start(self):
return self._start
@property
def end(self):
return self._end
def contains(self, date):
"""Checks if a date is within a timeframe.
:date: The date to check
"""
return self._start <= date and date < self._end
def __str__(self):
"""Returns a string representation of a timeframe.
"""
return "{0} [{1} - {2}]".format(self._name, self._start.isoformat(), self._end.isoformat())
class CurrentWeekTimeframe(Timeframe):
"""Current week timeframe (in UTC).
"""
def __init__(self):
self._name = "Current week"
self._end = datetime.datetime.utcnow()
# Compute the day but reset the hours/minutes/seconds to zero,
# we want the exact week's start:
week_start = self._end - datetime.timedelta(days=self._end.weekday())
self._start = datetime.datetime(year=week_start.year,
month=week_start.month,
day=week_start.day)
class WeekTimeframe(Timeframe):
"""N-th week of year timeframe (in UTC).
:week: Week number (starting from 1)
"""
def __init__(self, week=1):
self._name = "Week %d" % week
now = datetime.datetime.utcnow()
year = now.year
self._start = week_to_datetime(year, week)
if self._start > now:
self._start = week_to_datetime(year - 1, week)
self._end = self._start + datetime.timedelta(weeks=1)
class CurrentMonthTimeframe(Timeframe):
"""Current month timeframe (in UTC).
"""
def __init__(self):
self._name = "Current month"
self._end = datetime.datetime.utcnow()
self._start = datetime.datetime(year=self._end.year,
month=self._end.month, day=1)
class MonthTimeframe(Timeframe):
"""N-th month of year timeframe (in UTC).
:month: Month number (starting from 1)
"""
def __init__(self, month=1):
self._name = "Month"
raise NotImplementedError()
class RecentDaysTimeframe(Timeframe):
"""Recent days timeframe (in UTC).
"""
def __init__(self, days=14):
self._name = "Recent days ({0})".format(days)
self._end = datetime.datetime.utcnow()
self._start = self._end - datetime.timedelta(days=days)
self._days = days
# Registry mapping config-file names to timeframe classes.
_TIMEFRAME_CATALOG = {"current_month": CurrentMonthTimeframe,
                      "current_week": CurrentWeekTimeframe,
                      "month": MonthTimeframe,
                      "week": WeekTimeframe,
                      "recent_days": RecentDaysTimeframe}


def init_from_config():
    # Instantiate the timeframe named by the [timeframe] "default" config
    # option; ``init`` is presumably a factory defined elsewhere in this
    # module — TODO confirm.
    return init(rapport.config.get("timeframe", "default"))


def catalog():
    """Returns the list of registered timeframes.

    NOTE(review): actually returns ``dict.keys()`` — a view object on
    Python 3, not a list; wrap in ``list()`` if list semantics are needed.
    """
    return _TIMEFRAME_CATALOG.keys()
|
saschpe/rapport | rapport/template.py | _get_template_dirs | python | def _get_template_dirs(type="plugin"):
template_dirs = [
os.path.expanduser(os.path.join(USER_CONFIG_DIR, "templates", type)),
os.path.join("rapport", "templates", type) # Local dev tree
]
return template_dirs | Return a list of directories where templates may be located. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/template.py#L26-L33 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import sys
import jinja2
import json
from rapport.config import USER_CONFIG_DIR
_JINJA2_ENV = {}
def sub_filter(s, find, replace):
    """Jinja2 filter: substitute regex *find* with *replace* throughout *s*."""
    return re.compile(find).sub(replace, s)
def subn_filter(s, find, replace, count=0):
    """Jinja2 filter: regex-substitute with an optional replacement limit.

    :count: maximum number of substitutions; 0 replaces all occurrences
        (matching ``re.sub`` semantics).
    """
    # Bug fix: the original called the non-existent ``re.gsub`` (a Ruby
    # API) with a bogus argument order, raising AttributeError on use.
    return re.sub(find, replace, s, count=count)
def firstline_filter(s):
    """Jinja2 filter: keep only the first line of *s* (drop from first newline on)."""
    return s.partition("\n")[0]
# useful for debugging when authoring templates
def json_filter(val):
    """Jinja2 filter: pretty-print *val* as sorted, 4-space-indented JSON."""
    return json.dumps(val, sort_keys=True, indent=4)
def init():
    """Build one Jinja2 environment per template type and register filters."""
    for kind in ("plugin", "email", "web"):
        env = jinja2.Environment(
            loader=jinja2.FileSystemLoader(_get_template_dirs(kind)),
            extensions=["jinja2.ext.i18n", "jinja2.ext.loopcontrols"],
            line_statement_prefix="%%",
            line_comment_prefix="##",
            trim_blocks=True)
        # i18n extension is enabled but untranslated; install no-op catalogs.
        env.install_null_translations(newstyle=False)
        env.filters["firstline"] = firstline_filter
        env.filters["json"] = json_filter
        env.filters["sub"] = sub_filter
        env.filters["subn"] = subn_filter
        _JINJA2_ENV[kind] = env
def get_template(name, format="text", type="plugin"):
    """Look up the ``<name>.<format>.jinja2`` template of the given type.

    Environments are built lazily on first use; on a missing template a
    message is printed to stderr and None is returned implicitly.
    """
    if not _JINJA2_ENV:
        init()
    filename = "{0}.{1}.jinja2".format(name, format)
    try:
        return _JINJA2_ENV[type].get_template(filename)
    except jinja2.TemplateNotFound:
        print("Missing template {0}/{1}!".format(type, filename), file=sys.stderr)
|
saschpe/rapport | rapport/template.py | subn_filter | python | def subn_filter(s, find, replace, count=0):
return re.gsub(find, replace, count, s) | A non-optimal implementation of a regex filter | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/template.py#L42-L44 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import sys
import jinja2
import json
from rapport.config import USER_CONFIG_DIR
def _get_template_dirs(type="plugin"):
"""Return a list of directories where templates may be located.
"""
template_dirs = [
os.path.expanduser(os.path.join(USER_CONFIG_DIR, "templates", type)),
os.path.join("rapport", "templates", type) # Local dev tree
]
return template_dirs
_JINJA2_ENV = {}
def sub_filter(s, find, replace):
"""A non-optimal implementation of a regex filter"""
return re.sub(find, replace, s)
def firstline_filter(s):
return re.sub("\n.*", '', s)
# useful for debugging when authoring templates
def json_filter(val):
return json.dumps(val, sort_keys=True, indent=4)
def init():
for type in ["plugin", "email", "web"]:
loader = jinja2.FileSystemLoader(_get_template_dirs(type))
env = jinja2.Environment(loader=loader,
extensions=["jinja2.ext.i18n", "jinja2.ext.loopcontrols"],
line_statement_prefix="%%",
line_comment_prefix="##",
trim_blocks=True)
env.install_null_translations(newstyle=False)
env.filters['firstline'] = firstline_filter
env.filters['json'] = json_filter
env.filters['sub'] = sub_filter
env.filters['subn'] = subn_filter
_JINJA2_ENV[type] = env
def get_template(name, format="text", type="plugin"):
if not _JINJA2_ENV:
init()
template_name = "{0}.{1}.jinja2".format(name, format)
try:
return _JINJA2_ENV[type].get_template(template_name)
except jinja2.TemplateNotFound:
print("Missing template {0}/{1}!".format(type, template_name), file=sys.stderr)
|
saschpe/rapport | rapport/util.py | camelcase_to_underscores | python | def camelcase_to_underscores(word):
s1 = _FIRST_CAP_RE.sub(r'\1_\2', word)
return _ALL_CAP_RE.sub(r'\1_\2', s1).lower() | Converts a CamelCase word into an under_score word.
>>> camelcase_to_underscores("CamelCaseCase")
'camel_case_case'
>>> camelcase_to_underscores("getHTTPResponseCode")
'get_http_response_code' | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/util.py#L32-L41 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various utility functions.
"""
import datetime
import re
import site
import sys
import subprocess
# Raw strings throughout: the originals used plain strings with "\." and
# "\d", which are invalid escape sequences (SyntaxWarning on modern Python).
_FIRST_CAP_RE = re.compile(r'(.)([A-Z][a-z]+)')
_ALL_CAP_RE = re.compile(r'([a-z0-9])([A-Z])')

ISO8610_FORMAT = "%Y-%m-%dT%H:%M:%S"
ISO8610_FORMAT_MICROSECONDS = "%Y-%m-%dT%H:%M:%S.%f"


def silent_popen(args, **kwargs):
    """Wrapper for subprocess.Popen with suppressed output.

    STDERR is redirected to STDOUT which is piped back to the
    calling process and returned (as bytes) as the result.
    """
    return subprocess.Popen(args,
                            stderr=subprocess.STDOUT,
                            stdout=subprocess.PIPE, **kwargs).communicate()[0]


def datetime_from_iso8601(date):
    """Small helper that parses ISO-8601 date dates.

    A trailing 'Z' (UTC marker) is stripped; the result is a naive datetime.

    >>> datetime_from_iso8601("2013-04-10T12:52:39")
    datetime.datetime(2013, 4, 10, 12, 52, 39)
    >>> datetime_from_iso8601("2013-01-07T12:55:19.257")
    datetime.datetime(2013, 1, 7, 12, 55, 19, 257000)
    """
    format = ISO8610_FORMAT
    if date.endswith("Z"):
        date = date[:-1]  # Date is UTC; parse as naive
    if re.match(r".*\.\d+", date):
        # Date includes microseconds
        format = ISO8610_FORMAT_MICROSECONDS
    return datetime.datetime.strptime(date, format)
def under_virtualenv():
    # True when running inside a classic virtualenv, which sets
    # ``sys.real_prefix``. NOTE(review): PEP 405 venvs expose
    # ``sys.base_prefix`` instead and are not detected here — confirm.
    return hasattr(sys, "real_prefix")


def getsitepackages():
    # Prefer the stdlib implementation when available.
    if hasattr(site, "getsitepackages"):
        return site.getsitepackages()
    else:
        # Workaround for older Python versions and some broken virtualenvs:
        if under_virtualenv():
            return []
        else:
            # NOTE(review): distutils is removed in Python 3.12; this
            # fallback only runs on old interpreters lacking
            # site.getsitepackages.
            from distutils.sysconfig import get_python_lib
            return [get_python_lib(True)]
|
saschpe/rapport | rapport/util.py | silent_popen | python | def silent_popen(args, **kwargs):
return subprocess.Popen(args,
stderr=subprocess.STDOUT,
stdout=subprocess.PIPE, **kwargs).communicate()[0] | Wrapper for subprocess.Popen with suppressed output.
STERR is redirected to STDOUT which is piped back to the
calling process and returned as the result. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/util.py#L44-L52 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various utility functions.
"""
import datetime
import re
import site
import sys
import subprocess
# Pre-compiled patterns for the two CamelCase split passes.
_FIRST_CAP_RE = re.compile('(.)([A-Z][a-z]+)')
_ALL_CAP_RE = re.compile('([a-z0-9])([A-Z])')

ISO8610_FORMAT = "%Y-%m-%dT%H:%M:%S"
ISO8610_FORMAT_MICROSECONDS = "%Y-%m-%dT%H:%M:%S.%f"


def camelcase_to_underscores(word):
    """Convert a CamelCase identifier into its under_score form.

    >>> camelcase_to_underscores("CamelCaseCase")
    'camel_case_case'
    >>> camelcase_to_underscores("getHTTPResponseCode")
    'get_http_response_code'
    """
    partially_split = _FIRST_CAP_RE.sub(r'\1_\2', word)
    fully_split = _ALL_CAP_RE.sub(r'\1_\2', partially_split)
    return fully_split.lower()
def datetime_from_iso8601(date):
"""Small helper that parses ISO-8601 date dates.
>>> datetime_from_iso8601("2013-04-10T12:52:39")
datetime.datetime(2013, 4, 10, 12, 52, 39)
>>> datetime_from_iso8601("2013-01-07T12:55:19.257")
datetime.datetime(2013, 1, 7, 12, 55, 19, 257000)
"""
format = ISO8610_FORMAT
if date.endswith("Z"):
date = date[:-1] # Date date is UTC
if re.match(".*\.\d+", date):
# Date includes microseconds
format = ISO8610_FORMAT_MICROSECONDS
return datetime.datetime.strptime(date, format)
def under_virtualenv():
return hasattr(sys, "real_prefix")
def getsitepackages():
if hasattr(site, "getsitepackages"):
return site.getsitepackages()
else:
# Workaround for older Python versions and some broken virtualenvs:
if under_virtualenv():
return []
else:
from distutils.sysconfig import get_python_lib
return [get_python_lib(True)]
|
saschpe/rapport | rapport/util.py | datetime_from_iso8601 | python | def datetime_from_iso8601(date):
format = ISO8610_FORMAT
if date.endswith("Z"):
date = date[:-1] # Date date is UTC
if re.match(".*\.\d+", date):
# Date includes microseconds
format = ISO8610_FORMAT_MICROSECONDS
return datetime.datetime.strptime(date, format) | Small helper that parses ISO-8601 date dates.
>>> datetime_from_iso8601("2013-04-10T12:52:39")
datetime.datetime(2013, 4, 10, 12, 52, 39)
>>> datetime_from_iso8601("2013-01-07T12:55:19.257")
datetime.datetime(2013, 1, 7, 12, 55, 19, 257000) | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/util.py#L55-L69 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various utility functions.
"""
import datetime
import re
import site
import sys
import subprocess
_FIRST_CAP_RE = re.compile('(.)([A-Z][a-z]+)')
_ALL_CAP_RE = re.compile('([a-z0-9])([A-Z])')
ISO8610_FORMAT = "%Y-%m-%dT%H:%M:%S"
ISO8610_FORMAT_MICROSECONDS = "%Y-%m-%dT%H:%M:%S.%f"
def camelcase_to_underscores(word):
"""Converts a CamelCase word into an under_score word.
>>> camelcase_to_underscores("CamelCaseCase")
'camel_case_case'
>>> camelcase_to_underscores("getHTTPResponseCode")
'get_http_response_code'
"""
s1 = _FIRST_CAP_RE.sub(r'\1_\2', word)
return _ALL_CAP_RE.sub(r'\1_\2', s1).lower()
def silent_popen(args, **kwargs):
"""Wrapper for subprocess.Popen with suppressed output.
STERR is redirected to STDOUT which is piped back to the
calling process and returned as the result.
"""
return subprocess.Popen(args,
stderr=subprocess.STDOUT,
stdout=subprocess.PIPE, **kwargs).communicate()[0]
def under_virtualenv():
return hasattr(sys, "real_prefix")
def getsitepackages():
if hasattr(site, "getsitepackages"):
return site.getsitepackages()
else:
# Workaround for older Python versions and some broken virtualenvs:
if under_virtualenv():
return []
else:
from distutils.sysconfig import get_python_lib
return [get_python_lib(True)]
|
saschpe/rapport | rapport/email.py | xdg_compose | python | def xdg_compose(to, subject, body=None, cc=None, bcc=None):
# Build the xdg-email command line; --utf8 marks option values as UTF-8.
command = ["xdg-email", "--utf8", "--subject", subject]
if body:
    command += ["--body", body]
# cc/bcc/to each accept either a single address string or a list of
# addresses; lists are flattened to a comma-separated string.
if cc:
    if type(cc) is list:
        cc = ", ".join(cc)
    command += ["--cc", cc]
if bcc:
    if type(bcc) is list:
        bcc = ", ".join(bcc)
    command += ["--bcc", bcc]
if type(to) is list:
    to = ", ".join(to)
command.append(to)
# Hand off to the user's configured mail client; returns its exit status.
return subprocess.call(command)
Needs xdg-utils and a running X session. Works with GNOME, KDE,
MATE, XFCE, ... | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/email.py#L30-L51 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
E-Mail functionality.
"""
import subprocess
def compose():
    # Placeholder: generic composition (beyond xdg_compose) is not implemented.
    raise NotImplementedError()


def send():
    # Placeholder: direct sending (e.g. via SMTP) is not implemented.
    raise NotImplementedError()
|
saschpe/rapport | rapport/plugins/gerrit.py | GerritPlugin._ssh_cmd | python | def _ssh_cmd(self, *args):
command = "gerrit {0}".format(" ".join(args))
_, stdout, stderr = self._client.exec_command(command)
return (stdout.readlines(), stderr.readlines()) | Execute a gerrit command over SSH. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugins/gerrit.py#L36-L41 | null | class GerritPlugin(rapport.plugin.Plugin):
def __init__(self, *args, **kwargs):
super(GerritPlugin, self).__init__(*args, **kwargs)
self._client = paramiko.SSHClient()
self._client.load_system_host_keys()
def _ssh_query(self, *args):
"""Execute a gerrit query over SSH and returns JSON-formatted data.
"""
return self._ssh_cmd("query", "--format=JSON", *args)
def collect(self, timeframe):
self._client.connect(self.url.hostname, self.url.port, self.login)
stdout, stderr = self._ssh_query("owner:{0}".format(self.login))
self._client.close()
changes = []
if not stderr:
for line in stdout[:-1]: # Last line contains only download stats
change = json.loads(line)
if "lastUpdated" in change:
last_updated = datetime.utcfromtimestamp(
change["lastUpdated"])
if timeframe.contains(last_updated):
changes.append(change)
else:
print("Change {0} is missing lastUpdated".format(change))
return self._results({"changes": changes})
|
saschpe/rapport | rapport/config.py | _get_config_dirs | python | def _get_config_dirs():
config_dirs = [
USER_CONFIG_DIR,
os.path.join("/", "etc", "rapport"),
os.path.abspath(os.path.join("rapport", "config"))
]
return config_dirs | Return a list of directories where config files may be located.
The following directories are returned::
$XDG_CONFIG_HOME/rapport/ ($XDG_CONFIG_HOME defaults to ~/.config)
/etc/rapport/ | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/config.py#L31-L44 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import shutil
try:
import ConfigParser as configparser # Py2
except ImportError:
import configparser
import rapport.config
XDG_CONFIG_HOME_DIR = os.getenv('XDG_CONFIG_HOME') or \
os.path.expanduser(os.path.join("~", ".config"))
USER_CONFIG_DIR = os.path.join(XDG_CONFIG_HOME_DIR, "rapport")
USER_CONFIG_FILE = os.path.join(USER_CONFIG_DIR, "rapport.conf")
def find_config_files():
"""Return a list of default configuration files.
"""
config_files = []
for config_dir in _get_config_dirs():
path = os.path.join(config_dir, "rapport.conf")
if os.path.exists(path):
config_files.append(path)
return list(filter(bool, config_files))
def init_user():
    """Create and populate the ~/.config/rapport directory tree if it's not existing.

    Doesn't interfere with already existing directories or configuration files.
    """
    if not os.path.exists(USER_CONFIG_DIR):
        if rapport.config.get_int("rapport", "verbosity") >= 1:
            print("Create user directory {0}".format(USER_CONFIG_DIR))
        os.makedirs(USER_CONFIG_DIR)
    # Create the expected sub-directory layout underneath the config dir.
    for subdir in ["plugins", "reports", "templates/plugin", "templates/email", "templates/web"]:
        user_conf_subdir = os.path.join(USER_CONFIG_DIR, subdir)
        if not os.path.exists(user_conf_subdir):
            if rapport.config.get_int("rapport", "verbosity") >= 1:
                print("Create user directory {0}".format(user_conf_subdir))
            os.makedirs(user_conf_subdir)
        # Reports may contain sensitive data: force mode 0700 on the dir.
        if subdir == "reports" and not (os.stat(user_conf_subdir).st_mode & 0o777) == 0o700:
            if rapport.config.get_int("rapport", "verbosity") >= 1:
                print("Set secure directory permissions for {0}".format(user_conf_subdir))
            os.chmod(user_conf_subdir, 0o700)
    if not os.path.exists(USER_CONFIG_FILE):
        if rapport.config.get_int("rapport", "verbosity") >= 1:
            print("Create user configuration {0}".format(USER_CONFIG_FILE))
        # Seed the user config from the packaged default rapport.conf that
        # lives next to this module.
        default_config = os.path.abspath(os.path.join(os.path.splitext(__file__)[0], "rapport.conf"))
        shutil.copyfile(default_config, USER_CONFIG_FILE)
    # The config file may hold credentials: force mode 0600.
    if not (os.stat(USER_CONFIG_FILE).st_mode & 0o777) == 0o600:
        if rapport.config.get_int("rapport", "verbosity") >= 1:
            print("Set secure file permissions for {0}".format(USER_CONFIG_FILE))
        os.chmod(USER_CONFIG_FILE, 0o600)
CONF = None
def load():
global CONF
config = configparser.SafeConfigParser()
if not find_config_files():
init_user()
config.read(find_config_files()[0])
CONF = config
return CONF
def get(section, option, default=None):
    """Return the config option's value, or *default* when it is missing."""
    if not CONF.has_option(section, option):
        return default
    return CONF.get(section, option)
def get_int(section, option, default=-1):
    # Integer-typed convenience wrapper around get(); note the default
    # default is -1, not None.
    return int(get(section, option, default))


def set(section, option, value):
    # Stores *value* stringified; shadows the builtin ``set`` inside this
    # module (pre-existing design).
    CONF.set(section, option, str(value))
def plugins():
    """Yield one settings dict per ``plugin:<name>:<alias>`` config section."""
    for section in CONF.sections():
        if not section.startswith("plugin:"):
            continue
        name, alias = section.split(":")[1:]
        plugin = {"name": name, "alias": alias}
        for option in CONF.options(section):
            plugin[option] = CONF.get(section, option)
        yield plugin
|
saschpe/rapport | rapport/config.py | find_config_files | python | def find_config_files():
config_files = []
for config_dir in _get_config_dirs():
path = os.path.join(config_dir, "rapport.conf")
if os.path.exists(path):
config_files.append(path)
return list(filter(bool, config_files)) | Return a list of default configuration files. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/config.py#L47-L58 | [
"def _get_config_dirs():\n \"\"\"Return a list of directories where config files may be located.\n\n The following directories are returned::\n\n $XDG_CONFIG_HOME/rapport/ ($XDG_CONFIG_HOME defaults to ~/.config)\n /etc/rapport/\n \"\"\"\n config_dirs = [\n USER_CONFIG_DIR,\n os.... | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import shutil
try:
import ConfigParser as configparser # Py2
except ImportError:
import configparser
import rapport.config
XDG_CONFIG_HOME_DIR = os.getenv('XDG_CONFIG_HOME') or \
os.path.expanduser(os.path.join("~", ".config"))
USER_CONFIG_DIR = os.path.join(XDG_CONFIG_HOME_DIR, "rapport")
USER_CONFIG_FILE = os.path.join(USER_CONFIG_DIR, "rapport.conf")
def _get_config_dirs():
"""Return a list of directories where config files may be located.
The following directories are returned::
$XDG_CONFIG_HOME/rapport/ ($XDG_CONFIG_HOME defaults to ~/.config)
/etc/rapport/
"""
config_dirs = [
USER_CONFIG_DIR,
os.path.join("/", "etc", "rapport"),
os.path.abspath(os.path.join("rapport", "config"))
]
return config_dirs
def init_user():
"""Create and populate the ~/.config/rapport directory tree if it's not existing.
Doesn't interfere with already existing directories or configuration files.
"""
if not os.path.exists(USER_CONFIG_DIR):
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Create user directory {0}".format(USER_CONFIG_DIR))
os.makedirs(USER_CONFIG_DIR)
for subdir in ["plugins", "reports", "templates/plugin", "templates/email", "templates/web"]:
user_conf_subdir = os.path.join(USER_CONFIG_DIR, subdir)
if not os.path.exists(user_conf_subdir):
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Create user directory {0}".format(user_conf_subdir))
os.makedirs(user_conf_subdir)
if subdir == "reports" and not (os.stat(user_conf_subdir).st_mode & 0o777) == 0o700:
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Set secure directory permissions for {0}".format(user_conf_subdir))
os.chmod(user_conf_subdir, 0o700)
if not os.path.exists(USER_CONFIG_FILE):
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Create user configuration {0}".format(USER_CONFIG_FILE))
default_config = os.path.abspath(os.path.join(os.path.splitext(__file__)[0], "rapport.conf"))
shutil.copyfile(default_config, USER_CONFIG_FILE)
if not (os.stat(USER_CONFIG_FILE).st_mode & 0o777) == 0o600:
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Set secure file permissions for {0}".format(USER_CONFIG_FILE))
os.chmod(USER_CONFIG_FILE, 0o600)
CONF = None
def load():
global CONF
config = configparser.SafeConfigParser()
if not find_config_files():
init_user()
config.read(find_config_files()[0])
CONF = config
return CONF
def get(section, option, default=None):
if CONF.has_option(section, option):
return CONF.get(section, option)
else:
return default
def get_int(section, option, default=-1):
return int(get(section, option, default))
def set(section, option, value):
CONF.set(section, option, str(value))
def plugins():
for section in CONF.sections():
if section.startswith("plugin:"):
name, alias = section.split(":")[1:]
plugin = {"name": name, "alias": alias}
for option in CONF.options(section):
plugin[option] = CONF.get(section, option)
yield plugin
|
saschpe/rapport | rapport/config.py | init_user | python | def init_user():
if not os.path.exists(USER_CONFIG_DIR):
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Create user directory {0}".format(USER_CONFIG_DIR))
os.makedirs(USER_CONFIG_DIR)
for subdir in ["plugins", "reports", "templates/plugin", "templates/email", "templates/web"]:
user_conf_subdir = os.path.join(USER_CONFIG_DIR, subdir)
if not os.path.exists(user_conf_subdir):
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Create user directory {0}".format(user_conf_subdir))
os.makedirs(user_conf_subdir)
if subdir == "reports" and not (os.stat(user_conf_subdir).st_mode & 0o777) == 0o700:
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Set secure directory permissions for {0}".format(user_conf_subdir))
os.chmod(user_conf_subdir, 0o700)
if not os.path.exists(USER_CONFIG_FILE):
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Create user configuration {0}".format(USER_CONFIG_FILE))
default_config = os.path.abspath(os.path.join(os.path.splitext(__file__)[0], "rapport.conf"))
shutil.copyfile(default_config, USER_CONFIG_FILE)
if not (os.stat(USER_CONFIG_FILE).st_mode & 0o777) == 0o600:
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Set secure file permissions for {0}".format(USER_CONFIG_FILE))
os.chmod(USER_CONFIG_FILE, 0o600) | Create and populate the ~/.config/rapport directory tree if it's not existing.
Doesn't interfere with already existing directories or configuration files. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/config.py#L61-L89 | [
"def get_int(section, option, default=-1):\n return int(get(section, option, default))\n"
] | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import shutil
try:
import ConfigParser as configparser # Py2
except ImportError:
import configparser
import rapport.config
XDG_CONFIG_HOME_DIR = os.getenv('XDG_CONFIG_HOME') or \
os.path.expanduser(os.path.join("~", ".config"))
USER_CONFIG_DIR = os.path.join(XDG_CONFIG_HOME_DIR, "rapport")
USER_CONFIG_FILE = os.path.join(USER_CONFIG_DIR, "rapport.conf")
def _get_config_dirs():
"""Return a list of directories where config files may be located.
The following directories are returned::
$XDG_CONFIG_HOME/rapport/ ($XDG_CONFIG_HOME defaults to ~/.config)
/etc/rapport/
"""
config_dirs = [
USER_CONFIG_DIR,
os.path.join("/", "etc", "rapport"),
os.path.abspath(os.path.join("rapport", "config"))
]
return config_dirs
def find_config_files():
    """Return the list of existing default configuration files."""
    candidates = (os.path.join(config_dir, "rapport.conf")
                  for config_dir in _get_config_dirs())
    return [path for path in candidates if os.path.exists(path)]
CONF = None
def load():
global CONF
config = configparser.SafeConfigParser()
if not find_config_files():
init_user()
config.read(find_config_files()[0])
CONF = config
return CONF
def get(section, option, default=None):
if CONF.has_option(section, option):
return CONF.get(section, option)
else:
return default
def get_int(section, option, default=-1):
return int(get(section, option, default))
def set(section, option, value):
CONF.set(section, option, str(value))
def plugins():
for section in CONF.sections():
if section.startswith("plugin:"):
name, alias = section.split(":")[1:]
plugin = {"name": name, "alias": alias}
for option in CONF.options(section):
plugin[option] = CONF.get(section, option)
yield plugin
|
saschpe/rapport | rapport/report.py | get_report | python | def get_report(report=None):
if not report:
report = list_reports()[-1:][0]
report_path = _get_reports_path(report)
report_dict = {"report": report}
for filename in os.listdir(report_path):
with open(os.path.join(report_path, filename), "r") as f:
report_dict[filename] = f.read()
return report_dict | Returns details of a specific report | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/report.py#L50-L60 | [
"def list_reports():\n \"\"\"Returns a list of created reports.\n \"\"\"\n return sorted(os.listdir(_get_reports_path()))\n",
"def _get_reports_path(report=None):\n path_parts = [USER_DATA_DIR, \"reports\"]\n if report:\n path_parts.append(report)\n return os.path.expanduser(os.path.join(... | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import codecs
import glob
import os
import shutil
import subprocess
import sys
import concurrent.futures as futures
import traceback
import jinja2
XDG_CONFIG_DATA_DIR = os.getenv('XDG_DATA_HOME') or \
os.path.expanduser(os.path.join("~", ".local", "share"))
USER_DATA_DIR = os.path.join(XDG_CONFIG_DATA_DIR, "rapport")
from rapport.config import USER_CONFIG_DIR
import rapport.template
import rapport.util
def _get_reports_path(report=None):
    """Return the reports base directory, or a specific report's path."""
    parts = [USER_DATA_DIR, "reports"]
    if report:
        parts.append(report)
    return os.path.expanduser(os.path.join(*parts))
def list_reports():
    """Return the names of all created reports, sorted (oldest first)."""
    reports = os.listdir(_get_reports_path())
    reports.sort()
    return reports
def edit_report(report=None, type="email", email_part="body"):
    """Open a stored report part in $EDITOR (defaults to vi).

    :report: report name; defaults to the most recent report.
    :type: "email" (edit a subject/body part) or "html".
    :email_part: which e-mail part to edit when type == "email".
    :raises ValueError: for an unknown *type*. (Previously an unknown
        type crashed with UnboundLocalError on ``report_file``.)
    """
    if not report:
        report = list_reports()[-1:][0]
    report_path = _get_reports_path(report)
    editor = os.getenv("EDITOR", "vi")
    if type == "email":
        report_file = "{0}.{1}.text".format(type, email_part)
    elif type == "html":
        report_file = "index.html"
    else:
        raise ValueError("Unknown report type: {0}".format(type))
    subprocess.call([editor, os.path.join(report_path, report_file)])
def create_report(plugins, timeframe):
    """Collect all plugin results for *timeframe* and render a report.

    Runs every plugin's ``try_collect`` concurrently, renders each result
    through its text template, then writes e-mail subject/body files into
    a per-report directory named after the timeframe's end timestamp.
    Returns the dict handed to the templates (including the rendered body).
    """
    report_date_string = timeframe.end.strftime(rapport.util.ISO8610_FORMAT)
    report_path = _get_reports_path(report_date_string)
    if not os.path.exists(report_path):
        os.makedirs(report_path)
    # Execute all plugins in parallel and join on results:
    results = {}
    with futures.ThreadPoolExecutor(max_workers=4) as executor:
        plugin_futures = dict((executor.submit(p.try_collect, timeframe), p) for p in plugins)
        for future in futures.as_completed(plugin_futures):
            plugin = plugin_futures[future]
            try:
                res = future.result()
                if rapport.config.get_int("rapport", "verbosity") >= 2:
                    visible_result = repr(res)
                    if len(visible_result) > 1000:
                        visible_result = visible_result[:1000] + ' ...'
                    print("Result for %s: %s" % (plugin.alias, visible_result))
                tmpl = rapport.template.get_template(plugin, "text")
                if tmpl:
                    results[plugin] = tmpl.render(res)
            except jinja2.TemplateSyntaxError as e:
                # Bug fix: this was a Python-2 ``print >>sys.stderr, ...``
                # statement, which under print_function evaluates
                # ``print >> sys.stderr`` and raises TypeError at runtime.
                print("Syntax error in plugin {0} at {1} line {2}: {3}".format(plugin, e.name, e.lineno, e.message),
                      file=sys.stderr)
            except Exception as e:
                exc_type, exc_val, exc_tb = sys.exc_info()
                if hasattr(e, 'original_traceback'):
                    # Show the traceback captured inside the worker thread
                    # in addition to the one from this (parent) context.
                    print("Traceback from plugin thread:", file=sys.stderr)
                    traceback.print_tb(e.original_traceback, file=sys.stderr)
                    print("\nTraceback from parent process:", file=sys.stderr)
                    traceback.print_tb(exc_tb, file=sys.stderr)
                print("Failed plugin {0}:{1}: {2}: {3}"
                      .format(plugin, plugin.alias, e.__class__.__name__, e),
                      file=sys.stderr)
                sys.exit(1)
    results_dict = {"login": rapport.config.get("user", "login"),
                    "date": report_date_string,
                    "plugins": plugins,
                    "results": results}

    # Render mail body template:
    template_email_body = rapport.template.get_template("body", type="email")
    email_body = template_email_body.render(results_dict)
    email_body_file = os.path.join(report_path, "email.body.text")
    with codecs.open(email_body_file, "w", encoding="utf-8") as report:
        report.write(email_body)

    # We can re-use the e-mail body as the general report body:
    results_dict["body"] = email_body

    # Render mail subject template:
    template_email_subject = rapport.template.get_template("subject", type="email")
    email_subject = template_email_subject.render(results_dict)
    email_subject_file = os.path.join(report_path, "email.subject.text")
    with open(email_subject_file, "w") as report:
        report.write(email_subject)

    #TODO: Maybe even create a Result class and return that instead of a dict?
    return results_dict
def delete_report(report):
    """Delete one or more reports; *report* may contain glob wildcards."""
    pattern = os.path.join(_get_reports_path(), report)
    for match in glob.glob(pattern):
        shutil.rmtree(match)
|
saschpe/rapport | rapport/report.py | delete_report | python | def delete_report(report):
for path in glob.glob(os.path.join(_get_reports_path(), report)):
shutil.rmtree(path) | Delete report(s), supports globbing. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/report.py#L137-L141 | [
"def _get_reports_path(report=None):\n path_parts = [USER_DATA_DIR, \"reports\"]\n if report:\n path_parts.append(report)\n return os.path.expanduser(os.path.join(*path_parts))\n"
] | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import codecs
import glob
import os
import shutil
import subprocess
import sys
import concurrent.futures as futures
import traceback
import jinja2
XDG_CONFIG_DATA_DIR = os.getenv('XDG_DATA_HOME') or \
os.path.expanduser(os.path.join("~", ".local", "share"))
USER_DATA_DIR = os.path.join(XDG_CONFIG_DATA_DIR, "rapport")
from rapport.config import USER_CONFIG_DIR
import rapport.template
import rapport.util
def _get_reports_path(report=None):
    """Return the path of the reports directory, or of *report* inside it."""
    path = os.path.join(USER_DATA_DIR, "reports")
    if report:
        path = os.path.join(path, report)
    return os.path.expanduser(path)
def list_reports():
    """Return the names of all created reports, sorted alphabetically."""
    reports = os.listdir(_get_reports_path())
    reports.sort()
    return reports
def get_report(report=None):
    """Return the contents of a specific report.

    :param report: report name; defaults to the most recent report
        (``list_reports()`` returns a sorted list, so the last entry
        is the newest timestamped directory).
    :returns: dict mapping each file name in the report directory to
        its text, plus a ``"report"`` key naming the report.
    :raises IndexError: if no report exists and none was given.
    """
    if not report:
        report = list_reports()[-1]
    report_path = _get_reports_path(report)
    report_dict = {"report": report}
    for filename in os.listdir(report_path):
        # Report files are written as UTF-8 (see create_report), so read
        # them back with an explicit encoding rather than the platform
        # default, which breaks on non-ASCII content.
        with codecs.open(os.path.join(report_path, filename), "r", encoding="utf-8") as f:
            report_dict[filename] = f.read()
    return report_dict
def edit_report(report=None, type="email", email_part="body"):
    """Open a report file in the user's ``$EDITOR`` (default ``vi``).

    :param report: report name; defaults to the most recent report.
    :param type: report flavor, either ``"email"`` or ``"html"``.
        (Parameter name shadows the builtin but is kept for backward
        compatibility with keyword callers.)
    :param email_part: which e-mail part to edit, e.g. ``"body"`` or
        ``"subject"``; only used when type is ``"email"``.
    :raises ValueError: if *type* is not a known report type.
    """
    if not report:
        report = list_reports()[-1]
    report_path = _get_reports_path(report)
    editor = os.getenv("EDITOR", "vi")
    if type == "email":
        report_file = "{0}.{1}.text".format(type, email_part)
    elif type == "html":
        report_file = "index.html"
    else:
        # Previously an unknown type fell through and crashed with an
        # obscure UnboundLocalError on report_file; fail explicitly.
        raise ValueError("Unknown report type: {0}".format(type))
    subprocess.call([editor, os.path.join(report_path, report_file)])
def create_report(plugins, timeframe):
    """Collect data from all *plugins* for *timeframe* and render a report.

    Plugins run concurrently in a thread pool; each successful result is
    rendered through the plugin's text template.  The rendered e-mail body
    and subject are written into a per-report directory named after the
    timeframe's end timestamp.

    :param plugins: list of plugin instances to run.
    :param timeframe: object with ``start``/``end`` datetime attributes.
    :returns: dict with login, date, plugins, per-plugin results and the
        rendered e-mail body.
    """
    report_date_string = timeframe.end.strftime(rapport.util.ISO8610_FORMAT)
    report_path = _get_reports_path(report_date_string)
    if not os.path.exists(report_path):
        os.makedirs(report_path)
    # Execute all plugins in parallel and join on results:
    results = {}
    with futures.ThreadPoolExecutor(max_workers=4) as executor:
        plugin_futures = dict((executor.submit(p.try_collect, timeframe), p) for p in plugins)
        for future in futures.as_completed(plugin_futures):
            plugin = plugin_futures[future]
            try:
                res = future.result()
                if rapport.config.get_int("rapport", "verbosity") >= 2:
                    visible_result = repr(res)
                    if len(visible_result) > 1000:
                        visible_result = visible_result[:1000] + ' ...'
                    print("Result for %s: %s" % (plugin.alias, visible_result))
                tmpl = rapport.template.get_template(plugin, "text")
                if tmpl:
                    results[plugin] = tmpl.render(res)
            except jinja2.TemplateSyntaxError as e:
                # Bugfix: this used the Python 2 statement form
                # ``print >>sys.stderr, ...`` which, with print_function in
                # effect, evaluates ``print >> sys.stderr`` and raises a
                # TypeError instead of reporting the template error.
                print("Syntax error in plugin {0} at {1} line {2}: {3}"
                      .format(plugin, e.name, e.lineno, e.message), file=sys.stderr)
            except Exception as e:
                exc_type, exc_val, exc_tb = sys.exc_info()
                if hasattr(e, 'original_traceback'):
                    # try_collect() stashed the traceback from the worker
                    # thread; print it first for context.
                    print("Traceback from plugin thread:", file=sys.stderr)
                    traceback.print_tb(e.original_traceback, file=sys.stderr)
                    print("\nTraceback from parent process:", file=sys.stderr)
                traceback.print_tb(exc_tb, file=sys.stderr)
                print("Failed plugin {0}:{1}: {2}: {3}" \
                      .format(plugin, plugin.alias, e.__class__.__name__, e),
                      file=sys.stderr)
                sys.exit(1)
    results_dict = {"login": rapport.config.get("user", "login"),
                    "date": report_date_string,
                    "plugins": plugins,
                    "results": results}
    # Render mail body template:
    template_email_body = rapport.template.get_template("body", type="email")
    email_body = template_email_body.render(results_dict)
    email_body_file = os.path.join(report_path, "email.body.text")
    with codecs.open(email_body_file, "w", encoding="utf-8") as report:
        report.write(email_body)
    # We can re-use the e-mail body as the general report body:
    results_dict["body"] = email_body
    # Render mail subject template:
    template_email_subject = rapport.template.get_template("subject", type="email")
    email_subject = template_email_subject.render(results_dict)
    email_subject_file = os.path.join(report_path, "email.subject.text")
    # Write the subject as UTF-8 too, consistent with the body file
    # (plain open() previously broke on non-ASCII subjects under Python 2).
    with codecs.open(email_subject_file, "w", encoding="utf-8") as report:
        report.write(email_subject)
    #TODO: Maybe even create a Result class and return that instead of a dict?
    return results_dict
|
saschpe/rapport | rapport/plugin.py | _get_plugin_dirs | python | def _get_plugin_dirs():
plugin_dirs = [
os.path.expanduser(os.path.join(USER_CONFIG_DIR, "plugins")),
os.path.join("rapport", "plugins") # Local dev tree
]
return plugin_dirs | Return a list of directories where plugins may be located. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugin.py#L94-L101 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
from rapport.config import USER_CONFIG_DIR
import rapport.config
import rapport.util
class Plugin(object):
    """Base class for all rapport plugins.

    Subclasses implement :meth:`collect` and share the common
    configuration values alias, url, login and password.
    """

    def __init__(self, alias=None, url=None, login=None, password=None):
        self.alias = alias
        self.url = url
        self.login = login
        self.password = password
        if alias == "":
            # Bugfix: this previously read ``self.alias == self.__str__()``
            # -- a no-op comparison -- so an empty alias was never replaced
            # by the derived plugin name as intended.
            self.alias = self.__str__()
        if url and not isinstance(url, urlparse.ParseResult):
            self.url = urlparse.urlparse(url)

    def _results(self, dict={}):
        """Helper to merge a dict with cross-plugin defaults.

        All plugin sub-classes share some config values, i.e. alias, url,
        login and password. This helper should be used in the :collect:
        method of any Plugin implementation.

        >>> import rapport.plugin
        >>> c = rapport.plugin.Plugin(alias="a", url="http://example.com", login="u")
        >>> c._results()
        {'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'plugin': 'plugin'}
        >>> c._results({'mykey': 'mykey'})
        {'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'mykey': 'mykey', 'plugin': 'plugin'}
        """
        # NOTE: the parameter shadows the builtin ``dict`` and uses a mutable
        # default; it is never mutated here, and the name is kept for
        # backward compatibility with keyword callers.
        results = {"plugin": str(self),
                   "alias": self.alias,
                   "url": self.url,
                   "login": self.login}
        results.update(dict)
        return results

    def __str__(self):
        """Returns the class name in underscores.

        Additionally, for sub-classes, the suffix '_plugin' is split off.

        >>> import rapport.plugin
        >>> c = rapport.plugin.Plugin()
        >>> str(c)
        'plugin'
        """
        return rapport.util.camelcase_to_underscores(self.__class__.__name__).rsplit("_plugin")[0]

    def try_collect(self, timeframe):
        """
        Run the plugin's collect() method, and if an exception was caught,
        store the traceback before re-raising, in order that it doesn't
        get lost when concurrent.futures.Future.result() is invoked.
        """
        try:
            result = self.collect(timeframe)
        except Exception as e:
            exc_type, exc_val, exc_tb = sys.exc_info()
            # Stash the worker-thread traceback on the exception so the
            # parent process can print it later (see create_report).
            e.original_traceback = exc_tb
            raise
        return result

    def collect(self, timeframe):
        # Abstract hook: subclasses gather their data for *timeframe* here.
        raise NotImplementedError()
def _path_to_module(path):
"""Translates paths to *.py? files into module paths.
>>> _path_to_module("rapport/bar.py")
'rapport.bar'
>>> _path_to_module("/usr/lib/rapport/bar.py")
'rapport.bar'
"""
# Split of preceeding path elements:
path = "rapport" + path.split("rapport")[1]
# Split of ending and replace os.sep with dots:
path = path.replace(os.sep, ".").rsplit(".", 1)[0]
return path
def discover():
    """Find and import every available plugin module."""
    module_files = []
    for directory in _get_plugin_dirs():
        if not os.path.isdir(directory):
            continue
        for entry in os.listdir(directory):
            if entry.endswith(".py") and entry != "__init__.py":
                module_files.append(os.path.join(directory, entry))
    if rapport.config.get_int("rapport", "verbosity") >= 2:
        print("Found plugin modules: {0}".format(module_files))
    for module_file in module_files:
        if rapport.config.get_int("rapport", "verbosity") >= 2:
            print("Importing module {0}".format(_path_to_module(module_file)))
        __import__(_path_to_module(module_file))
_PLUGIN_CATALOG = {}
def register(name, klass):
    """Register plugin class *klass* under *name* in the plugin catalog."""
    verbose = rapport.config.get_int("rapport", "verbosity") >= 1
    if verbose:
        print("Registered plugin: {0}".format(name))
    _PLUGIN_CATALOG[name] = klass
def init(name, *args, **kwargs):
    """Instantiate the plugin registered under *name*.

    Returns the new plugin instance, or None (with a message on stderr)
    when the name is unknown or construction fails.
    """
    if name not in _PLUGIN_CATALOG:
        print("Failed to initialize plugin {0}: Not in catalog!".format(name), file=sys.stderr)
        return
    if rapport.config.get_int("rapport", "verbosity") >= 2:
        print("Initialize plugin {0}: {1} {2}".format(name, args, kwargs))
    try:
        return _PLUGIN_CATALOG[name](*args, **kwargs)
    except (ValueError, TypeError) as e:
        print("Failed to initialize plugin {0}: {1}!".format(name, e), file=sys.stderr)
def init_from_config():
    """Instantiate every plugin listed in the user configuration."""
    created = [init(**plugin_config) for plugin_config in rapport.config.plugins()]
    # Drop plugins that failed to initialize (init() returned None):
    return [plugin for plugin in created if plugin]
def catalog():
    """Returns the list of registered plugins.
    """
    # Hands back the catalog's keys() directly (a view object on Python 3,
    # a list on Python 2) rather than a copy; callers should not mutate it.
    return _PLUGIN_CATALOG.keys()
|
saschpe/rapport | rapport/plugin.py | _path_to_module | python | def _path_to_module(path):
# Split of preceeding path elements:
path = "rapport" + path.split("rapport")[1]
# Split of ending and replace os.sep with dots:
path = path.replace(os.sep, ".").rsplit(".", 1)[0]
return path | Translates paths to *.py? files into module paths.
>>> _path_to_module("rapport/bar.py")
'rapport.bar'
>>> _path_to_module("/usr/lib/rapport/bar.py")
'rapport.bar' | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugin.py#L104-L116 | null | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
from rapport.config import USER_CONFIG_DIR
import rapport.config
import rapport.util
class Plugin(object):
def __init__(self, alias=None, url=None, login=None, password=None):
self.alias = alias
self.url = url
self.login = login
self.password = password
if alias == "":
self.alias == self.__str__()
if url and type(url) is not urlparse.ParseResult:
self.url = urlparse.urlparse(url)
def _results(self, dict={}):
"""Helper to merge a dict with cross-plugin defaults.
All plugin sub-classes share some config values, i.e. alias, url,
login and password. This help should be used in the :collect: method
of any Plugin implementation.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin(alias="a", url="http://example.com", login="u")
>>> c._results()
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'plugin': 'plugin'}
>>> c._results({'mykey': 'mykey'})
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'mykey': 'mykey', 'plugin': 'plugin'}
"""
results = {"plugin": str(self),
"alias": self.alias,
"url": self.url,
"login": self.login}
results.update(dict)
return results
def __str__(self):
"""Returns the class name in underscores.
Additionally, for sub-classes, the suffix '_plugin' is split off.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin()
>>> str(c)
'plugin'
"""
return rapport.util.camelcase_to_underscores(self.__class__.__name__).rsplit("_plugin")[0]
def try_collect(self, timeframe):
"""
Run the plugin's collect() method, and if an exception was caught,
store the traceback before re-raising, in order that it doesn't
get lost when concurrent.futures.Future.result() is invoked.
"""
try:
result = self.collect(timeframe)
except Exception as e:
exc_type, exc_val, exc_tb = sys.exc_info()
e.original_traceback = exc_tb
raise
return result
def collect(self, timeframe):
raise NotImplementedError()
def _get_plugin_dirs():
    """Return the directories that are searched for plugin modules."""
    user_plugins = os.path.expanduser(os.path.join(USER_CONFIG_DIR, "plugins"))
    tree_plugins = os.path.join("rapport", "plugins")  # Local dev tree
    return [user_plugins, tree_plugins]
def discover():
"""Find and load all available plugins.
"""
plugin_files = []
for plugin_dir in _get_plugin_dirs():
if os.path.isdir(plugin_dir):
for plugin_file in os.listdir(plugin_dir):
if plugin_file.endswith(".py") and not plugin_file == "__init__.py":
plugin_files.append(os.path.join(plugin_dir, plugin_file))
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Found plugin modules: {0}".format(plugin_files))
for plugin_file in plugin_files:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Importing module {0}".format(_path_to_module(plugin_file)))
__import__(_path_to_module(plugin_file))
_PLUGIN_CATALOG = {}
def register(name, klass):
"""Add a plugin to the plugin catalog.
"""
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Registered plugin: {0}".format(name))
_PLUGIN_CATALOG[name] = klass
def init(name, *args, **kwargs):
"""Instantiate a plugin from the catalog.
"""
if name in _PLUGIN_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize plugin {0}: {1} {2}".format(name, args, kwargs))
try:
return _PLUGIN_CATALOG[name](*args, **kwargs)
except (ValueError, TypeError) as e:
print("Failed to initialize plugin {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize plugin {0}: Not in catalog!".format(name), file=sys.stderr)
def init_from_config():
plugins = []
for plugin in rapport.config.plugins():
plugins.append(init(**plugin))
return list(filter(bool, plugins))
def catalog():
"""Returns the list of registered plugins.
"""
return _PLUGIN_CATALOG.keys()
|
saschpe/rapport | rapport/plugin.py | discover | python | def discover():
plugin_files = []
for plugin_dir in _get_plugin_dirs():
if os.path.isdir(plugin_dir):
for plugin_file in os.listdir(plugin_dir):
if plugin_file.endswith(".py") and not plugin_file == "__init__.py":
plugin_files.append(os.path.join(plugin_dir, plugin_file))
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Found plugin modules: {0}".format(plugin_files))
for plugin_file in plugin_files:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Importing module {0}".format(_path_to_module(plugin_file)))
__import__(_path_to_module(plugin_file)) | Find and load all available plugins. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugin.py#L119-L136 | [
"def get_int(section, option, default=-1):\n return int(get(section, option, default))\n",
"def _get_plugin_dirs():\n \"\"\"Return a list of directories where plugins may be located.\n \"\"\"\n plugin_dirs = [\n os.path.expanduser(os.path.join(USER_CONFIG_DIR, \"plugins\")),\n os.path.jo... | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
from rapport.config import USER_CONFIG_DIR
import rapport.config
import rapport.util
class Plugin(object):
def __init__(self, alias=None, url=None, login=None, password=None):
self.alias = alias
self.url = url
self.login = login
self.password = password
if alias == "":
self.alias == self.__str__()
if url and type(url) is not urlparse.ParseResult:
self.url = urlparse.urlparse(url)
def _results(self, dict={}):
"""Helper to merge a dict with cross-plugin defaults.
All plugin sub-classes share some config values, i.e. alias, url,
login and password. This help should be used in the :collect: method
of any Plugin implementation.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin(alias="a", url="http://example.com", login="u")
>>> c._results()
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'plugin': 'plugin'}
>>> c._results({'mykey': 'mykey'})
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'mykey': 'mykey', 'plugin': 'plugin'}
"""
results = {"plugin": str(self),
"alias": self.alias,
"url": self.url,
"login": self.login}
results.update(dict)
return results
def __str__(self):
"""Returns the class name in underscores.
Additionally, for sub-classes, the suffix '_plugin' is split off.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin()
>>> str(c)
'plugin'
"""
return rapport.util.camelcase_to_underscores(self.__class__.__name__).rsplit("_plugin")[0]
def try_collect(self, timeframe):
"""
Run the plugin's collect() method, and if an exception was caught,
store the traceback before re-raising, in order that it doesn't
get lost when concurrent.futures.Future.result() is invoked.
"""
try:
result = self.collect(timeframe)
except Exception as e:
exc_type, exc_val, exc_tb = sys.exc_info()
e.original_traceback = exc_tb
raise
return result
def collect(self, timeframe):
raise NotImplementedError()
def _get_plugin_dirs():
"""Return a list of directories where plugins may be located.
"""
plugin_dirs = [
os.path.expanduser(os.path.join(USER_CONFIG_DIR, "plugins")),
os.path.join("rapport", "plugins") # Local dev tree
]
return plugin_dirs
def _path_to_module(path):
"""Translates paths to *.py? files into module paths.
>>> _path_to_module("rapport/bar.py")
'rapport.bar'
>>> _path_to_module("/usr/lib/rapport/bar.py")
'rapport.bar'
"""
# Split of preceeding path elements:
path = "rapport" + path.split("rapport")[1]
# Split of ending and replace os.sep with dots:
path = path.replace(os.sep, ".").rsplit(".", 1)[0]
return path
_PLUGIN_CATALOG = {}
def register(name, klass):
"""Add a plugin to the plugin catalog.
"""
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Registered plugin: {0}".format(name))
_PLUGIN_CATALOG[name] = klass
def init(name, *args, **kwargs):
"""Instantiate a plugin from the catalog.
"""
if name in _PLUGIN_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize plugin {0}: {1} {2}".format(name, args, kwargs))
try:
return _PLUGIN_CATALOG[name](*args, **kwargs)
except (ValueError, TypeError) as e:
print("Failed to initialize plugin {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize plugin {0}: Not in catalog!".format(name), file=sys.stderr)
def init_from_config():
plugins = []
for plugin in rapport.config.plugins():
plugins.append(init(**plugin))
return list(filter(bool, plugins))
def catalog():
"""Returns the list of registered plugins.
"""
return _PLUGIN_CATALOG.keys()
|
saschpe/rapport | rapport/plugin.py | register | python | def register(name, klass):
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Registered plugin: {0}".format(name))
_PLUGIN_CATALOG[name] = klass | Add a plugin to the plugin catalog. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugin.py#L142-L147 | [
"def get_int(section, option, default=-1):\n return int(get(section, option, default))\n"
] | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
from rapport.config import USER_CONFIG_DIR
import rapport.config
import rapport.util
class Plugin(object):
def __init__(self, alias=None, url=None, login=None, password=None):
self.alias = alias
self.url = url
self.login = login
self.password = password
if alias == "":
self.alias == self.__str__()
if url and type(url) is not urlparse.ParseResult:
self.url = urlparse.urlparse(url)
def _results(self, dict={}):
"""Helper to merge a dict with cross-plugin defaults.
All plugin sub-classes share some config values, i.e. alias, url,
login and password. This help should be used in the :collect: method
of any Plugin implementation.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin(alias="a", url="http://example.com", login="u")
>>> c._results()
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'plugin': 'plugin'}
>>> c._results({'mykey': 'mykey'})
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'mykey': 'mykey', 'plugin': 'plugin'}
"""
results = {"plugin": str(self),
"alias": self.alias,
"url": self.url,
"login": self.login}
results.update(dict)
return results
def __str__(self):
"""Returns the class name in underscores.
Additionally, for sub-classes, the suffix '_plugin' is split off.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin()
>>> str(c)
'plugin'
"""
return rapport.util.camelcase_to_underscores(self.__class__.__name__).rsplit("_plugin")[0]
def try_collect(self, timeframe):
"""
Run the plugin's collect() method, and if an exception was caught,
store the traceback before re-raising, in order that it doesn't
get lost when concurrent.futures.Future.result() is invoked.
"""
try:
result = self.collect(timeframe)
except Exception as e:
exc_type, exc_val, exc_tb = sys.exc_info()
e.original_traceback = exc_tb
raise
return result
def collect(self, timeframe):
raise NotImplementedError()
def _get_plugin_dirs():
"""Return a list of directories where plugins may be located.
"""
plugin_dirs = [
os.path.expanduser(os.path.join(USER_CONFIG_DIR, "plugins")),
os.path.join("rapport", "plugins") # Local dev tree
]
return plugin_dirs
def _path_to_module(path):
"""Translates paths to *.py? files into module paths.
>>> _path_to_module("rapport/bar.py")
'rapport.bar'
>>> _path_to_module("/usr/lib/rapport/bar.py")
'rapport.bar'
"""
# Split of preceeding path elements:
path = "rapport" + path.split("rapport")[1]
# Split of ending and replace os.sep with dots:
path = path.replace(os.sep, ".").rsplit(".", 1)[0]
return path
def discover():
"""Find and load all available plugins.
"""
plugin_files = []
for plugin_dir in _get_plugin_dirs():
if os.path.isdir(plugin_dir):
for plugin_file in os.listdir(plugin_dir):
if plugin_file.endswith(".py") and not plugin_file == "__init__.py":
plugin_files.append(os.path.join(plugin_dir, plugin_file))
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Found plugin modules: {0}".format(plugin_files))
for plugin_file in plugin_files:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Importing module {0}".format(_path_to_module(plugin_file)))
__import__(_path_to_module(plugin_file))
_PLUGIN_CATALOG = {}
def init(name, *args, **kwargs):
"""Instantiate a plugin from the catalog.
"""
if name in _PLUGIN_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize plugin {0}: {1} {2}".format(name, args, kwargs))
try:
return _PLUGIN_CATALOG[name](*args, **kwargs)
except (ValueError, TypeError) as e:
print("Failed to initialize plugin {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize plugin {0}: Not in catalog!".format(name), file=sys.stderr)
def init_from_config():
plugins = []
for plugin in rapport.config.plugins():
plugins.append(init(**plugin))
return list(filter(bool, plugins))
def catalog():
"""Returns the list of registered plugins.
"""
return _PLUGIN_CATALOG.keys()
|
saschpe/rapport | rapport/plugin.py | init | python | def init(name, *args, **kwargs):
if name in _PLUGIN_CATALOG:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Initialize plugin {0}: {1} {2}".format(name, args, kwargs))
try:
return _PLUGIN_CATALOG[name](*args, **kwargs)
except (ValueError, TypeError) as e:
print("Failed to initialize plugin {0}: {1}!".format(name, e), file=sys.stderr)
else:
print("Failed to initialize plugin {0}: Not in catalog!".format(name), file=sys.stderr) | Instantiate a plugin from the catalog. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugin.py#L150-L161 | [
"def get_int(section, option, default=-1):\n return int(get(section, option, default))\n"
] | # Copyright 2013 Sascha Peilicke
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
from rapport.config import USER_CONFIG_DIR
import rapport.config
import rapport.util
class Plugin(object):
def __init__(self, alias=None, url=None, login=None, password=None):
self.alias = alias
self.url = url
self.login = login
self.password = password
if alias == "":
self.alias == self.__str__()
if url and type(url) is not urlparse.ParseResult:
self.url = urlparse.urlparse(url)
def _results(self, dict={}):
"""Helper to merge a dict with cross-plugin defaults.
All plugin sub-classes share some config values, i.e. alias, url,
login and password. This help should be used in the :collect: method
of any Plugin implementation.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin(alias="a", url="http://example.com", login="u")
>>> c._results()
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'plugin': 'plugin'}
>>> c._results({'mykey': 'mykey'})
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'mykey': 'mykey', 'plugin': 'plugin'}
"""
results = {"plugin": str(self),
"alias": self.alias,
"url": self.url,
"login": self.login}
results.update(dict)
return results
def __str__(self):
"""Returns the class name in underscores.
Additionally, for sub-classes, the suffix '_plugin' is split off.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin()
>>> str(c)
'plugin'
"""
return rapport.util.camelcase_to_underscores(self.__class__.__name__).rsplit("_plugin")[0]
def try_collect(self, timeframe):
"""
Run the plugin's collect() method, and if an exception was caught,
store the traceback before re-raising, in order that it doesn't
get lost when concurrent.futures.Future.result() is invoked.
"""
try:
result = self.collect(timeframe)
except Exception as e:
exc_type, exc_val, exc_tb = sys.exc_info()
e.original_traceback = exc_tb
raise
return result
def collect(self, timeframe):
raise NotImplementedError()
def _get_plugin_dirs():
"""Return a list of directories where plugins may be located.
"""
plugin_dirs = [
os.path.expanduser(os.path.join(USER_CONFIG_DIR, "plugins")),
os.path.join("rapport", "plugins") # Local dev tree
]
return plugin_dirs
def _path_to_module(path):
"""Translates paths to *.py? files into module paths.
>>> _path_to_module("rapport/bar.py")
'rapport.bar'
>>> _path_to_module("/usr/lib/rapport/bar.py")
'rapport.bar'
"""
# Split of preceeding path elements:
path = "rapport" + path.split("rapport")[1]
# Split of ending and replace os.sep with dots:
path = path.replace(os.sep, ".").rsplit(".", 1)[0]
return path
def discover():
"""Find and load all available plugins.
"""
plugin_files = []
for plugin_dir in _get_plugin_dirs():
if os.path.isdir(plugin_dir):
for plugin_file in os.listdir(plugin_dir):
if plugin_file.endswith(".py") and not plugin_file == "__init__.py":
plugin_files.append(os.path.join(plugin_dir, plugin_file))
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Found plugin modules: {0}".format(plugin_files))
for plugin_file in plugin_files:
if rapport.config.get_int("rapport", "verbosity") >= 2:
print("Importing module {0}".format(_path_to_module(plugin_file)))
__import__(_path_to_module(plugin_file))
_PLUGIN_CATALOG = {}
def register(name, klass):
"""Add a plugin to the plugin catalog.
"""
if rapport.config.get_int("rapport", "verbosity") >= 1:
print("Registered plugin: {0}".format(name))
_PLUGIN_CATALOG[name] = klass
def init_from_config():
plugins = []
for plugin in rapport.config.plugins():
plugins.append(init(**plugin))
return list(filter(bool, plugins))
def catalog():
"""Returns the list of registered plugins.
"""
return _PLUGIN_CATALOG.keys()
|
saschpe/rapport | rapport/plugin.py | Plugin._results | python | def _results(self, dict={}):
results = {"plugin": str(self),
"alias": self.alias,
"url": self.url,
"login": self.login}
results.update(dict)
return results | Helper to merge a dict with cross-plugin defaults.
All plugin sub-classes share some config values, i.e. alias, url,
login and password. This help should be used in the :collect: method
of any Plugin implementation.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin(alias="a", url="http://example.com", login="u")
>>> c._results()
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'plugin': 'plugin'}
>>> c._results({'mykey': 'mykey'})
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'mykey': 'mykey', 'plugin': 'plugin'} | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugin.py#L42-L61 | null | class Plugin(object):
def __init__(self, alias=None, url=None, login=None, password=None):
self.alias = alias
self.url = url
self.login = login
self.password = password
if alias == "":
self.alias == self.__str__()
if url and type(url) is not urlparse.ParseResult:
self.url = urlparse.urlparse(url)
def __str__(self):
"""Returns the class name in underscores.
Additionally, for sub-classes, the suffix '_plugin' is split off.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin()
>>> str(c)
'plugin'
"""
return rapport.util.camelcase_to_underscores(self.__class__.__name__).rsplit("_plugin")[0]
def try_collect(self, timeframe):
"""
Run the plugin's collect() method, and if an exception was caught,
store the traceback before re-raising, in order that it doesn't
get lost when concurrent.futures.Future.result() is invoked.
"""
try:
result = self.collect(timeframe)
except Exception as e:
exc_type, exc_val, exc_tb = sys.exc_info()
e.original_traceback = exc_tb
raise
return result
def collect(self, timeframe):
raise NotImplementedError()
|
saschpe/rapport | rapport/plugin.py | Plugin.try_collect | python | def try_collect(self, timeframe):
try:
result = self.collect(timeframe)
except Exception as e:
exc_type, exc_val, exc_tb = sys.exc_info()
e.original_traceback = exc_tb
raise
return result | Run the plugin's collect() method, and if an exception was caught,
store the traceback before re-raising, in order that it doesn't
get lost when concurrent.futures.Future.result() is invoked. | train | https://github.com/saschpe/rapport/blob/ccceb8f84bd7e8add88ab5e137cdab6424aa4683/rapport/plugin.py#L75-L88 | [
"def collect(self, timeframe):\n raise NotImplementedError()\n"
] | class Plugin(object):
def __init__(self, alias=None, url=None, login=None, password=None):
self.alias = alias
self.url = url
self.login = login
self.password = password
if alias == "":
self.alias == self.__str__()
if url and type(url) is not urlparse.ParseResult:
self.url = urlparse.urlparse(url)
def _results(self, dict={}):
"""Helper to merge a dict with cross-plugin defaults.
All plugin sub-classes share some config values, i.e. alias, url,
login and password. This help should be used in the :collect: method
of any Plugin implementation.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin(alias="a", url="http://example.com", login="u")
>>> c._results()
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'plugin': 'plugin'}
>>> c._results({'mykey': 'mykey'})
{'url': ParseResult(scheme='http', netloc='example.com', path='', params='', query='', fragment=''), 'alias': 'a', 'login': 'u', 'mykey': 'mykey', 'plugin': 'plugin'}
"""
results = {"plugin": str(self),
"alias": self.alias,
"url": self.url,
"login": self.login}
results.update(dict)
return results
def __str__(self):
"""Returns the class name in underscores.
Additionally, for sub-classes, the suffix '_plugin' is split off.
>>> import rapport.plugin
>>> c = rapport.plugin.Plugin()
>>> str(c)
'plugin'
"""
return rapport.util.camelcase_to_underscores(self.__class__.__name__).rsplit("_plugin")[0]
def collect(self, timeframe):
raise NotImplementedError()
|
shaunduncan/nosqlite | nosqlite.py | _eq | python | def _eq(field, value, document):
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False | Returns True if the value of a document field is equal to a given value | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L430-L437 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _gt | python | def _gt(field, value, document):
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False | Returns True if the value of a document field is greater than a given value | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L440-L447 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _lt | python | def _lt(field, value, document):
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False | Returns True if the value of a document field is less than a given value | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L450-L457 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _gte | python | def _gte(field, value, document):
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False | Returns True if the value of a document field is greater than or
equal to a given value | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L460-L468 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _lte | python | def _lte(field, value, document):
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False | Returns True if the value of a document field is less than or
equal to a given value | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L471-L479 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _all | python | def _all(field, value, document):
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a | Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L482-L499 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
    """
    The high-level connection to a sqlite database. Creating a connection accepts
    the same args and keyword args as the ``sqlite3.connect`` method
    """

    def __init__(self, *args, **kwargs):
        self._collections = {}  # cache of Collection objects keyed by name
        self.connect(*args, **kwargs)

    def connect(self, *args, **kwargs):
        """
        Open the underlying sqlite connection (replacing any previous one).
        Isolation level for the connection is automatically set to autocommit
        """
        self.db = sqlite3.connect(*args, **kwargs)
        self.db.isolation_level = None

    def close(self):
        """
        Terminate the connection to the sqlite database
        """
        if self.db is not None:
            self.db.close()

    def __getitem__(self, name):
        """
        A pymongo-like behavior for dynamically obtaining a collection of documents
        """
        if name not in self._collections:
            self._collections[name] = Collection(self.db, name)
        return self._collections[name]

    def __getattr__(self, name):
        # __getattr__ only runs after normal attribute lookup fails, so any
        # name reaching this point is treated as a collection lookup. (The
        # previous 'name in self.__dict__' branch could never be true here.)
        return self[name]

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_traceback):
        self.close()
        return False

    def drop_collection(self, name):
        """
        Drops a collection permanently if it exists
        """
        self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _in | python | def _in(field, value, document):
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values | Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L502-L512 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
    """Resolve an operator string such as '$gt' to its handler function.

    The handler is the module-level function whose name is the operator
    with '$' swapped for '_' (so '$gt' maps to ``_gt``).

    :raises MalformedQueryException: if ``op`` does not start with '$'
        or no matching handler exists in this module.
    """
    if not op.startswith('$'):
        raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
    handler_name = op.replace('$', '_')
    module = sys.modules[__name__]
    if not hasattr(module, handler_name):
        raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
    return getattr(module, handler_name)
def find_one(self, query=None):
    """Return the first document matching *query*, or None when nothing
    matches (or the backing table does not exist yet)."""
    try:
        matches = self.find(query=query, limit=1)
        return matches[0]
    except (sqlite3.OperationalError, IndexError):
        return None
def find_and_modify(self, query=None, update=None):
    """Apply *update* (a dict of field values) to every document matching
    *query* and persist each modified document back to the collection."""
    changes = update or {}
    for doc in self.find(query=query):
        doc.update(changes)
        self.update(doc)
def count(self, query=None):
    """Number of documents matching *query* (all documents when None)."""
    matching = self.find(query=query)
    return len(matching)
def rename(self, new_name):
    """
    Rename this collection's backing table to ``new_name``.

    Fails (via ``assert``) when a collection named ``new_name`` already
    exists.
    """
    # Probe for a clash without creating the target table (create=False).
    new_collection = Collection(self.db, new_name, create=False)
    # NOTE(review): `assert` is stripped under `python -O`; a real exception
    # would be a safer guard here.
    assert not new_collection.exists()
    # Collection names are interpolated directly into the SQL string --
    # callers must not pass untrusted names.
    self.db.execute("alter table %s rename to %s" % (self.name, new_name))
    self.name = new_name
def distinct(self, key):
    """Set of values stored under *key* across all documents that contain
    the key (documents without the key contribute nothing)."""
    return {doc[key] for doc in self.find() if key in doc}
def create_index(self, key, reindex=True, sparse=False):
    """
    Create an index side table for ``key`` if it does not exist, then
    optionally reindex the collection.

    :param key: a field name, or a list/tuple of field names for a
        compound index
    :param reindex: force a full reindex even when the side table
        already existed
    :param sparse: forwarded to :meth:`reindex`
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    if isinstance(key, (list, tuple)):
        index_name = ','.join(key)
        index_columns = ', '.join('%s text' % f for f in key)
    else:
        index_name = key
        index_columns = '%s text' % key
    # Index data lives in a bracket-quoted side table named '[collection{index}]'
    table_name = '[%s{%s}]' % (self.name, index_name)
    # Always reindex when the side table did not exist before this call
    reindex = reindex or not self._object_exists('table', table_name)
    # Create a table store for the index data
    self.db.execute("""
        create table if not exists {table} (
            id integer primary key,
            {columns},
            foreign key(id) references {collection}(id) on delete cascade on update cascade
        )
    """.format(
        table=table_name,
        collection=self.name,
        columns=index_columns
    ))
    # Create the index
    self.db.execute("""
        create index if not exists [idx.{collection}{{index}}] on {table}({index})
    """.format(
        collection=self.name,
        index=index_name,
        table=table_name,
    ))
    if reindex:
        # BUGFIX: reindex() parses a '[collection{index}]' side-table name,
        # so it must receive table_name -- passing the raw key made the
        # regex lookup in reindex() fail.
        self.reindex(table_name, sparse=sparse)
def ensure_index(self, key, sparse=False):
    """
    Create the index for ``key`` only if it is missing, without forcing a
    full reindex (equivalent to ``create_index(key, reindex=False)``).

    :param key: field name or list/tuple of field names
    :param sparse: forwarded to :meth:`create_index`
    """
    # BUGFIX: the ``sparse`` argument used to be silently dropped
    # (always forwarded as False).
    self.create_index(key, reindex=False, sparse=sparse)
def reindex(self, table, sparse=False):
    """
    Rebuild the index side table ``table`` (named '[collection{a,b}]',
    as produced by :meth:`create_index`) from the collection's documents.

    :param table: the index side-table name, including brackets
    :param sparse: when True, skip documents missing an indexed key
        instead of storing NULL for them
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    # Recover the indexed field names from the '[name{a,b}]' table name.
    index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
    update = "update {table} set {key} = ? where id = ?"
    insert = "insert into {table}(id, {index}) values(?, {q})"
    count = "select count(1) from {table} where id = ?"
    qs = ('?,' * len(index)).rstrip(',')
    for document in self.find():
        # Ensure there's a row keyed by the document id before we update.
        row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
        if int(row[0]) == 0:
            # BUGFIX: interpolate the column list as SQL text (the old code
            # formatted the repr of a Python list into the statement) and
            # store the document id, so the updates below can match the row.
            self.db.execute(
                insert.format(table=table, index=','.join(index), q=qs),
                [document['_id']] + [None for _ in index])
        for key in index:
            # Ignore this document if it doesn't have the key
            if key not in document and sparse:
                continue
            self.db.execute(update.format(table=table, key=key),
                            (document.get(key, None), document['_id']))
def drop_index(self):
    # Not implemented yet -- placeholder kept for API symmetry with
    # create_index(); currently only emits the alpha warning.
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    pass
def drop_indexes(self):
    """
    Drop all indexes for this collection
    """
    # Not implemented yet -- currently only emits the alpha warning.
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _mod | python | def _mod(field, value, document):
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False | Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False. | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L535-L555 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
    """Raised when a query document uses an invalid/unknown operator or
    gives an operator a malformed argument."""
    pass
class Connection(object):
    """
    The high-level connection to a sqlite database. Creating a connection accepts
    the same args and keyword args as the ``sqlite3.connect`` method
    """

    def __init__(self, *args, **kwargs):
        # Cache of Collection objects handed out, keyed by collection name.
        self._collections = {}
        self.connect(*args, **kwargs)

    def connect(self, *args, **kwargs):
        """
        Connect to a sqlite database only if no connection exists. Isolation level
        for the connection is automatically set to autocommit
        """
        # NOTE(review): despite the docstring, this always opens a new
        # connection -- no "only if none exists" check is performed.
        self.db = sqlite3.connect(*args, **kwargs)
        # isolation_level=None puts the sqlite3 module in autocommit mode.
        self.db.isolation_level = None

    def close(self):
        """
        Terminate the connection to the sqlite database
        """
        if self.db is not None:
            self.db.close()

    def __getitem__(self, name):
        """
        A pymongo-like behavior for dynamically obtaining a collection of documents
        """
        # Lazily create and cache Collection wrappers per name.
        if name not in self._collections:
            self._collections[name] = Collection(self.db, name)
        return self._collections[name]

    def __getattr__(self, name):
        # Attribute fallback so ``conn.users`` behaves like ``conn['users']``.
        if name in self.__dict__:
            return self.__dict__[name]
        return self[name]

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_traceback):
        # Close the sqlite handle on context exit; never suppress exceptions.
        self.close()
        return False

    def drop_collection(self, name):
        """
        Drops a collection permanently if it exists
        """
        # NOTE(review): ``name`` is interpolated directly into the SQL text;
        # callers must not pass untrusted collection names.
        self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
    """
    Create an index side table for ``key`` (a field name or a list/tuple
    of field names) if it does not exist, then optionally reindex.

    :param key: field name, or list/tuple of field names (compound index)
    :param reindex: force a full reindex even when the side table existed
    :param sparse: forwarded to :meth:`reindex`
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    if isinstance(key, (list, tuple)):
        index_name = ','.join(key)
        index_columns = ', '.join('%s text' % f for f in key)
    else:
        index_name = key
        index_columns = '%s text' % key
    # Index data lives in a bracket-quoted side table '[collection{index}]'
    table_name = '[%s{%s}]' % (self.name, index_name)
    # Always reindex when the side table did not exist before this call
    reindex = reindex or not self._object_exists('table', table_name)
    # Create a table store for the index data
    self.db.execute("""
        create table if not exists {table} (
            id integer primary key,
            {columns},
            foreign key(id) references {collection}(id) on delete cascade on update cascade
        )
    """.format(
        table=table_name,
        collection=self.name,
        columns=index_columns
    ))
    # Create the index
    self.db.execute("""
        create index if not exists [idx.{collection}{{index}}] on {table}({index})
    """.format(
        collection=self.name,
        index=index_name,
        table=table_name,
    ))
    if reindex:
        # BUGFIX: reindex() parses a '[collection{index}]' side-table name,
        # so it must receive table_name, not the raw key.
        self.reindex(table_name, sparse=sparse)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
    """
    Rebuild the index side table ``table`` (named '[collection{a,b}]',
    as produced by :meth:`create_index`) from the collection's documents.

    :param table: the index side-table name, including brackets
    :param sparse: when True, skip documents missing an indexed key
        instead of storing NULL
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    # Recover the indexed field names from the '[name{a,b}]' table name.
    index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
    update = "update {table} set {key} = ? where id = ?"
    insert = "insert into {table}(id, {index}) values(?, {q})"
    count = "select count(1) from {table} where id = ?"
    qs = ('?,' * len(index)).rstrip(',')
    for document in self.find():
        # Ensure there's a row keyed by the document id before we update.
        row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
        if int(row[0]) == 0:
            # BUGFIX: join the column names into SQL text (old code formatted
            # the repr of a Python list) and store the document id so the
            # updates below can find the row.
            self.db.execute(
                insert.format(table=table, index=','.join(index), q=qs),
                [document['_id']] + [None for _ in index])
        for key in index:
            # Ignore this document if it doesn't have the key
            if key not in document and sparse:
                continue
            self.db.execute(update.format(table=table, key=key),
                            (document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _exists(field, value, document):
"""
Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised
"""
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document
|
shaunduncan/nosqlite | nosqlite.py | _exists | python | def _exists(field, value, document):
if value not in (True, False):
raise MalformedQueryException("'$exists' must be supplied a boolean")
if value:
return field in document
else:
return field not in document | Ensures a document has a given field or not. ``value`` must be either True or
False, otherwise a MalformedQueryException is raised | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L558-L569 | null | import json
import re
import sqlite3
import sys
import warnings
from functools import partial
from itertools import starmap
try:
from itertools import ifilter as filter, imap as map
except ImportError: # pragma: no cover Python >= 3.0
pass
class MalformedQueryException(Exception):
pass
class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def connect(self, *args, **kwargs):
"""
Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit
"""
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
    """
    Create an index side table for ``key`` (a field name or a list/tuple
    of field names) if it does not exist, then optionally reindex.

    :param key: field name, or list/tuple of field names (compound index)
    :param reindex: force a full reindex even when the side table existed
    :param sparse: forwarded to :meth:`reindex`
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    if isinstance(key, (list, tuple)):
        index_name = ','.join(key)
        index_columns = ', '.join('%s text' % f for f in key)
    else:
        index_name = key
        index_columns = '%s text' % key
    # Index data lives in a bracket-quoted side table '[collection{index}]'
    table_name = '[%s{%s}]' % (self.name, index_name)
    # Always reindex when the side table did not exist before this call
    reindex = reindex or not self._object_exists('table', table_name)
    # Create a table store for the index data
    self.db.execute("""
        create table if not exists {table} (
            id integer primary key,
            {columns},
            foreign key(id) references {collection}(id) on delete cascade on update cascade
        )
    """.format(
        table=table_name,
        collection=self.name,
        columns=index_columns
    ))
    # Create the index
    self.db.execute("""
        create index if not exists [idx.{collection}{{index}}] on {table}({index})
    """.format(
        collection=self.name,
        index=index_name,
        table=table_name,
    ))
    if reindex:
        # BUGFIX: reindex() parses a '[collection{index}]' side-table name,
        # so it must receive table_name, not the raw key.
        self.reindex(table_name, sparse=sparse)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
    """
    Rebuild the index side table ``table`` (named '[collection{a,b}]',
    as produced by :meth:`create_index`) from the collection's documents.

    :param table: the index side-table name, including brackets
    :param sparse: when True, skip documents missing an indexed key
        instead of storing NULL
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
    # Recover the indexed field names from the '[name{a,b}]' table name.
    index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
    update = "update {table} set {key} = ? where id = ?"
    insert = "insert into {table}(id, {index}) values(?, {q})"
    count = "select count(1) from {table} where id = ?"
    qs = ('?,' * len(index)).rstrip(',')
    for document in self.find():
        # Ensure there's a row keyed by the document id before we update.
        row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
        if int(row[0]) == 0:
            # BUGFIX: join the column names into SQL text (old code formatted
            # the repr of a Python list) and store the document id so the
            # updates below can find the row.
            self.db.execute(
                insert.format(table=table, index=','.join(index), q=qs),
                [document['_id']] + [None for _ in index])
        for key in index:
            # Ignore this document if it doesn't have the key
            if key not in document and sparse:
                continue
            self.db.execute(update.format(table=table, key=key),
                            (document.get(key, None), document['_id']))
def drop_index(self):
    """
    Drop a single index for this collection.

    Placeholder: nothing is dropped yet; index support is experimental.
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
def drop_indexes(self):
    """
    Drop all indexes for this collection.

    Placeholder: nothing is dropped yet; index support is experimental.
    """
    warnings.warn('Index support is currently very alpha and is not guaranteed')
# BELOW ARE OPERATIONS FOR LOOKUPS
# TypeErrors are caught specifically for python 3 compatibility
def _eq(field, value, document):
"""
Returns True if the value of a document field is equal to a given value
"""
try:
return document.get(field, None) == value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gt(field, value, document):
"""
Returns True if the value of a document field is greater than a given value
"""
try:
return document.get(field, None) > value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lt(field, value, document):
"""
Returns True if the value of a document field is less than a given value
"""
try:
return document.get(field, None) < value
except TypeError: # pragma: no cover Python < 3.0
return False
def _gte(field, value, document):
"""
Returns True if the value of a document field is greater than or
equal to a given value
"""
try:
return document.get(field, None) >= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _lte(field, value, document):
"""
Returns True if the value of a document field is less than or
equal to a given value
"""
try:
return document.get(field, None) <= value
except TypeError: # pragma: no cover Python < 3.0
return False
def _all(field, value, document):
"""
Returns True if the value of document field contains all the values
specified by ``value``. If supplied value is not an iterable, a
MalformedQueryException is raised. If the value of the document field
is not an iterable, False is returned
"""
try:
a = set(value)
except TypeError:
raise MalformedQueryException("'$all' must accept an iterable")
try:
b = set(document.get(field, []))
except TypeError:
return False
else:
return a.intersection(b) == a
def _in(field, value, document):
"""
Returns True if document[field] is in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$in' must accept an iterable")
return document.get(field, None) in values
def _ne(field, value, document):
"""
Returns True if the value of document[field] is not equal to a given value
"""
return document.get(field, None) != value
def _nin(field, value, document):
"""
Returns True if document[field] is NOT in the interable value. If the
supplied value is not an iterable, then a MalformedQueryException is raised
"""
try:
values = iter(value)
except TypeError:
raise MalformedQueryException("'$nin' must accept an iterable")
return document.get(field, None) not in values
def _mod(field, value, document):
"""
Performs a mod on a document field. Value must be a list or tuple with
two values divisor and remainder (i.e. [2, 0]). This will essentially
perform the following:
document[field] % divisor == remainder
If the value does not contain integers or is not a two-item list/tuple,
a MalformedQueryException will be raised. If the value of document[field]
cannot be converted to an integer, this will return False.
"""
try:
divisor, remainder = map(int, value)
except (TypeError, ValueError):
raise MalformedQueryException("'$mod' must accept an iterable: [divisor, remainder]")
try:
return int(document.get(field, None)) % divisor == remainder
except (TypeError, ValueError):
return False
|
shaunduncan/nosqlite | nosqlite.py | Connection.connect | python | def connect(self, *args, **kwargs):
self.db = sqlite3.connect(*args, **kwargs)
self.db.isolation_level = None | Connect to a sqlite database only if no connection exists. Isolation level
for the connection is automatically set to autocommit | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L30-L36 | null | class Connection(object):
"""
The high-level connection to a sqlite database. Creating a connection accepts
the same args and keyword args as the ``sqlite3.connect`` method
"""
def __init__(self, *args, **kwargs):
self._collections = {}
self.connect(*args, **kwargs)
def close(self):
"""
Terminate the connection to the sqlite database
"""
if self.db is not None:
self.db.close()
def __getitem__(self, name):
"""
A pymongo-like behavior for dynamically obtaining a collection of documents
"""
if name not in self._collections:
self._collections[name] = Collection(self.db, name)
return self._collections[name]
def __getattr__(self, name):
if name in self.__dict__:
return self.__dict__[name]
return self[name]
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_traceback):
self.close()
return False
def drop_collection(self, name):
"""
Drops a collection permanently if it exists
"""
self.db.execute("drop table if exists %s" % name)
|
shaunduncan/nosqlite | nosqlite.py | Collection.insert | python | def insert(self, document):
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document | Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L115-L131 | [
"def update(self, document):\n \"\"\"\n Updates a document stored in this collection. If the document does not\n already have an '_id' value, it will be created\n \"\"\"\n if '_id' not in document:\n return self.insert(document)\n\n # Update the stored document, removing the id\n copy = ... | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.update | python | def update(self, document):
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document | Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L133-L149 | [
"def insert(self, document):\n \"\"\"\n Inserts a document into this collection. If a document already has an '_id'\n value it will be updated\n\n :returns: inserted document with id\n \"\"\"\n if '_id' in document:\n return self.update(document)\n\n # Create it and return a modified one... | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.remove | python | def remove(self, document):
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],)) | Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L151-L157 | null | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection._load | python | def _load(self, id, data):
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document | Loads a JSON document taking care to apply the document id | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L171-L180 | null | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.find | python | def find(self, query=None, limit=None):
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results | Returns a list of documents in this collection that match a given query | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L182-L201 | null | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection._apply_query | python | def _apply_query(self, query, document):
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches) | Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10. | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L203-L287 | null | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection._get_operator_fn | python | def _get_operator_fn(self, op):
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op) | Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L289-L303 | null | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.find_one | python | def find_one(self, query=None):
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None | Equivalent to ``find(query, limit=1)[0]`` | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L305-L312 | [
"def find(self, query=None, limit=None):\n \"\"\"\n Returns a list of documents in this collection that match a given query\n \"\"\"\n results = []\n query = query or {}\n\n # TODO: When indexes are implemented, we'll need to intelligently hit one of the\n # index stores so we don't do a full t... | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.find_and_modify | python | def find_and_modify(self, query=None, update=None):
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document) | Finds documents in this collection that match a given query and updates them | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L314-L322 | [
"def update(self, document):\n \"\"\"\n Updates a document stored in this collection. If the document does not\n already have an '_id' value, it will be created\n \"\"\"\n if '_id' not in document:\n return self.insert(document)\n\n # Update the stored document, removing the id\n copy = ... | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.rename | python | def rename(self, new_name):
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name | Rename this collection | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L330-L338 | [
"def exists(self):\n \"\"\"\n Checks if this collection exists\n \"\"\"\n return self._object_exists('table', self.name)\n"
] | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
    def create_index(self, key, reindex=True, sparse=False):
        """
        Creates an index if it does not exist then performs a full reindex for this collection

        :param key: a single field name, or a list/tuple of field names for
            a compound index
        :param reindex: when True (default), rebuild the index data after
            creating it; a rebuild is also forced when the index table did
            not previously exist
        :param sparse: NOTE(review): accepted but not forwarded to
            ``reindex`` here -- confirm whether that is intentional
        """
        warnings.warn('Index support is currently very alpha and is not guaranteed')
        if isinstance(key, (list, tuple)):
            # Compound index: all fields live in one index table.
            index_name = ','.join(key)
            index_columns = ', '.join('%s text' % f for f in key)
        else:
            index_name = key
            index_columns = '%s text' % key
        # Index tables are named [collection{key1,key2}]; the square
        # brackets keep the braces/commas legal as a SQLite identifier.
        table_name = '[%s{%s}]' % (self.name, index_name)
        reindex = reindex or not self._object_exists('table', table_name)
        # Create a table store for the index data
        self.db.execute("""
            create table if not exists {table} (
                id integer primary key,
                {columns},
                foreign key(id) references {collection}(id) on delete cascade on update cascade
            )
        """.format(
            table=table_name,
            collection=self.name,
            columns=index_columns
        ))
        # Create the index
        self.db.execute("""
            create index if not exists [idx.{collection}{{index}}] on {table}({index})
        """.format(
            collection=self.name,
            index=index_name,
            table=table_name,
        ))
        if reindex:
            self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
    def drop_index(self):
        """
        Drop a single index for this collection.

        NOTE(review): currently a stub -- no index is actually dropped.
        """
        warnings.warn('Index support is currently very alpha and is not guaranteed')
        pass
    def drop_indexes(self):
        """
        Drop all indexes for this collection
        """
        # NOTE(review): stub -- indexes are not actually dropped yet.
        warnings.warn('Index support is currently very alpha and is not guaranteed')
        pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.distinct | python | def distinct(self, key):
return set(d[key] for d in filter(lambda d: key in d, self.find())) | Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L340-L345 | [
"def find(self, query=None, limit=None):\n \"\"\"\n Returns a list of documents in this collection that match a given query\n \"\"\"\n results = []\n query = query or {}\n\n # TODO: When indexes are implemented, we'll need to intelligently hit one of the\n # index stores so we don't do a full t... | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.create_index | python | def create_index(self, key, reindex=True, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key) | Creates an index if it does not exist then performs a full reindex for this collection | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L347-L385 | [
"def _object_exists(self, type, name):\n row = self.db.execute(\n \"select count(1) from sqlite_master where type = ? and name = ?\",\n (type, name.strip('[]'))\n ).fetchone()\n\n return int(row[0]) > 0\n"
] | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def ensure_index(self, key, sparse=False):
"""
Equivalent to ``create_index(key, reindex=False)``
"""
self.create_index(key, reindex=False, sparse=False)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
shaunduncan/nosqlite | nosqlite.py | Collection.ensure_index | python | def ensure_index(self, key, sparse=False):
self.create_index(key, reindex=False, sparse=False) | Equivalent to ``create_index(key, reindex=False)`` | train | https://github.com/shaunduncan/nosqlite/blob/3033c029b7c8290c66a8b36dc512e560505d4c85/nosqlite.py#L387-L391 | [
"def create_index(self, key, reindex=True, sparse=False):\n \"\"\"\n Creates an index if it does not exist then performs a full reindex for this collection\n \"\"\"\n warnings.warn('Index support is currently very alpha and is not guaranteed')\n if isinstance(key, (list, tuple)):\n index_name ... | class Collection(object):
"""
A virtual database table that holds JSON-type documents
"""
def __init__(self, db, name, create=True):
self.db = db
self.name = name
if create:
self.create()
def clear(self):
"""
Clears all stored documents in this database. THERE IS NO GOING BACK
"""
self.db.execute("delete from %s" % self.name)
def exists(self):
"""
Checks if this collection exists
"""
return self._object_exists('table', self.name)
def _object_exists(self, type, name):
row = self.db.execute(
"select count(1) from sqlite_master where type = ? and name = ?",
(type, name.strip('[]'))
).fetchone()
return int(row[0]) > 0
def create(self):
"""
Creates the collections database only if it does not already exist
"""
self.db.execute("""
create table if not exists %s (
id integer primary key autoincrement,
data text not null
)
""" % self.name)
def insert(self, document):
"""
Inserts a document into this collection. If a document already has an '_id'
value it will be updated
:returns: inserted document with id
"""
if '_id' in document:
return self.update(document)
# Create it and return a modified one with the id
cursor = self.db.execute("""
insert into %s(data) values (?)
""" % self.name, (json.dumps(document),))
document['_id'] = cursor.lastrowid
return document
def update(self, document):
"""
Updates a document stored in this collection. If the document does not
already have an '_id' value, it will be created
"""
if '_id' not in document:
return self.insert(document)
# Update the stored document, removing the id
copy = document.copy()
del copy['_id']
self.db.execute("""
update %s set data = ? where id = ?
""" % self.name, (json.dumps(copy), document['_id']))
return document
def remove(self, document):
"""
Removes a document from this collection. This will raise AssertionError if the
document does not have an _id attribute
"""
assert '_id' in document, 'Document must have an id'
self.db.execute("delete from %s where id = ?" % self.name, (document['_id'],))
def save(self, document):
"""
Alias for ``update``
"""
return self.update(document)
def delete(self, document):
"""
Alias for ``remove``
"""
return self.remove(document)
def _load(self, id, data):
"""
Loads a JSON document taking care to apply the document id
"""
if isinstance(data, bytes): # pragma: no cover Python >= 3.0
data = data.decode('utf-8')
document = json.loads(data)
document['_id'] = id
return document
def find(self, query=None, limit=None):
"""
Returns a list of documents in this collection that match a given query
"""
results = []
query = query or {}
# TODO: When indexes are implemented, we'll need to intelligently hit one of the
# index stores so we don't do a full table scan
cursor = self.db.execute("select id, data from %s" % self.name)
apply = partial(self._apply_query, query)
for match in filter(apply, starmap(self._load, cursor.fetchall())):
results.append(match)
# Just return if we already reached the limit
if limit and len(results) == limit:
return results
return results
def _apply_query(self, query, document):
"""
Applies a query to a document. Returns True if the document meets the criteria of
the supplied query. The ``query`` argument generally follows mongodb style syntax
and consists of the following logical checks and operators.
Logical: $and, $or, $nor, $not
Operators: $eq, $ne, $gt, $gte, $lt, $lte, $mod, $in, $nin, $all
If no logical operator is supplied, it assumed that all field checks must pass. For
example, these are equivalent:
{'foo': 'bar', 'baz': 'qux'}
{'$and': [{'foo': 'bar'}, {'baz': 'qux'}]}
Both logical and operational queries can be nested in a complex fashion:
{
'bar': 'baz',
'$or': [
{
'foo': {
'$gte': 0,
'$lte': 10,
'$mod': [2, 0]
}
},
{
'foo': {
'$gt': 10,
'$mod': [2, 1]
}
},
]
}
In the previous example, this will return any document where the 'bar' key is equal
to 'baz' and either the 'foo' key is an even number between 0 and 10 or is an odd number
greater than 10.
"""
matches = [] # A list of booleans
reapply = lambda q: self._apply_query(q, document)
for field, value in query.items():
# A more complex query type $and, $or, etc
if field == '$and':
matches.append(all(map(reapply, value)))
elif field == '$or':
matches.append(any(map(reapply, value)))
elif field == '$nor':
matches.append(not any(map(reapply, value)))
elif field == '$not':
matches.append(not self._apply_query(value, document))
# Invoke a query operator
elif isinstance(value, dict):
for operator, arg in value.items():
if not self._get_operator_fn(operator)(field, arg, document):
matches.append(False)
break
else:
matches.append(True)
# Standard
elif value != document.get(field, None):
# check if field contains a dot
if '.' in field:
nodes = field.split('.')
document_section = document
try:
for path in nodes[:-1]:
document_section = document_section.get(path, None)
except AttributeError:
document_section = None
if document_section is None:
matches.append(False)
else:
if value != document_section.get(nodes[-1], None):
matches.append(False)
else:
matches.append(False)
return all(matches)
def _get_operator_fn(self, op):
"""
Returns the function in this module that corresponds to an operator string.
This simply checks if there is a method that handles the operator defined
in this module, replacing '$' with '_' (i.e. if this module has a _gt
method for $gt) and returns it. If no match is found, or the operator does not
start with '$', a MalformedQueryException is raised
"""
if not op.startswith('$'):
raise MalformedQueryException("Operator '%s' is not a valid query operation" % op)
try:
return getattr(sys.modules[__name__], op.replace('$', '_'))
except AttributeError:
raise MalformedQueryException("Operator '%s' is not currently implemented" % op)
def find_one(self, query=None):
"""
Equivalent to ``find(query, limit=1)[0]``
"""
try:
return self.find(query=query, limit=1)[0]
except (sqlite3.OperationalError, IndexError):
return None
def find_and_modify(self, query=None, update=None):
"""
Finds documents in this collection that match a given query and updates them
"""
update = update or {}
for document in self.find(query=query):
document.update(update)
self.update(document)
def count(self, query=None):
"""
Equivalent to ``len(find(query))``
"""
return len(self.find(query=query))
def rename(self, new_name):
"""
Rename this collection
"""
new_collection = Collection(self.db, new_name, create=False)
assert not new_collection.exists()
self.db.execute("alter table %s rename to %s" % (self.name, new_name))
self.name = new_name
def distinct(self, key):
"""
Get a set of distinct values for the given key excluding an implicit
None for documents that do not contain the key
"""
return set(d[key] for d in filter(lambda d: key in d, self.find()))
def create_index(self, key, reindex=True, sparse=False):
"""
Creates an index if it does not exist then performs a full reindex for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
if isinstance(key, (list, tuple)):
index_name = ','.join(key)
index_columns = ', '.join('%s text' % f for f in key)
else:
index_name = key
index_columns = '%s text' % key
table_name = '[%s{%s}]' % (self.name, index_name)
reindex = reindex or not self._object_exists('table', table_name)
# Create a table store for the index data
self.db.execute("""
create table if not exists {table} (
id integer primary key,
{columns},
foreign key(id) references {collection}(id) on delete cascade on update cascade
)
""".format(
table=table_name,
collection=self.name,
columns=index_columns
))
# Create the index
self.db.execute("""
create index if not exists [idx.{collection}{{index}}] on {table}({index})
""".format(
collection=self.name,
index=index_name,
table=table_name,
))
if reindex:
self.reindex(key)
def reindex(self, table, sparse=False):
warnings.warn('Index support is currently very alpha and is not guaranteed')
index = re.findall(r'^\[.*\{(.*)\}\]$', table)[0].split(',')
update = "update {table} set {key} = ? where id = ?"
insert = "insert into {table}({index}) values({q})"
count = "select count(1) from {table} where id = ?"
qs = ('?,' * len(index)).rstrip(',')
for document in self.find():
# Ensure there's a row before we update
row = self.db.execute(count.format(table=table), (document['_id'],)).fetchone()
if int(row[0]) == 0:
self.db.execute(insert.format(table=table, index=index, q=qs),
[None for x in index])
for key in index:
# Ignore this document if it doesn't have the key
if key not in document and sparse:
continue
self.db.execute(update.format(table=table, key=key),
(document.get(key, None), document['_id']))
def drop_index(self):
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
def drop_indexes(self):
"""
Drop all indexes for this collection
"""
warnings.warn('Index support is currently very alpha and is not guaranteed')
pass
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/utils.py | require_server | python | def require_server(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
if env.machine is None:
abort(red('ERROR: You must provide a server name to call this'
' task!'))
return fn(*args, **kwargs)
return wrapper | Checks if the user has called the task with a server name.
Fabric tasks decorated with this decorator must be called like so::
fab <server name> <task name>
If no server name is given, the task will not be executed. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/utils.py#L9-L26 | null | """Utilities for the fabfile."""
from functools import wraps
from fabric.api import env, run
from fabric.colors import red
from fabric.utils import abort
def run_workon(command):
    """
    Runs the given shell command inside the project's virtualenv.

    :param command: A string representing a shell command that should be
      executed.

    """
    # Use an interactive login shell so virtualenvwrapper's ``workon``
    # function is available.
    env.shell = "/bin/bash -l -i -c"
    wrapped = 'workon {0} && {1}'.format(env.venv_name, command)
    return run(wrapped)
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_deploy_website | python | def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi() | Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L55-L81 | null | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
"""
Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
def run_export_db(filename=None):
"""
Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
def run_export_media(filename=None):
"""
Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
"""
Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag.
"""
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command)
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
"""
Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project
"""
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command)
@require_server
def run_syncdb():
"""
Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
"""
Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_download_media | python | def run_download_media(filename=None):
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename)) | Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L108-L127 | null | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
"""
Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website
"""
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi()
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
@require_server
def run_export_db(filename=None):
"""
Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
def run_export_media(filename=None):
"""
Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
"""
Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag.
"""
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command)
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
"""
Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project
"""
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command)
@require_server
def run_syncdb():
"""
Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
"""
Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_export_db | python | def run_export_db(filename=None):
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename)) | Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L131-L144 | [
"def run_workon(command):\n \"\"\"\n Starts the virtualenv before running the given command.\n\n :param command: A string representing a shell command that should be\n executed.\n \"\"\"\n env.shell = \"/bin/bash -l -i -c\"\n return run('workon {0} && {1}'.format(env.venv_name, command))\n"
] | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
"""
Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website
"""
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi()
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
"""
Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
@require_server
def run_export_media(filename=None):
"""
Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
"""
Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag.
"""
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command)
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
"""
Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project
"""
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command)
@require_server
def run_syncdb():
"""
Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
"""
Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_export_media | python | def run_export_media(filename=None):
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'))) | Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L148-L165 | null | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
"""
Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website
"""
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi()
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
"""
Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
def run_export_db(filename=None):
"""
Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
"""
Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag.
"""
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command)
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
"""
Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project
"""
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command)
@require_server
def run_syncdb():
"""
Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
"""
Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_pip_install | python | def run_pip_install(upgrade=0):
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command) | Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L223-L240 | [
"def run_workon(command):\n \"\"\"\n Starts the virtualenv before running the given command.\n\n :param command: A string representing a shell command that should be\n executed.\n \"\"\"\n env.shell = \"/bin/bash -l -i -c\"\n return run('workon {0} && {1}'.format(env.venv_name, command))\n"
] | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
"""
Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website
"""
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi()
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
"""
Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
def run_export_db(filename=None):
"""
Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
def run_export_media(filename=None):
"""
Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
"""
Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project
"""
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command)
@require_server
def run_syncdb():
"""
Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
"""
Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_rsync_project | python | def run_rsync_project():
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command) | Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L287-L306 | null | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
"""
Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website
"""
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi()
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
"""
Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
def run_export_db(filename=None):
"""
Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
def run_export_media(filename=None):
"""
Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
"""
Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag.
"""
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command)
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
@require_server
def run_syncdb():
"""
Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
"""
Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_syncdb | python | def run_syncdb():
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION)) | Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L310-L324 | null | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
"""
Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website
"""
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi()
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
"""
Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
def run_export_db(filename=None):
"""
Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
def run_export_media(filename=None):
"""
Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
"""
Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag.
"""
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command)
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
"""
Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project
"""
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command)
@require_server
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
def run_upload_db(filename=None):
"""
Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/remote.py | run_upload_db | python | def run_upload_db(filename=None):
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0} {1}:{3}'.format(
filename, ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'))) | Uploads your local database to the server.
You can create a local dump with ``fab export_db`` first.
In order to import the database on the server you still need to SSH into
the server.
Usage::
fab prod run_upload_db
fab prod run_upload_db:filename=foobar.dump | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/remote.py#L341-L363 | null | """Fab tasks that execute things on a remote server."""
import sys
import django
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import cd, env, local, run
from .local import drop_db, create_db, import_db, import_media, reset_passwords
from .utils import require_server, run_workon
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
if getattr(settings, 'PEM_KEY_DIR', False):
env.key_filename = settings.PEM_KEY_DIR
@require_server
def run_collectstatic():
"""
Runs `./manage.py collectstatic` on the given server.
Usage::
fab <server> run_collectstatic
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py collectstatic --noinput'.format(
PYTHON_VERSION))
@require_server
def run_compilemessages():
"""
Executes ./manage.py compilemessages on the server.
Usage::
fab <server name> run_compilemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py compilemessages'.format(PYTHON_VERSION))
@require_server
def run_deploy_website(restart_apache=False, restart_uwsgi=False,
restart_nginx=False):
"""
Executes all tasks necessary to deploy the website on the given server.
Usage::
fab <server> run_deploy_website
"""
run_git_pull()
run_pip_install()
run_rsync_project()
run_syncdb()
run_collectstatic()
if getattr(settings, 'MAKEMESSAGES_ON_DEPLOYMENT', False):
run_makemessages()
if getattr(settings, 'COMPILEMESSAGES_ON_DEPLOYMENT', False):
run_compilemessages()
if restart_apache:
run_restart_apache()
if restart_uwsgi:
run_restart_uwsgi()
if restart_nginx:
run_restart_nginx()
else:
run_touch_wsgi()
@require_server
def run_download_db(filename=None):
"""
Downloads the database from the server into your local machine.
In order to import the downloaded database, run ``fab import_db``
Usage::
fab prod run_download_db
fab prod run_download_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_DB_BACKUP_DIR'), filename))
@require_server
def run_download_media(filename=None):
"""
Downloads the media dump from the server into your local machine.
In order to import the downloaded media dump, run ``fab import_media``
Usage::
fab prod run_download_media
fab prod run_download_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
if env.key_filename:
ssh = settings.PROJECT_NAME
else:
ssh = '{0}@{1}'.format(env.user, env.host_string)
local('scp {0}:{1}{2} .'.format(
ssh, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR'), filename))
@require_server
def run_export_db(filename=None):
"""
Exports the database on the server.
Usage::
fab prod run_export_db
fab prod run_export_db:filename=foobar.dump
"""
if not filename:
filename = settings.DB_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('fab export_db:remote=True,filename={}'.format(filename))
@require_server
def run_export_media(filename=None):
"""
Exports the media folder on the server.
Usage::
fab prod run_export_media
fab prod run_export_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
with cd(settings.FAB_SETTING('SERVER_MEDIA_ROOT')):
run('rm -rf {0}'.format(filename))
run('tar -czf {0} *'.format(filename))
run('mv {0} {1}'.format(
filename, settings.FAB_SETTING('SERVER_MEDIA_BACKUP_DIR')))
@require_server
def run_git_pull():
"""
Pulls the latest code and updates submodules.
Usage::
fab <server> run_git_pull
"""
with cd(settings.FAB_SETTING('SERVER_REPO_ROOT')):
run('git pull && git submodule init && git submodule update')
@require_server
def import_remote_db():
"""
Downloads a db and imports it locally.
"""
run_export_db()
run_download_db()
drop_db()
create_db()
import_db()
reset_passwords()
@require_server
def import_remote_media():
"""
Downloads media and imports it locally.
"""
run_export_media()
run_download_media()
import_media()
@require_server
def run_makemessages():
"""
Executes ./manage.py makemessages -s --all on the server.
Usage::
fab <server name> run_makemessages
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
run_workon('python{} manage.py makemessages -s --all'.format(
PYTHON_VERSION))
@require_server
def run_pip_install(upgrade=0):
"""
Installs the requirement.txt file on the given server.
Usage::
fab <server> run_pip_install
fab <server> run_pip_install:upgrade=1
:param upgrade: If set to 1, the command will be executed with the
``--upgrade`` flag.
"""
command = 'pip install -r {0}'.format(
settings.FAB_SETTING('SERVER_REQUIREMENTS_PATH'))
if upgrade:
command += ' --upgrade'
run_workon(command)
@require_server
def run_restart_apache():
"""
Restarts apache on the given server.
Usage::
fab <server> run_restart_apache
"""
run('{0}restart'.format(settings.FAB_SETTING('SERVER_APACHE_BIN_DIR')))
@require_server
def run_restart_uwsgi():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_uwsgi
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart uwsgi')
@require_server
def run_restart_nginx():
"""
Restarts uwsgi on the given server.
Usage::
fab <server> run_restart_nginx
"""
with cd(settings.FAB_SETTING('SERVER_LOCAL_ETC_DIR')):
run('supervisorctl restart nginx')
@require_server
def run_rsync_project():
"""
Copies the project from the git repository to it's destination folder.
This has the nice side effect of rsync deleting all ``.pyc`` files and
removing other files that might have been left behind by sys admins messing
around on the server.
Usage::
fab <server> run_rsync_project
"""
excludes = ''
for exclude in settings.RSYNC_EXCLUDES:
excludes += " --exclude '{0}'".format(exclude)
command = "rsync -avz --stats --delete {0} {1} {2}".format(
excludes, settings.FAB_SETTING('SERVER_REPO_PROJECT_ROOT'),
settings.FAB_SETTING('SERVER_APP_ROOT'))
run(command)
@require_server
def run_syncdb():
"""
Runs `./manage.py syncdb --migrate` on the given server.
Usage::
fab <server> run_syncdb
"""
with cd(settings.FAB_SETTING('SERVER_PROJECT_ROOT')):
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
run_workon('python{} manage.py syncdb --migrate --noinput'.format(
PYTHON_VERSION))
else:
run_workon('python{} manage.py migrate'.format(PYTHON_VERSION))
@require_server
def run_touch_wsgi():
"""
Runs `touch <path>/wsgi.py` on the given server.
Usage::
fab <server> run_touch_wsgi
"""
run('touch {0}'.format(settings.FAB_SETTING('SERVER_WSGI_FILE')))
@require_server
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/servers.py | local_machine | python | def local_machine():
common_conf()
env.machine = 'local'
env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE
env.db_backup_dir = settings.DJANGO_PROJECT_ROOT
env.media_backup_dir = settings.DJANGO_PROJECT_ROOT
# Not sure what this is good for. Not used in our fabfile.
# env.media_root = settings.DJANGO_MEDIA_ROOT
# env.local_db_password = settings.DJANGO_DB_PASSWORD
env.db_role = settings.DATABASES['default']['USER']
env.db_name = settings.DATABASES['default']['NAME'] | Option to do something on local machine. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/servers.py#L23-L36 | [
"def common_conf():\n \"\"\"Sets some default values in the environment.\"\"\"\n env.port = '22'\n env.pg_admin_role = 'postgres'\n env.venv_name = settings.VENV_NAME\n"
] | """
Different server options that can be used when running fab tasks.
For example, if you want to export the database from the staging server
you can call the fab task like so::
fab stage run_export_db
"""
from django.conf import settings
from fabric.api import env
def common_conf():
"""Sets some default values in the environment."""
env.port = '22'
env.pg_admin_role = 'postgres'
env.venv_name = settings.VENV_NAME
common_conf()
def dev():
"""Option to do something on the development server."""
common_conf()
env.user = settings.LOGIN_USER_DEV
env.machine = 'dev'
env.host_string = settings.HOST_DEV
env.hosts = [env.host_string, ]
def stage():
"""Option to do something on the staging server."""
common_conf()
env.user = settings.LOGIN_USER_STAGE
env.machine = 'stage'
env.host_string = settings.HOST_STAGE
env.hosts = [env.host_string, ]
def prod():
"""Option to do something on the production server."""
common_conf()
env.user = settings.LOGIN_USER_PROD
env.machine = 'prod'
env.host_string = settings.HOST_PROD
env.hosts = [env.host_string, ]
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/servers.py | dev | python | def dev():
common_conf()
env.user = settings.LOGIN_USER_DEV
env.machine = 'dev'
env.host_string = settings.HOST_DEV
env.hosts = [env.host_string, ] | Option to do something on the development server. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/servers.py#L39-L45 | [
"def common_conf():\n \"\"\"Sets some default values in the environment.\"\"\"\n env.port = '22'\n env.pg_admin_role = 'postgres'\n env.venv_name = settings.VENV_NAME\n"
] | """
Different server options that can be used when running fab tasks.
For example, if you want to export the database from the staging server
you can call the fab task like so::
fab stage run_export_db
"""
from django.conf import settings
from fabric.api import env
def common_conf():
"""Sets some default values in the environment."""
env.port = '22'
env.pg_admin_role = 'postgres'
env.venv_name = settings.VENV_NAME
common_conf()
def local_machine():
"""Option to do something on local machine."""
common_conf()
env.machine = 'local'
env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE
env.db_backup_dir = settings.DJANGO_PROJECT_ROOT
env.media_backup_dir = settings.DJANGO_PROJECT_ROOT
# Not sure what this is good for. Not used in our fabfile.
# env.media_root = settings.DJANGO_MEDIA_ROOT
# env.local_db_password = settings.DJANGO_DB_PASSWORD
env.db_role = settings.DATABASES['default']['USER']
env.db_name = settings.DATABASES['default']['NAME']
def stage():
"""Option to do something on the staging server."""
common_conf()
env.user = settings.LOGIN_USER_STAGE
env.machine = 'stage'
env.host_string = settings.HOST_STAGE
env.hosts = [env.host_string, ]
def prod():
"""Option to do something on the production server."""
common_conf()
env.user = settings.LOGIN_USER_PROD
env.machine = 'prod'
env.host_string = settings.HOST_PROD
env.hosts = [env.host_string, ]
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/servers.py | stage | python | def stage():
common_conf()
env.user = settings.LOGIN_USER_STAGE
env.machine = 'stage'
env.host_string = settings.HOST_STAGE
env.hosts = [env.host_string, ] | Option to do something on the staging server. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/servers.py#L48-L54 | [
"def common_conf():\n \"\"\"Sets some default values in the environment.\"\"\"\n env.port = '22'\n env.pg_admin_role = 'postgres'\n env.venv_name = settings.VENV_NAME\n"
] | """
Different server options that can be used when running fab tasks.
For example, if you want to export the database from the staging server
you can call the fab task like so::
fab stage run_export_db
"""
from django.conf import settings
from fabric.api import env
def common_conf():
"""Sets some default values in the environment."""
env.port = '22'
env.pg_admin_role = 'postgres'
env.venv_name = settings.VENV_NAME
common_conf()
def local_machine():
"""Option to do something on local machine."""
common_conf()
env.machine = 'local'
env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE
env.db_backup_dir = settings.DJANGO_PROJECT_ROOT
env.media_backup_dir = settings.DJANGO_PROJECT_ROOT
# Not sure what this is good for. Not used in our fabfile.
# env.media_root = settings.DJANGO_MEDIA_ROOT
# env.local_db_password = settings.DJANGO_DB_PASSWORD
env.db_role = settings.DATABASES['default']['USER']
env.db_name = settings.DATABASES['default']['NAME']
def dev():
    """Target the development server."""
    common_conf()
    env.machine = 'dev'
    env.user = settings.LOGIN_USER_DEV
    env.host_string = settings.HOST_DEV
    env.hosts = [env.host_string]
def prod():
"""Option to do something on the production server."""
common_conf()
env.user = settings.LOGIN_USER_PROD
env.machine = 'prod'
env.host_string = settings.HOST_PROD
env.hosts = [env.host_string, ]
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/servers.py | prod | python | def prod():
common_conf()
env.user = settings.LOGIN_USER_PROD
env.machine = 'prod'
env.host_string = settings.HOST_PROD
env.hosts = [env.host_string, ] | Option to do something on the production server. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/servers.py#L57-L63 | [
"def common_conf():\n \"\"\"Sets some default values in the environment.\"\"\"\n env.port = '22'\n env.pg_admin_role = 'postgres'\n env.venv_name = settings.VENV_NAME\n"
] | """
Different server options that can be used when running fab tasks.
For example, if you want to export the database from the staging server
you can call the fab task like so::
fab stage run_export_db
"""
from django.conf import settings
from fabric.api import env
def common_conf():
"""Sets some default values in the environment."""
env.port = '22'
env.pg_admin_role = 'postgres'
env.venv_name = settings.VENV_NAME
common_conf()
def local_machine():
"""Option to do something on local machine."""
common_conf()
env.machine = 'local'
env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE
env.db_backup_dir = settings.DJANGO_PROJECT_ROOT
env.media_backup_dir = settings.DJANGO_PROJECT_ROOT
# Not sure what this is good for. Not used in our fabfile.
# env.media_root = settings.DJANGO_MEDIA_ROOT
# env.local_db_password = settings.DJANGO_DB_PASSWORD
env.db_role = settings.DATABASES['default']['USER']
env.db_name = settings.DATABASES['default']['NAME']
def dev():
"""Option to do something on the development server."""
common_conf()
env.user = settings.LOGIN_USER_DEV
env.machine = 'dev'
env.host_string = settings.HOST_DEV
env.hosts = [env.host_string, ]
def stage():
"""Option to do something on the staging server."""
common_conf()
env.user = settings.LOGIN_USER_STAGE
env.machine = 'stage'
env.host_string = settings.HOST_STAGE
env.hosts = [env.host_string, ]
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | check_coverage | python | def check_coverage():
with lcd(settings.LOCAL_COVERAGE_PATH):
total_line = local('grep -n Total index.html', capture=True)
match = re.search(r'^(\d+):', total_line)
total_line_number = int(match.groups()[0])
percentage_line_number = total_line_number + 5
percentage_line = local(
'awk NR=={0} index.html'.format(percentage_line_number),
capture=True)
match = re.search(r'(\d.+)%', percentage_line)
try:
percentage = float(match.groups()[0])
except ValueError:
# If there's no dotting try another search
match = re.search(r'(\d+)%', percentage_line)
percentage = float(match.groups()[0])
if percentage < 100:
abort(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage))) | Checks if the coverage is 100%. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L42-L61 | null | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
    """Run the full QA suite: flake8, syntax check, jshint, tests, coverage.

    Aborts on the first stage that fails (each stage calls ``abort`` itself).
    """
    flake8()
    syntax_check()
    jshint()
    test()
    check_coverage()
def create_db(with_postgis=False):
    """
    Create the local database, its owning role and grant privileges.

    :param with_postgis: If ``True``, the postgis extension will be installed.
    """
    local_machine()
    # Build the full command list first, then run the commands in order.
    commands = [
        'psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
            USER_AND_HOST, env.db_role, DB_PASSWORD),
        'psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
            USER_AND_HOST, env.db_name),
    ]
    if with_postgis:
        commands.append('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
            USER_AND_HOST, env.db_name))
    commands.append(
        'psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
        ' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
    commands.append(
        'psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
        ' IN SCHEMA public TO {1}"'.format(USER_AND_HOST, env.db_role))
    for command in commands:
        local(command)
def delete_db():
    """
    Deletes all data in the database.

    You need django-extensions in order to use this (it provides the
    ``reset_db`` management command).
    However, please note that this is not as thorough as a real database drop.
    """
    # NOTE(review): the leading space before ``./manage.py`` looks
    # accidental — verify it is intentional before changing it.
    local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
    """
    Exports the database with ``pg_dump``.

    Make sure that you have this in your ``~/.pgpass`` file:

        localhost:5433:*:<db_role>:<password>

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab export_db
        fab export_db:filename=foobar.dump

    :param filename: Name of the dump file; defaults to
      ``settings.DB_DUMP_FILENAME``.
    :param remote: If truthy, prefix the dump path with the server's
      backup directory instead of the current directory.
    """
    local_machine()
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    if remote:
        # NOTE(review): ``settings.FAB_SETTING`` is invoked like a function
        # here — confirm it resolves machine-specific settings (the server's
        # DB backup dir) rather than being a plain attribute.
        backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
    else:
        backup_dir = ''
    # -Fc: custom-format archive for pg_restore; -c: emit clean (drop)
    # statements; -O: skip ownership commands.
    local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
        env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
    """Drop the local database and its owning role."""
    local_machine()
    drop_statements = (
        'psql {0} -c "DROP DATABASE {1}"'.format(USER_AND_HOST, env.db_name),
        'psql {0} -c "DROP USER {1}"'.format(USER_AND_HOST, env.db_role),
    )
    # warn_only: do not abort when the database or user is already gone.
    with fab_settings(warn_only=True):
        for statement in drop_statements:
            local(statement)
def jshint():
    """Runs jshint over every ``*.js`` file and aborts if errors are found.

    Files matching ``settings.JSHINT_CHECK_EXCLUDES`` (falling back to
    ``settings.SYNTAX_CHECK_EXCLUDES``) are skipped. If jshint is not
    installed, only a warning is printed.
    """
    with fab_settings(warn_only=True):
        needs_to_abort = False
        # because jshint fails with exit code 2, we need to allow this as
        # a successful exit code in our env
        if 2 not in env.ok_ret_codes:
            env.ok_ret_codes.append(2)
        # Collect every .js file below the current directory.
        output = local(
            'find -name "{}" -print'.format('*.js'),
            capture=True,
        )
        files = output.split()
        jshint_installed = local('command -v jshint', capture=True)
        if not jshint_installed.succeeded:
            warn(red(
                "To enable an extended check of your js files, please"
                " install jshint by entering:\n\n npm install -g jshint"
            ))
        else:
            for file in files:
                # Prefer the jshint-specific exclude list when configured.
                if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
                    excludes = settings.JSHINT_CHECK_EXCLUDES
                else:
                    excludes = settings.SYNTAX_CHECK_EXCLUDES
                if any(s in file for s in excludes):
                    continue
                jshint_result = local(
                    'jshint {0}'.format(file),
                    capture=True
                )
                # jshint only prints output when it found problems, so any
                # captured text means the file has errors.
                if jshint_result:
                    warn(red('JS errors detected in file {0}'.format(
                        file
                    )))
                    puts(jshint_result)
                    needs_to_abort = True
            if needs_to_abort:
                abort(red('There have been errors. Please fix them and run'
                          ' the check again.'))
            else:
                puts(green('jshint found no errors. Very good!'))
def syntax_check():
    """Greps the codebase for forbidden patterns and aborts on any hit.

    For every file glob in ``settings.SYNTAX_CHECK`` (a mapping of glob ->
    egrep pattern), every matching file not excluded by
    ``settings.SYNTAX_CHECK_EXCLUDES`` is searched case-insensitively.
    """
    with fab_settings(warn_only=True):
        for file_type in settings.SYNTAX_CHECK:
            needs_to_abort = False
            # because egrep fails with exit code 1, we need to allow this as
            # a successful exit code in our env
            if 1 not in env.ok_ret_codes:
                env.ok_ret_codes.append(1)
            output = local(
                'find -name "{}" -print'.format(file_type),
                capture=True,
            )
            files = output.split()
            for file in files:
                if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
                    continue
                result = local('egrep -i -n "{0}" {1}'.format(
                    settings.SYNTAX_CHECK[file_type], file), capture=True)
                # egrep only prints matching lines, so any output is a hit.
                if result:
                    warn(red("Syntax check found in '{0}': {1}".format(
                        file, result)))
                    needs_to_abort = True
            if needs_to_abort:
                abort(red('There have been errors. Please fix them and run'
                          ' the check again.'))
            else:
                puts(green('Syntax check found no errors. Very good!'))
def flake8():
    """Runs flake8 against the codebase.

    E126 (continuation-line over-indentation) is ignored and vendored /
    generated directories are excluded. Returns the fabric result object
    of the local command.
    """
    return local('flake8 --ignore=E126 --statistics '
                 '--exclude=submodules,south_migrations,migrations,'
                 'node_modules .')
def import_db(filename=None):
    """
    Imports the database with ``pg_restore``.

    Make sure that you have this in your ``~/.pgpass`` file:

        localhost:5433:*:<db_role>:<password>

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab import_db
        fab import_db:filename=foobar.dump

    :param filename: Name of the dump file to restore; defaults to
      ``settings.DB_DUMP_FILENAME``.
    """
    local_machine()
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    # warn_only: pg_restore -c exits non-zero when objects to be cleaned do
    # not exist yet, which should not abort the task — presumably; verify.
    with fab_settings(warn_only=True):
        local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
            env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
    """
    Extracts media dump into your local media root.

    Please note that this might overwrite existing local files.

    Usage::

        fab import_media
        fab import_media:filename=foobar.tar.gz

    :param filename: Name of the tarball; defaults to
      ``settings.MEDIA_DUMP_FILENAME``.
    """
    if not filename:
        filename = settings.MEDIA_DUMP_FILENAME
    project_root = os.getcwd()
    # Silently probe for the backup file; ``.failed`` is True when the
    # shell ``test -e`` exits non-zero (file missing).
    with fab_settings(hide('everything'), warn_only=True):
        is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
            project_root, filename)).failed
    if is_backup_missing:
        abort(red('ERROR: There is no media backup that could be imported in'
                  ' {0}. We need a file called {1} in that folder.'.format(
                      project_root, filename)))
    # copy the dump into the media root folder
    with lcd(project_root):
        local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
    # extract and remove media dump
    with lcd(settings.MEDIA_ROOT):
        local('tar -xvf {0}'.format(filename))
        local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
    """
    Compile the project's Bootstrap ``.less`` sources to CSS.

    This is useful if you are using the Twitter Bootstrap Framework.

    :param responsive: Also compile the responsive stylesheet.
    """
    project = settings.PROJECT_NAME
    local('lessc {0}/static/css/bootstrap.less'
          ' {0}/static/css/bootstrap.css'.format(project))
    if not responsive:
        return
    local('lessc {0}/static/css/responsive.less'
          ' {0}/static/css/bootstrap-responsive.css'.format(
              project))
def rebuild():
    """
    Deletes and re-creates your DB. Needs django-extensions and South.
    """
    drop_db()
    create_db()
    # Django < 1.7 has no built-in migrations: create all tables with
    # syncdb, then fake-apply the South migrations so state matches.
    # NOTE(review): distutils' StrictVersion is deprecated — consider
    # packaging.version if the file is modernised.
    if StrictVersion(django.get_version()) < StrictVersion('1.7'):
        local('python{} manage.py syncdb --all --noinput'.format(
            PYTHON_VERSION))
        local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
    else:
        local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
    """Resets all passwords to `test1234`."""
    # Uses django-extensions' ``set_fake_passwords`` management command.
    local('python{} manage.py set_fake_passwords --password=test1234'.format(
        PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
    """
    Runs manage.py tests under coverage.

    Usage::

        fab test
        fab test:app
        fab test:app.tests.forms_tests:TestCaseName
        fab test:integration=0
        fab test:selenium=0

    :param options: Extra arguments appended verbatim to the test command.
    :param integration: Set to 0 to skip integration tests.
    :param selenium: Set to 0 to skip selenium tests.
    :param test_settings: Settings module path; defaults to
      ``settings.TEST_SETTINGS_PATH``.
    """
    if test_settings is None:
        test_settings = settings.TEST_SETTINGS_PATH
    command = ("coverage run --source='.' manage.py test -v 2" +
               " --failfast --settings={0} --pattern='*_tests.py'".format(
                   test_settings))
    # fab passes task arguments as strings, hence the int() casts.
    # NOTE(review): ``--exclude`` is not a stock Django test-runner option —
    # presumably provided by the project's custom runner; verify.
    if int(integration) == 0:
        command += " --exclude='integration_tests'"
    if int(selenium) == 0:
        command += " --exclude='selenium_tests'"
    if options:
        command += ' {0}'.format(options)
    with fab_settings(warn_only=True):
        local(command, capture=False)
    local('coverage html -d coverage --omit="{}"'.format(
        settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | create_db | python | def create_db(with_postgis=False):
local_machine()
local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
USER_AND_HOST, env.db_role, DB_PASSWORD))
local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
USER_AND_HOST, env.db_name))
if with_postgis:
local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
' IN SCHEMA public TO {1}"'.format(
USER_AND_HOST, env.db_role)) | Creates the local database.
:param with_postgis: If ``True``, the postgis extension will be installed. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L64-L83 | [
"def local_machine():\n \"\"\"Option to do something on local machine.\"\"\"\n common_conf()\n env.machine = 'local'\n env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE\n env.db_backup_dir = settings.DJANGO_PROJECT_ROOT\n env.media_backup_dir = settings.DJANGO_PROJECT_ROOT\n\n # Not sure what th... | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
"""Runs flake8, check_coverage and test."""
flake8()
syntax_check()
jshint()
test()
check_coverage()
def check_coverage():
    """Checks if the coverage is 100% by scraping coverage's index.html.

    Aborts when the total percentage is below 100.
    """
    with lcd(settings.LOCAL_COVERAGE_PATH):
        # Find the line number of the "Total" row in the HTML report.
        total_line = local('grep -n Total index.html', capture=True)
        match = re.search(r'^(\d+):', total_line)
        total_line_number = int(match.groups()[0])
        # The percentage cell sits 5 lines below the "Total" marker —
        # this offset depends on coverage.py's HTML layout; verify after
        # upgrading coverage.
        percentage_line_number = total_line_number + 5
        percentage_line = local(
            'awk NR=={0} index.html'.format(percentage_line_number),
            capture=True)
        # First try a float like "97.5%", ...
        match = re.search(r'(\d.+)%', percentage_line)
        try:
            percentage = float(match.groups()[0])
        except ValueError:
            # If there's no dotting try another search
            match = re.search(r'(\d+)%', percentage_line)
            percentage = float(match.groups()[0])
        if percentage < 100:
            abort(red('Coverage is {0}%'.format(percentage)))
        print(green('Coverage is {0}%'.format(percentage)))
def delete_db():
"""
Deletes all data in the database.
You need django-extensions in order to use this.
However, please note that this is not as thorough as a real database drop.
"""
local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
"""
Exports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:<db_role>:<password>
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab export_db
fab export_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
if remote:
backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
else:
backup_dir = ''
local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
"""Drops the local database."""
local_machine()
with fab_settings(warn_only=True):
local('psql {0} -c "DROP DATABASE {1}"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "DROP USER {1}"'.format(
USER_AND_HOST, env.db_role))
def jshint():
"""Runs jshint checks."""
with fab_settings(warn_only=True):
needs_to_abort = False
# because jshint fails with exit code 2, we need to allow this as
# a successful exit code in our env
if 2 not in env.ok_ret_codes:
env.ok_ret_codes.append(2)
output = local(
'find -name "{}" -print'.format('*.js'),
capture=True,
)
files = output.split()
jshint_installed = local('command -v jshint', capture=True)
if not jshint_installed.succeeded:
warn(red(
"To enable an extended check of your js files, please"
" install jshint by entering:\n\n npm install -g jshint"
))
else:
for file in files:
if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
excludes = settings.JSHINT_CHECK_EXCLUDES
else:
excludes = settings.SYNTAX_CHECK_EXCLUDES
if any(s in file for s in excludes):
continue
jshint_result = local(
'jshint {0}'.format(file),
capture=True
)
if jshint_result:
warn(red('JS errors detected in file {0}'.format(
file
)))
puts(jshint_result)
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('jshint found no errors. Very good!'))
def syntax_check():
"""Runs flake8 against the codebase."""
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_db(filename=None):
"""
Imports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:publishizer_publishizer:publishizer
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab import_db
fab import_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
with fab_settings(warn_only=True):
local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
"""
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
project_root, filename)).failed
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in'
' {0}. We need a file called {1} in that folder.'.format(
project_root, filename)))
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
"""
Compiles all less files.
This is useful if you are using the Twitter Bootstrap Framework.
"""
local('lessc {0}/static/css/bootstrap.less'
' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
if responsive:
local('lessc {0}/static/css/responsive.less'
' {0}/static/css/bootstrap-responsive.css'.format(
settings.PROJECT_NAME))
def rebuild():
"""
Deletes and re-creates your DB. Needs django-extensions and South.
"""
drop_db()
create_db()
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
local('python{} manage.py syncdb --all --noinput'.format(
PYTHON_VERSION))
local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
else:
local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
"""Resets all passwords to `test123`."""
local('python{} manage.py set_fake_passwords --password=test1234'.format(
PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
"""
Runs manage.py tests.
Usage::
fab test
fab test:app
fab test:app.tests.forms_tests:TestCaseName
fab test:integration=0
fab test:selenium=0
"""
if test_settings is None:
test_settings = settings.TEST_SETTINGS_PATH
command = ("coverage run --source='.' manage.py test -v 2" +
" --failfast --settings={0} --pattern='*_tests.py'".format(
test_settings))
if int(integration) == 0:
command += " --exclude='integration_tests'"
if int(selenium) == 0:
command += " --exclude='selenium_tests'"
if options:
command += ' {0}'.format(options)
with fab_settings(warn_only=True):
local(command, capture=False)
local('coverage html -d coverage --omit="{}"'.format(
settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | export_db | python | def export_db(filename=None, remote=False):
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
if remote:
backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
else:
backup_dir = ''
local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
env.db_role, HOST, env.db_name, backup_dir, filename)) | Exports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:<db_role>:<password>
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab export_db
fab export_db:filename=foobar.dump | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L97-L122 | [
"def local_machine():\n \"\"\"Option to do something on local machine.\"\"\"\n common_conf()\n env.machine = 'local'\n env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE\n env.db_backup_dir = settings.DJANGO_PROJECT_ROOT\n env.media_backup_dir = settings.DJANGO_PROJECT_ROOT\n\n # Not sure what th... | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
"""Runs flake8, check_coverage and test."""
flake8()
syntax_check()
jshint()
test()
check_coverage()
def check_coverage():
"""Checks if the coverage is 100%."""
with lcd(settings.LOCAL_COVERAGE_PATH):
total_line = local('grep -n Total index.html', capture=True)
match = re.search(r'^(\d+):', total_line)
total_line_number = int(match.groups()[0])
percentage_line_number = total_line_number + 5
percentage_line = local(
'awk NR=={0} index.html'.format(percentage_line_number),
capture=True)
match = re.search(r'(\d.+)%', percentage_line)
try:
percentage = float(match.groups()[0])
except ValueError:
# If there's no dotting try another search
match = re.search(r'(\d+)%', percentage_line)
percentage = float(match.groups()[0])
if percentage < 100:
abort(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
"""
Creates the local database.
:param with_postgis: If ``True``, the postgis extension will be installed.
"""
local_machine()
local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
USER_AND_HOST, env.db_role, DB_PASSWORD))
local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
USER_AND_HOST, env.db_name))
if with_postgis:
local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
' IN SCHEMA public TO {1}"'.format(
USER_AND_HOST, env.db_role))
def delete_db():
"""
Deletes all data in the database.
You need django-extensions in order to use this.
However, please note that this is not as thorough as a real database drop.
"""
local(' ./manage.py reset_db --router=default --noinput')
def drop_db():
"""Drops the local database."""
local_machine()
with fab_settings(warn_only=True):
local('psql {0} -c "DROP DATABASE {1}"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "DROP USER {1}"'.format(
USER_AND_HOST, env.db_role))
def jshint():
"""Runs jshint checks."""
with fab_settings(warn_only=True):
needs_to_abort = False
# because jshint fails with exit code 2, we need to allow this as
# a successful exit code in our env
if 2 not in env.ok_ret_codes:
env.ok_ret_codes.append(2)
output = local(
'find -name "{}" -print'.format('*.js'),
capture=True,
)
files = output.split()
jshint_installed = local('command -v jshint', capture=True)
if not jshint_installed.succeeded:
warn(red(
"To enable an extended check of your js files, please"
" install jshint by entering:\n\n npm install -g jshint"
))
else:
for file in files:
if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
excludes = settings.JSHINT_CHECK_EXCLUDES
else:
excludes = settings.SYNTAX_CHECK_EXCLUDES
if any(s in file for s in excludes):
continue
jshint_result = local(
'jshint {0}'.format(file),
capture=True
)
if jshint_result:
warn(red('JS errors detected in file {0}'.format(
file
)))
puts(jshint_result)
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('jshint found no errors. Very good!'))
def syntax_check():
"""Runs flake8 against the codebase."""
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_db(filename=None):
"""
Imports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:publishizer_publishizer:publishizer
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab import_db
fab import_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
with fab_settings(warn_only=True):
local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
"""
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
project_root, filename)).failed
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in'
' {0}. We need a file called {1} in that folder.'.format(
project_root, filename)))
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
"""
Compiles all less files.
This is useful if you are using the Twitter Bootstrap Framework.
"""
local('lessc {0}/static/css/bootstrap.less'
' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
if responsive:
local('lessc {0}/static/css/responsive.less'
' {0}/static/css/bootstrap-responsive.css'.format(
settings.PROJECT_NAME))
def rebuild():
"""
Deletes and re-creates your DB. Needs django-extensions and South.
"""
drop_db()
create_db()
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
local('python{} manage.py syncdb --all --noinput'.format(
PYTHON_VERSION))
local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
else:
local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
"""Resets all passwords to `test123`."""
local('python{} manage.py set_fake_passwords --password=test1234'.format(
PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
"""
Runs manage.py tests.
Usage::
fab test
fab test:app
fab test:app.tests.forms_tests:TestCaseName
fab test:integration=0
fab test:selenium=0
"""
if test_settings is None:
test_settings = settings.TEST_SETTINGS_PATH
command = ("coverage run --source='.' manage.py test -v 2" +
" --failfast --settings={0} --pattern='*_tests.py'".format(
test_settings))
if int(integration) == 0:
command += " --exclude='integration_tests'"
if int(selenium) == 0:
command += " --exclude='selenium_tests'"
if options:
command += ' {0}'.format(options)
with fab_settings(warn_only=True):
local(command, capture=False)
local('coverage html -d coverage --omit="{}"'.format(
settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | drop_db | python | def drop_db():
local_machine()
with fab_settings(warn_only=True):
local('psql {0} -c "DROP DATABASE {1}"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "DROP USER {1}"'.format(
USER_AND_HOST, env.db_role)) | Drops the local database. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L125-L132 | [
"def local_machine():\n \"\"\"Option to do something on local machine.\"\"\"\n common_conf()\n env.machine = 'local'\n env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE\n env.db_backup_dir = settings.DJANGO_PROJECT_ROOT\n env.media_backup_dir = settings.DJANGO_PROJECT_ROOT\n\n # Not sure what th... | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
"""Runs flake8, check_coverage and test."""
flake8()
syntax_check()
jshint()
test()
check_coverage()
def check_coverage():
"""Checks if the coverage is 100%."""
with lcd(settings.LOCAL_COVERAGE_PATH):
total_line = local('grep -n Total index.html', capture=True)
match = re.search(r'^(\d+):', total_line)
total_line_number = int(match.groups()[0])
percentage_line_number = total_line_number + 5
percentage_line = local(
'awk NR=={0} index.html'.format(percentage_line_number),
capture=True)
match = re.search(r'(\d.+)%', percentage_line)
try:
percentage = float(match.groups()[0])
except ValueError:
# If there's no dotting try another search
match = re.search(r'(\d+)%', percentage_line)
percentage = float(match.groups()[0])
if percentage < 100:
abort(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
"""
Creates the local database.
:param with_postgis: If ``True``, the postgis extension will be installed.
"""
local_machine()
local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
USER_AND_HOST, env.db_role, DB_PASSWORD))
local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
USER_AND_HOST, env.db_name))
if with_postgis:
local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
' IN SCHEMA public TO {1}"'.format(
USER_AND_HOST, env.db_role))
def delete_db():
"""
Deletes all data in the database.
You need django-extensions in order to use this.
However, please note that this is not as thorough as a real database drop.
"""
local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
"""
Exports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:<db_role>:<password>
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab export_db
fab export_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
if remote:
backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
else:
backup_dir = ''
local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
env.db_role, HOST, env.db_name, backup_dir, filename))
def jshint():
"""Runs jshint checks."""
with fab_settings(warn_only=True):
needs_to_abort = False
# because jshint fails with exit code 2, we need to allow this as
# a successful exit code in our env
if 2 not in env.ok_ret_codes:
env.ok_ret_codes.append(2)
output = local(
'find -name "{}" -print'.format('*.js'),
capture=True,
)
files = output.split()
jshint_installed = local('command -v jshint', capture=True)
if not jshint_installed.succeeded:
warn(red(
"To enable an extended check of your js files, please"
" install jshint by entering:\n\n npm install -g jshint"
))
else:
for file in files:
if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
excludes = settings.JSHINT_CHECK_EXCLUDES
else:
excludes = settings.SYNTAX_CHECK_EXCLUDES
if any(s in file for s in excludes):
continue
jshint_result = local(
'jshint {0}'.format(file),
capture=True
)
if jshint_result:
warn(red('JS errors detected in file {0}'.format(
file
)))
puts(jshint_result)
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('jshint found no errors. Very good!'))
def syntax_check():
"""Runs flake8 against the codebase."""
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_db(filename=None):
"""
Imports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:publishizer_publishizer:publishizer
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab import_db
fab import_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
with fab_settings(warn_only=True):
local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
"""
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
project_root, filename)).failed
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in'
' {0}. We need a file called {1} in that folder.'.format(
project_root, filename)))
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
"""
Compiles all less files.
This is useful if you are using the Twitter Bootstrap Framework.
"""
local('lessc {0}/static/css/bootstrap.less'
' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
if responsive:
local('lessc {0}/static/css/responsive.less'
' {0}/static/css/bootstrap-responsive.css'.format(
settings.PROJECT_NAME))
def rebuild():
"""
Deletes and re-creates your DB. Needs django-extensions and South.
"""
drop_db()
create_db()
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
local('python{} manage.py syncdb --all --noinput'.format(
PYTHON_VERSION))
local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
else:
local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
"""Resets all passwords to `test123`."""
local('python{} manage.py set_fake_passwords --password=test1234'.format(
PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
"""
Runs manage.py tests.
Usage::
fab test
fab test:app
fab test:app.tests.forms_tests:TestCaseName
fab test:integration=0
fab test:selenium=0
"""
if test_settings is None:
test_settings = settings.TEST_SETTINGS_PATH
command = ("coverage run --source='.' manage.py test -v 2" +
" --failfast --settings={0} --pattern='*_tests.py'".format(
test_settings))
if int(integration) == 0:
command += " --exclude='integration_tests'"
if int(selenium) == 0:
command += " --exclude='selenium_tests'"
if options:
command += ' {0}'.format(options)
with fab_settings(warn_only=True):
local(command, capture=False)
local('coverage html -d coverage --omit="{}"'.format(
settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | jshint | python | def jshint():
with fab_settings(warn_only=True):
needs_to_abort = False
# because jshint fails with exit code 2, we need to allow this as
# a successful exit code in our env
if 2 not in env.ok_ret_codes:
env.ok_ret_codes.append(2)
output = local(
'find -name "{}" -print'.format('*.js'),
capture=True,
)
files = output.split()
jshint_installed = local('command -v jshint', capture=True)
if not jshint_installed.succeeded:
warn(red(
"To enable an extended check of your js files, please"
" install jshint by entering:\n\n npm install -g jshint"
))
else:
for file in files:
if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
excludes = settings.JSHINT_CHECK_EXCLUDES
else:
excludes = settings.SYNTAX_CHECK_EXCLUDES
if any(s in file for s in excludes):
continue
jshint_result = local(
'jshint {0}'.format(file),
capture=True
)
if jshint_result:
warn(red('JS errors detected in file {0}'.format(
file
)))
puts(jshint_result)
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('jshint found no errors. Very good!')) | Runs jshint checks. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L135-L176 | null | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
"""Runs flake8, check_coverage and test."""
flake8()
syntax_check()
jshint()
test()
check_coverage()
def check_coverage():
"""Checks if the coverage is 100%."""
with lcd(settings.LOCAL_COVERAGE_PATH):
total_line = local('grep -n Total index.html', capture=True)
match = re.search(r'^(\d+):', total_line)
total_line_number = int(match.groups()[0])
percentage_line_number = total_line_number + 5
percentage_line = local(
'awk NR=={0} index.html'.format(percentage_line_number),
capture=True)
match = re.search(r'(\d.+)%', percentage_line)
try:
percentage = float(match.groups()[0])
except ValueError:
# If there's no dotting try another search
match = re.search(r'(\d+)%', percentage_line)
percentage = float(match.groups()[0])
if percentage < 100:
abort(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
"""
Creates the local database.
:param with_postgis: If ``True``, the postgis extension will be installed.
"""
local_machine()
local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
USER_AND_HOST, env.db_role, DB_PASSWORD))
local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
USER_AND_HOST, env.db_name))
if with_postgis:
local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
' IN SCHEMA public TO {1}"'.format(
USER_AND_HOST, env.db_role))
def delete_db():
"""
Deletes all data in the database.
You need django-extensions in order to use this.
However, please note that this is not as thorough as a real database drop.
"""
local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
"""
Exports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:<db_role>:<password>
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab export_db
fab export_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
if remote:
backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
else:
backup_dir = ''
local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
"""Drops the local database."""
local_machine()
with fab_settings(warn_only=True):
local('psql {0} -c "DROP DATABASE {1}"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "DROP USER {1}"'.format(
USER_AND_HOST, env.db_role))
def syntax_check():
    """Greps the codebase for forbidden patterns per file type.

    For each glob in ``settings.SYNTAX_CHECK`` (e.g. ``*.py``), egreps
    every matching file for that entry's forbidden pattern, skipping
    paths in ``settings.SYNTAX_CHECK_EXCLUDES``. Aborts on any hit.
    (Note: this is a pattern grep, not flake8 — see ``flake8()``.)
    """
    with fab_settings(warn_only=True):
        for file_type in settings.SYNTAX_CHECK:
            needs_to_abort = False
            # because egrep fails with exit code 1, we need to allow this as
            # a successful exit code in our env
            if 1 not in env.ok_ret_codes:
                env.ok_ret_codes.append(1)
            output = local(
                'find -name "{}" -print'.format(file_type),
                capture=True,
            )
            files = output.split()
            for file in files:
                if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
                    continue
                result = local('egrep -i -n "{0}" {1}'.format(
                    settings.SYNTAX_CHECK[file_type], file), capture=True)
                if result:
                    warn(red("Syntax check found in '{0}': {1}".format(
                        file, result)))
                    needs_to_abort = True
            if needs_to_abort:
                abort(red('There have been errors. Please fix them and run'
                          ' the check again.'))
            else:
                puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_db(filename=None):
"""
Imports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:publishizer_publishizer:publishizer
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab import_db
fab import_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
with fab_settings(warn_only=True):
local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
"""
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
project_root, filename)).failed
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in'
' {0}. We need a file called {1} in that folder.'.format(
project_root, filename)))
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
"""
Compiles all less files.
This is useful if you are using the Twitter Bootstrap Framework.
"""
local('lessc {0}/static/css/bootstrap.less'
' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
if responsive:
local('lessc {0}/static/css/responsive.less'
' {0}/static/css/bootstrap-responsive.css'.format(
settings.PROJECT_NAME))
def rebuild():
"""
Deletes and re-creates your DB. Needs django-extensions and South.
"""
drop_db()
create_db()
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
local('python{} manage.py syncdb --all --noinput'.format(
PYTHON_VERSION))
local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
else:
local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
"""Resets all passwords to `test123`."""
local('python{} manage.py set_fake_passwords --password=test1234'.format(
PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
"""
Runs manage.py tests.
Usage::
fab test
fab test:app
fab test:app.tests.forms_tests:TestCaseName
fab test:integration=0
fab test:selenium=0
"""
if test_settings is None:
test_settings = settings.TEST_SETTINGS_PATH
command = ("coverage run --source='.' manage.py test -v 2" +
" --failfast --settings={0} --pattern='*_tests.py'".format(
test_settings))
if int(integration) == 0:
command += " --exclude='integration_tests'"
if int(selenium) == 0:
command += " --exclude='selenium_tests'"
if options:
command += ' {0}'.format(options)
with fab_settings(warn_only=True):
local(command, capture=False)
local('coverage html -d coverage --omit="{}"'.format(
settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | syntax_check | python | def syntax_check():
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!')) | Runs flake8 against the codebase. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L179-L206 | null | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
    """Run the full local QA suite: lint, syntax/js checks, tests, coverage."""
    # Same order as always: static checks first, then the test run, and
    # finally the coverage-threshold check on the test run's output.
    for task in (flake8, syntax_check, jshint, test, check_coverage):
        task()
def check_coverage():
    """Abort unless the HTML coverage report shows 100% total coverage.

    Greps the coverage ``index.html`` for the "Total" row and reads the
    percentage five lines below it (where coverage.py's HTML report puts
    it). Aborts with a red message when coverage is below 100%, or when
    the report cannot be parsed.
    """
    with lcd(settings.LOCAL_COVERAGE_PATH):
        total_line = local('grep -n Total index.html', capture=True)
        match = re.search(r'^(\d+):', total_line)
        if match is None:
            abort(red('Could not find a "Total" row in index.html'))
        total_line_number = int(match.groups()[0])
        # The total percentage sits five lines below the "Total" label in
        # the generated HTML report.
        percentage_line_number = total_line_number + 5
        percentage_line = local(
            'awk NR=={0} index.html'.format(percentage_line_number),
            capture=True)
        # One pattern covers both "85%" and "85.5%". The old two-step
        # fallback raised AttributeError (not the caught ValueError) when
        # the first pattern found no match at all, e.g. for "5%".
        match = re.search(r'(\d+(?:\.\d+)?)%', percentage_line)
        if match is None:
            abort(red('Could not parse coverage percentage from report'))
        percentage = float(match.groups()[0])
        if percentage < 100:
            abort(red('Coverage is {0}%'.format(percentage)))
        print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
    """
    Creates the local database.

    Connects as the admin role from ``USER_AND_HOST``, creates the
    project's DB role and database, then grants that role full access.

    :param with_postgis: If ``True``, the postgis extension will be installed.
    """
    local_machine()
    # Create the role first: the grants below reference env.db_role.
    local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
        USER_AND_HOST, env.db_role, DB_PASSWORD))
    local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
        USER_AND_HOST, env.db_name))
    if with_postgis:
        # The extension must be created inside the target database.
        local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
            USER_AND_HOST, env.db_name))
    local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
          ' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
    local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
          ' IN SCHEMA public TO {1}"'.format(
              USER_AND_HOST, env.db_role))
def delete_db():
    """
    Deletes all data in the database.

    You need django-extensions in order to use this.

    However, please note that this is not as thorough as a real database drop.
    """
    # reset_db (django-extensions) recreates the tables; the database and
    # its role stay in place, unlike drop_db().
    local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
    """
    Exports the database as a pg_dump custom-format archive.

    Make sure that you have this in your ``~/.pgpass`` file:

    localhost:5433:*:<db_role>:<password>

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab export_db
        fab export_db:filename=foobar.dump

    :param filename: Dump file name; defaults to ``settings.DB_DUMP_FILENAME``.
    :param remote: If truthy, prefix the dump path with the server's
        backup directory instead of writing to the current directory.
    """
    local_machine()
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    if remote:
        backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
    else:
        backup_dir = ''
    # -c emits clean (DROP) statements, -Fc is the custom archive format,
    # -O skips ownership commands so the dump restores under any role.
    local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
        env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
    """Drops the local database and its role."""
    local_machine()
    # warn_only: dropping a database/user that does not exist should only
    # print a warning, not abort the task.
    with fab_settings(warn_only=True):
        local('psql {0} -c "DROP DATABASE {1}"'.format(
            USER_AND_HOST, env.db_name))
        local('psql {0} -c "DROP USER {1}"'.format(
            USER_AND_HOST, env.db_role))
def jshint():
    """Runs jshint over every ``*.js`` file found under the project.

    Files matching ``settings.JSHINT_CHECK_EXCLUDES`` (or, when that
    setting is absent, ``settings.SYNTAX_CHECK_EXCLUDES``) are skipped.
    Aborts when any checked file produces jshint output.
    """
    with fab_settings(warn_only=True):
        needs_to_abort = False
        # because jshint fails with exit code 2, we need to allow this as
        # a successful exit code in our env
        if 2 not in env.ok_ret_codes:
            env.ok_ret_codes.append(2)
        output = local(
            'find -name "{}" -print'.format('*.js'),
            capture=True,
        )
        files = output.split()
        jshint_installed = local('command -v jshint', capture=True)
        if not jshint_installed.succeeded:
            warn(red(
                "To enable an extended check of your js files, please"
                " install jshint by entering:\n\n npm install -g jshint"
            ))
        else:
            for file in files:
                if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
                    excludes = settings.JSHINT_CHECK_EXCLUDES
                else:
                    excludes = settings.SYNTAX_CHECK_EXCLUDES
                if any(s in file for s in excludes):
                    continue
                jshint_result = local(
                    'jshint {0}'.format(file),
                    capture=True
                )
                # NOTE(review): any captured output is treated as an error
                # report — presumably jshint prints only on problems; confirm.
                if jshint_result:
                    warn(red('JS errors detected in file {0}'.format(
                        file
                    )))
                    puts(jshint_result)
                    needs_to_abort = True
            if needs_to_abort:
                abort(red('There have been errors. Please fix them and run'
                          ' the check again.'))
            else:
                puts(green('jshint found no errors. Very good!'))
def flake8():
    """Lint the whole project with flake8 and return the command result."""
    command = (
        'flake8 --ignore=E126 --statistics '
        '--exclude=submodules,south_migrations,migrations,'
        'node_modules .'
    )
    return local(command)
def import_db(filename=None):
    """
    Imports the database.

    Make sure that you have this in your ``~/.pgpass`` file:

    localhost:5433:*:publishizer_publishizer:publishizer

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab import_db
        fab import_db:filename=foobar.dump

    :param filename: Dump file to restore; defaults to
        ``settings.DB_DUMP_FILENAME``.
    """
    local_machine()
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    # warn_only: pg_restore -c drops objects before recreating them and
    # complains when they don't exist yet (e.g. on a fresh DB) — that
    # should not abort the import.
    with fab_settings(warn_only=True):
        local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
            env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
    """
    Extracts media dump into your local media root.

    Please note that this might overwrite existing local files.

    Usage::

        fab import_media
        fab import_media:filename=foobar.tar.gz

    :param filename: Tarball to extract; defaults to
        ``settings.MEDIA_DUMP_FILENAME``.
    """
    if not filename:
        filename = settings.MEDIA_DUMP_FILENAME
    project_root = os.getcwd()
    # Probe for the dump quietly; `.failed` is True when `test -e` fails.
    with fab_settings(hide('everything'), warn_only=True):
        is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
            project_root, filename)).failed
    if is_backup_missing:
        abort(red('ERROR: There is no media backup that could be imported in'
                  ' {0}. We need a file called {1} in that folder.'.format(
                      project_root, filename)))
    # copy the dump into the media root folder
    with lcd(project_root):
        local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
    # extract and remove media dump
    with lcd(settings.MEDIA_ROOT):
        local('tar -xvf {0}'.format(filename))
        local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
    """
    Compiles all less files.

    This is useful if you are using the Twitter Bootstrap Framework.

    :param responsive: If truthy, also compile the responsive stylesheet.
    """
    local('lessc {0}/static/css/bootstrap.less'
          ' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
    if responsive:
        local('lessc {0}/static/css/responsive.less'
              ' {0}/static/css/bootstrap-responsive.css'.format(
                  settings.PROJECT_NAME))
def rebuild():
    """
    Deletes and re-creates your DB. Needs django-extensions and South.
    """
    drop_db()
    create_db()
    # Pre-1.7 Django has no built-in migrations: sync all apps, then fake
    # the South migrations so they are marked as applied.
    if StrictVersion(django.get_version()) < StrictVersion('1.7'):
        local('python{} manage.py syncdb --all --noinput'.format(
            PYTHON_VERSION))
        local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
    else:
        local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
    """Resets all passwords to `test1234`."""
    # set_fake_passwords is a django-extensions management command. The
    # password below is what the docstring promises — keep them in sync.
    local('python{} manage.py set_fake_passwords --password=test1234'.format(
        PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
    """
    Runs manage.py tests.

    Usage::

        fab test
        fab test:app
        fab test:app.tests.forms_tests:TestCaseName
        fab test:integration=0
        fab test:selenium=0

    :param options: Extra arguments appended verbatim to the test command.
    :param integration: Set to 0 to exclude integration tests.
    :param selenium: Set to 0 to exclude selenium tests.
    :param test_settings: Settings module path; defaults to
        ``settings.TEST_SETTINGS_PATH``.
    """
    if test_settings is None:
        test_settings = settings.TEST_SETTINGS_PATH
    # Fabric passes task arguments as strings, hence the int() casts below.
    command = ("coverage run --source='.' manage.py test -v 2" +
               " --failfast --settings={0} --pattern='*_tests.py'".format(
                   test_settings))
    if int(integration) == 0:
        command += " --exclude='integration_tests'"
    if int(selenium) == 0:
        command += " --exclude='selenium_tests'"
    if options:
        command += ' {0}'.format(options)
    # warn_only so the HTML report below is rendered even when tests fail.
    with fab_settings(warn_only=True):
        local(command, capture=False)
    local('coverage html -d coverage --omit="{}"'.format(
        settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | import_db | python | def import_db(filename=None):
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
with fab_settings(warn_only=True):
local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
env.db_role, HOST, env.db_name, filename)) | Imports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:publishizer_publishizer:publishizer
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab import_db
fab import_db:filename=foobar.dump | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L216-L237 | [
"def local_machine():\n \"\"\"Option to do something on local machine.\"\"\"\n common_conf()\n env.machine = 'local'\n env.pg_admin_role = settings.LOCAL_PG_ADMIN_ROLE\n env.db_backup_dir = settings.DJANGO_PROJECT_ROOT\n env.media_backup_dir = settings.DJANGO_PROJECT_ROOT\n\n # Not sure what th... | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
"""Runs flake8, check_coverage and test."""
flake8()
syntax_check()
jshint()
test()
check_coverage()
def check_coverage():
"""Checks if the coverage is 100%."""
with lcd(settings.LOCAL_COVERAGE_PATH):
total_line = local('grep -n Total index.html', capture=True)
match = re.search(r'^(\d+):', total_line)
total_line_number = int(match.groups()[0])
percentage_line_number = total_line_number + 5
percentage_line = local(
'awk NR=={0} index.html'.format(percentage_line_number),
capture=True)
match = re.search(r'(\d.+)%', percentage_line)
try:
percentage = float(match.groups()[0])
except ValueError:
# If there's no dotting try another search
match = re.search(r'(\d+)%', percentage_line)
percentage = float(match.groups()[0])
if percentage < 100:
abort(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
"""
Creates the local database.
:param with_postgis: If ``True``, the postgis extension will be installed.
"""
local_machine()
local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
USER_AND_HOST, env.db_role, DB_PASSWORD))
local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
USER_AND_HOST, env.db_name))
if with_postgis:
local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
' IN SCHEMA public TO {1}"'.format(
USER_AND_HOST, env.db_role))
def delete_db():
"""
Deletes all data in the database.
You need django-extensions in order to use this.
However, please note that this is not as thorough as a real database drop.
"""
local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
"""
Exports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:<db_role>:<password>
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab export_db
fab export_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
if remote:
backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
else:
backup_dir = ''
local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
"""Drops the local database."""
local_machine()
with fab_settings(warn_only=True):
local('psql {0} -c "DROP DATABASE {1}"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "DROP USER {1}"'.format(
USER_AND_HOST, env.db_role))
def jshint():
"""Runs jshint checks."""
with fab_settings(warn_only=True):
needs_to_abort = False
# because jshint fails with exit code 2, we need to allow this as
# a successful exit code in our env
if 2 not in env.ok_ret_codes:
env.ok_ret_codes.append(2)
output = local(
'find -name "{}" -print'.format('*.js'),
capture=True,
)
files = output.split()
jshint_installed = local('command -v jshint', capture=True)
if not jshint_installed.succeeded:
warn(red(
"To enable an extended check of your js files, please"
" install jshint by entering:\n\n npm install -g jshint"
))
else:
for file in files:
if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
excludes = settings.JSHINT_CHECK_EXCLUDES
else:
excludes = settings.SYNTAX_CHECK_EXCLUDES
if any(s in file for s in excludes):
continue
jshint_result = local(
'jshint {0}'.format(file),
capture=True
)
if jshint_result:
warn(red('JS errors detected in file {0}'.format(
file
)))
puts(jshint_result)
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('jshint found no errors. Very good!'))
def syntax_check():
"""Runs flake8 against the codebase."""
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_media(filename=None):
    """
    Extracts media dump into your local media root.

    Please note that this might overwrite existing local files.

    Usage::

        fab import_media
        fab import_media:filename=foobar.tar.gz

    """
    filename = filename or settings.MEDIA_DUMP_FILENAME
    cwd = os.getcwd()
    dump_path = os.path.join(cwd, filename)
    # probe for the dump file quietly; warn_only so a missing file does not
    # abort the fabric run before we can print our own error message
    with fab_settings(hide('everything'), warn_only=True):
        is_backup_missing = local(
            'test -e "$(echo %s)"' % dump_path).failed
    if is_backup_missing:
        abort(red('ERROR: There is no media backup that could be imported in'
                  ' {0}. We need a file called {1} in that folder.'.format(
                      cwd, filename)))
    # copy the dump into the media root folder
    with lcd(cwd):
        local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
    # extract and remove media dump
    with lcd(settings.MEDIA_ROOT):
        local('tar -xvf {0}'.format(filename))
        local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
    """
    Compiles all less files.

    This is useful if you are using the Twitter Bootstrap Framework.

    """
    pairs = [('bootstrap.less', 'bootstrap.css')]
    if responsive:
        pairs.append(('responsive.less', 'bootstrap-responsive.css'))
    for source, target in pairs:
        local('lessc {0}/static/css/{1}'
              ' {0}/static/css/{2}'.format(
                  settings.PROJECT_NAME, source, target))
def rebuild():
    """
    Deletes and re-creates your DB. Needs django-extensions and South.

    """
    drop_db()
    create_db()
    legacy_django = (
        StrictVersion(django.get_version()) < StrictVersion('1.7'))
    if legacy_django:
        # pre-1.7: syncdb creates the tables, then mark migrations applied
        local('python{} manage.py syncdb --all --noinput'.format(
            PYTHON_VERSION))
        local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
    else:
        local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
    """Resets all passwords to `test1234`."""
    local('python{} manage.py set_fake_passwords --password=test1234'.format(
        PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
    """
    Runs manage.py tests.

    Usage::

        fab test
        fab test:app
        fab test:app.tests.forms_tests:TestCaseName
        fab test:integration=0
        fab test:selenium=0

    """
    if test_settings is None:
        test_settings = settings.TEST_SETTINGS_PATH
    parts = [
        "coverage run --source='.' manage.py test -v 2",
        " --failfast --settings={0} --pattern='*_tests.py'".format(
            test_settings),
    ]
    if int(integration) == 0:
        parts.append(" --exclude='integration_tests'")
    if int(selenium) == 0:
        parts.append(" --exclude='selenium_tests'")
    if options:
        parts.append(' {0}'.format(options))
    command = ''.join(parts)
    with fab_settings(warn_only=True):
        local(command, capture=False)
    local('coverage html -d coverage --omit="{}"'.format(
        settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | import_media | python | def import_media(filename=None):
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
project_root, filename)).failed
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in'
' {0}. We need a file called {1} in that folder.'.format(
project_root, filename)))
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename)) | Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L240-L272 | null | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
    """Runs flake8, the syntax/jshint checks, the tests and check_coverage."""
    for task in (flake8, syntax_check, jshint, test, check_coverage):
        task()
def check_coverage():
    """Checks if the coverage is 100%.

    Scrapes coverage.py's ``index.html`` report in
    ``settings.LOCAL_COVERAGE_PATH`` and aborts when the total percentage
    is below 100.  NOTE(review): this depends on the exact HTML layout the
    installed coverage version emits -- verify after coverage upgrades.
    """
    with lcd(settings.LOCAL_COVERAGE_PATH):
        # grep -n prefixes the match with its line number, e.g. "42:<td>Total"
        total_line = local('grep -n Total index.html', capture=True)
        match = re.search(r'^(\d+):', total_line)
        total_line_number = int(match.groups()[0])
        # the percentage cell sits a fixed number of lines below the
        # "Total" row in the generated report
        percentage_line_number = total_line_number + 5
        percentage_line = local(
            'awk NR=={0} index.html'.format(percentage_line_number),
            capture=True)
        match = re.search(r'(\d.+)%', percentage_line)
        try:
            percentage = float(match.groups()[0])
        except ValueError:
            # If there's no dotting try another search
            match = re.search(r'(\d+)%', percentage_line)
            percentage = float(match.groups()[0])
        if percentage < 100:
            abort(red('Coverage is {0}%'.format(percentage)))
        print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
    """
    Creates the local database.

    Creates the role (password taken from Django's ``DATABASES`` setting),
    the UTF8 database, and grants the role full privileges.

    :param with_postgis: If ``True``, the postgis extension will be installed.

    """
    local_machine()
    local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
        USER_AND_HOST, env.db_role, DB_PASSWORD))
    local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
        USER_AND_HOST, env.db_name))
    if with_postgis:
        # extension must be created inside the new database itself
        local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
            USER_AND_HOST, env.db_name))
    local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
          ' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
    local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
          ' IN SCHEMA public TO {1}"'.format(
              USER_AND_HOST, env.db_role))
def delete_db():
    """
    Deletes all data in the database.

    You need django-extensions in order to use this.
    However, please note that this is not as thorough as a real database drop.

    """
    # reset_db is provided by django-extensions; the leading space in the
    # command string is preserved as-is (harmless to the shell)
    local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
    """
    Exports the database.

    Make sure that you have this in your ``~/.pgpass`` file:

        localhost:5433:*:<db_role>:<password>

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab export_db
        fab export_db:filename=foobar.dump

    """
    local_machine()
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    if remote:
        # NOTE(review): FAB_SETTING appears to be a callable settings
        # accessor -- confirm SERVER_DB_BACKUP_DIR resolves with a trailing
        # slash, since it is concatenated directly with the filename below.
        backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
    else:
        backup_dir = ''
    # -Fc: custom-format dump, -O: skip ownership, -c: emit clean statements
    local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
        env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
    """Drops the local database."""
    local_machine()
    # warn_only: dropping a database/user that does not exist must not abort
    with fab_settings(warn_only=True):
        for statement in ('DROP DATABASE {0}'.format(env.db_name),
                          'DROP USER {0}'.format(env.db_role)):
            local('psql {0} -c "{1}"'.format(USER_AND_HOST, statement))
def jshint():
    """Runs jshint checks.

    Finds all ``*.js`` files, skips those matching
    ``settings.JSHINT_CHECK_EXCLUDES`` (falling back to
    ``settings.SYNTAX_CHECK_EXCLUDES``) and runs jshint on each.  Aborts when
    jshint reports any error; only warns when jshint itself is not installed.
    """
    with fab_settings(warn_only=True):
        needs_to_abort = False
        # because jshint fails with exit code 2, we need to allow this as
        # a successful exit code in our env
        if 2 not in env.ok_ret_codes:
            env.ok_ret_codes.append(2)
        output = local(
            'find -name "{}" -print'.format('*.js'),
            capture=True,
        )
        files = output.split()
        # `command -v` probes whether the jshint binary is on the PATH
        jshint_installed = local('command -v jshint', capture=True)
        if not jshint_installed.succeeded:
            warn(red(
                "To enable an extended check of your js files, please"
                " install jshint by entering:\n\n npm install -g jshint"
            ))
        else:
            for file in files:
                # a dedicated jshint exclude list wins over the generic one
                if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
                    excludes = settings.JSHINT_CHECK_EXCLUDES
                else:
                    excludes = settings.SYNTAX_CHECK_EXCLUDES
                if any(s in file for s in excludes):
                    continue
                # jshint only produces output when it finds problems
                jshint_result = local(
                    'jshint {0}'.format(file),
                    capture=True
                )
                if jshint_result:
                    warn(red('JS errors detected in file {0}'.format(
                        file
                    )))
                    puts(jshint_result)
                    needs_to_abort = True
            if needs_to_abort:
                abort(red('There have been errors. Please fix them and run'
                          ' the check again.'))
            else:
                puts(green('jshint found no errors. Very good!'))
def syntax_check():
"""Runs flake8 against the codebase."""
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_db(filename=None):
    """
    Imports the database.

    Make sure that you have this in your ``~/.pgpass`` file:

        localhost:5433:*:publishizer_publishizer:publishizer

    Also make sure that the file has ``chmod 0600 .pgpass``.

    Usage::

        fab import_db
        fab import_db:filename=foobar.dump

    """
    local_machine()
    if not filename:
        filename = settings.DB_DUMP_FILENAME
    # warn_only because pg_restore -c emits errors for objects that do not
    # exist yet on a fresh database; those are expected and non-fatal
    with fab_settings(warn_only=True):
        local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
            env.db_role, HOST, env.db_name, filename))
def lessc(responsive=False):
"""
Compiles all less files.
This is useful if you are using the Twitter Bootstrap Framework.
"""
local('lessc {0}/static/css/bootstrap.less'
' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
if responsive:
local('lessc {0}/static/css/responsive.less'
' {0}/static/css/bootstrap-responsive.css'.format(
settings.PROJECT_NAME))
def rebuild():
"""
Deletes and re-creates your DB. Needs django-extensions and South.
"""
drop_db()
create_db()
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
local('python{} manage.py syncdb --all --noinput'.format(
PYTHON_VERSION))
local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
else:
local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
    """Resets all passwords to `test1234`."""
    local('python{} manage.py set_fake_passwords --password=test1234'.format(
        PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
"""
Runs manage.py tests.
Usage::
fab test
fab test:app
fab test:app.tests.forms_tests:TestCaseName
fab test:integration=0
fab test:selenium=0
"""
if test_settings is None:
test_settings = settings.TEST_SETTINGS_PATH
command = ("coverage run --source='.' manage.py test -v 2" +
" --failfast --settings={0} --pattern='*_tests.py'".format(
test_settings))
if int(integration) == 0:
command += " --exclude='integration_tests'"
if int(selenium) == 0:
command += " --exclude='selenium_tests'"
if options:
command += ' {0}'.format(options)
with fab_settings(warn_only=True):
local(command, capture=False)
local('coverage html -d coverage --omit="{}"'.format(
settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | lessc | python | def lessc(responsive=False):
local('lessc {0}/static/css/bootstrap.less'
' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
if responsive:
local('lessc {0}/static/css/responsive.less'
' {0}/static/css/bootstrap-responsive.css'.format(
settings.PROJECT_NAME)) | Compiles all less files.
This is useful if you are using the Twitter Bootstrap Framework. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L275-L287 | null | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
"""Runs flake8, check_coverage and test."""
flake8()
syntax_check()
jshint()
test()
check_coverage()
def check_coverage():
"""Checks if the coverage is 100%."""
with lcd(settings.LOCAL_COVERAGE_PATH):
total_line = local('grep -n Total index.html', capture=True)
match = re.search(r'^(\d+):', total_line)
total_line_number = int(match.groups()[0])
percentage_line_number = total_line_number + 5
percentage_line = local(
'awk NR=={0} index.html'.format(percentage_line_number),
capture=True)
match = re.search(r'(\d.+)%', percentage_line)
try:
percentage = float(match.groups()[0])
except ValueError:
# If there's no dotting try another search
match = re.search(r'(\d+)%', percentage_line)
percentage = float(match.groups()[0])
if percentage < 100:
abort(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
"""
Creates the local database.
:param with_postgis: If ``True``, the postgis extension will be installed.
"""
local_machine()
local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
USER_AND_HOST, env.db_role, DB_PASSWORD))
local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
USER_AND_HOST, env.db_name))
if with_postgis:
local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
' IN SCHEMA public TO {1}"'.format(
USER_AND_HOST, env.db_role))
def delete_db():
"""
Deletes all data in the database.
You need django-extensions in order to use this.
However, please note that this is not as thorough as a real database drop.
"""
local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
"""
Exports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:<db_role>:<password>
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab export_db
fab export_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
if remote:
backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
else:
backup_dir = ''
local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
"""Drops the local database."""
local_machine()
with fab_settings(warn_only=True):
local('psql {0} -c "DROP DATABASE {1}"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "DROP USER {1}"'.format(
USER_AND_HOST, env.db_role))
def jshint():
"""Runs jshint checks."""
with fab_settings(warn_only=True):
needs_to_abort = False
# because jshint fails with exit code 2, we need to allow this as
# a successful exit code in our env
if 2 not in env.ok_ret_codes:
env.ok_ret_codes.append(2)
output = local(
'find -name "{}" -print'.format('*.js'),
capture=True,
)
files = output.split()
jshint_installed = local('command -v jshint', capture=True)
if not jshint_installed.succeeded:
warn(red(
"To enable an extended check of your js files, please"
" install jshint by entering:\n\n npm install -g jshint"
))
else:
for file in files:
if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
excludes = settings.JSHINT_CHECK_EXCLUDES
else:
excludes = settings.SYNTAX_CHECK_EXCLUDES
if any(s in file for s in excludes):
continue
jshint_result = local(
'jshint {0}'.format(file),
capture=True
)
if jshint_result:
warn(red('JS errors detected in file {0}'.format(
file
)))
puts(jshint_result)
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('jshint found no errors. Very good!'))
def syntax_check():
"""Runs flake8 against the codebase."""
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_db(filename=None):
"""
Imports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:publishizer_publishizer:publishizer
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab import_db
fab import_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
with fab_settings(warn_only=True):
local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
"""
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
project_root, filename)).failed
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in'
' {0}. We need a file called {1} in that folder.'.format(
project_root, filename)))
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename))
def rebuild():
"""
Deletes and re-creates your DB. Needs django-extensions and South.
"""
drop_db()
create_db()
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
local('python{} manage.py syncdb --all --noinput'.format(
PYTHON_VERSION))
local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
else:
local('python{} manage.py migrate'.format(PYTHON_VERSION))
def reset_passwords():
    """Resets all passwords to `test1234`."""
    local('python{} manage.py set_fake_passwords --password=test1234'.format(
        PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
"""
Runs manage.py tests.
Usage::
fab test
fab test:app
fab test:app.tests.forms_tests:TestCaseName
fab test:integration=0
fab test:selenium=0
"""
if test_settings is None:
test_settings = settings.TEST_SETTINGS_PATH
command = ("coverage run --source='.' manage.py test -v 2" +
" --failfast --settings={0} --pattern='*_tests.py'".format(
test_settings))
if int(integration) == 0:
command += " --exclude='integration_tests'"
if int(selenium) == 0:
command += " --exclude='selenium_tests'"
if options:
command += ' {0}'.format(options)
with fab_settings(warn_only=True):
local(command, capture=False)
local('coverage html -d coverage --omit="{}"'.format(
settings.COVERAGE_EXCLUDES))
|
bitlabstudio/django-development-fabfile | development_fabfile/fabfile/local.py | rebuild | python | def rebuild():
drop_db()
create_db()
if StrictVersion(django.get_version()) < StrictVersion('1.7'):
local('python{} manage.py syncdb --all --noinput'.format(
PYTHON_VERSION))
local('python{} manage.py migrate --fake'.format(PYTHON_VERSION))
else:
local('python{} manage.py migrate'.format(PYTHON_VERSION)) | Deletes and re-creates your DB. Needs django-extensions and South. | train | https://github.com/bitlabstudio/django-development-fabfile/blob/a135c6eb5bdd0b496a7eccfd271aca558dd99243/development_fabfile/fabfile/local.py#L290-L302 | [
"def create_db(with_postgis=False):\n \"\"\"\n Creates the local database.\n\n :param with_postgis: If ``True``, the postgis extension will be installed.\n\n \"\"\"\n local_machine()\n local('psql {0} -c \"CREATE USER {1} WITH PASSWORD \\'{2}\\'\"'.format(\n USER_AND_HOST, env.db_role, DB_P... | """Fabfile for tasks that only manipulate things on the local machine."""
import django
import os
import re
import sys
from django.conf import settings
from distutils.version import StrictVersion
from fabric.api import hide, lcd, local
from fabric.api import settings as fab_settings
from fabric.colors import green, red
from fabric.utils import abort, warn, puts
from fabric.state import env
from .servers import local_machine
HOST = ' -h localhost'
if settings.DATABASES['default']['HOST']:
HOST = ' -h {}'.format(settings.DATABASES['default']['HOST'])
USER_AND_HOST = '-U {0}{1}'.format(settings.LOCAL_PG_ADMIN_ROLE, HOST)
DB_PASSWORD = settings.DATABASES['default']['PASSWORD']
if settings.PYTHON_VERSION:
PYTHON_VERSION = settings.PYTHON_VERSION
else:
PYTHON_VERSION = '{}.{}'.format(
sys.version_info.major, sys.version_info.minor)
def check():
"""Runs flake8, check_coverage and test."""
flake8()
syntax_check()
jshint()
test()
check_coverage()
def check_coverage():
"""Checks if the coverage is 100%."""
with lcd(settings.LOCAL_COVERAGE_PATH):
total_line = local('grep -n Total index.html', capture=True)
match = re.search(r'^(\d+):', total_line)
total_line_number = int(match.groups()[0])
percentage_line_number = total_line_number + 5
percentage_line = local(
'awk NR=={0} index.html'.format(percentage_line_number),
capture=True)
match = re.search(r'(\d.+)%', percentage_line)
try:
percentage = float(match.groups()[0])
except ValueError:
# If there's no dotting try another search
match = re.search(r'(\d+)%', percentage_line)
percentage = float(match.groups()[0])
if percentage < 100:
abort(red('Coverage is {0}%'.format(percentage)))
print(green('Coverage is {0}%'.format(percentage)))
def create_db(with_postgis=False):
"""
Creates the local database.
:param with_postgis: If ``True``, the postgis extension will be installed.
"""
local_machine()
local('psql {0} -c "CREATE USER {1} WITH PASSWORD \'{2}\'"'.format(
USER_AND_HOST, env.db_role, DB_PASSWORD))
local('psql {0} -c "CREATE DATABASE {1} ENCODING \'UTF8\'"'.format(
USER_AND_HOST, env.db_name))
if with_postgis:
local('psql {0} {1} -c "CREATE EXTENSION postgis"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "GRANT ALL PRIVILEGES ON DATABASE {1}'
' to {2}"'.format(USER_AND_HOST, env.db_name, env.db_role))
local('psql {0} -c "GRANT ALL PRIVILEGES ON ALL TABLES'
' IN SCHEMA public TO {1}"'.format(
USER_AND_HOST, env.db_role))
def delete_db():
"""
Deletes all data in the database.
You need django-extensions in order to use this.
However, please note that this is not as thorough as a real database drop.
"""
local(' ./manage.py reset_db --router=default --noinput')
def export_db(filename=None, remote=False):
"""
Exports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:<db_role>:<password>
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab export_db
fab export_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
if remote:
backup_dir = settings.FAB_SETTING('SERVER_DB_BACKUP_DIR')
else:
backup_dir = ''
local('pg_dump -c -Fc -O -U {0}{1} {2} -f {3}{4}'.format(
env.db_role, HOST, env.db_name, backup_dir, filename))
def drop_db():
"""Drops the local database."""
local_machine()
with fab_settings(warn_only=True):
local('psql {0} -c "DROP DATABASE {1}"'.format(
USER_AND_HOST, env.db_name))
local('psql {0} -c "DROP USER {1}"'.format(
USER_AND_HOST, env.db_role))
def jshint():
"""Runs jshint checks."""
with fab_settings(warn_only=True):
needs_to_abort = False
# because jshint fails with exit code 2, we need to allow this as
# a successful exit code in our env
if 2 not in env.ok_ret_codes:
env.ok_ret_codes.append(2)
output = local(
'find -name "{}" -print'.format('*.js'),
capture=True,
)
files = output.split()
jshint_installed = local('command -v jshint', capture=True)
if not jshint_installed.succeeded:
warn(red(
"To enable an extended check of your js files, please"
" install jshint by entering:\n\n npm install -g jshint"
))
else:
for file in files:
if hasattr(settings, 'JSHINT_CHECK_EXCLUDES'):
excludes = settings.JSHINT_CHECK_EXCLUDES
else:
excludes = settings.SYNTAX_CHECK_EXCLUDES
if any(s in file for s in excludes):
continue
jshint_result = local(
'jshint {0}'.format(file),
capture=True
)
if jshint_result:
warn(red('JS errors detected in file {0}'.format(
file
)))
puts(jshint_result)
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('jshint found no errors. Very good!'))
def syntax_check():
"""Runs flake8 against the codebase."""
with fab_settings(warn_only=True):
for file_type in settings.SYNTAX_CHECK:
needs_to_abort = False
# because egrep fails with exit code 1, we need to allow this as
# a successful exit code in our env
if 1 not in env.ok_ret_codes:
env.ok_ret_codes.append(1)
output = local(
'find -name "{}" -print'.format(file_type),
capture=True,
)
files = output.split()
for file in files:
if any(s in file for s in settings.SYNTAX_CHECK_EXCLUDES):
continue
result = local('egrep -i -n "{0}" {1}'.format(
settings.SYNTAX_CHECK[file_type], file), capture=True)
if result:
warn(red("Syntax check found in '{0}': {1}".format(
file, result)))
needs_to_abort = True
if needs_to_abort:
abort(red('There have been errors. Please fix them and run'
' the check again.'))
else:
puts(green('Syntax check found no errors. Very good!'))
def flake8():
"""Runs flake8 against the codebase."""
return local('flake8 --ignore=E126 --statistics '
'--exclude=submodules,south_migrations,migrations,'
'node_modules .')
def import_db(filename=None):
"""
Imports the database.
Make sure that you have this in your ``~/.pgpass`` file:
localhost:5433:*:publishizer_publishizer:publishizer
Also make sure that the file has ``chmod 0600 .pgpass``.
Usage::
fab import_db
fab import_db:filename=foobar.dump
"""
local_machine()
if not filename:
filename = settings.DB_DUMP_FILENAME
with fab_settings(warn_only=True):
local('pg_restore -O -c -U {0}{1} -d {2} {3}'.format(
env.db_role, HOST, env.db_name, filename))
def import_media(filename=None):
"""
Extracts media dump into your local media root.
Please note that this might overwrite existing local files.
Usage::
fab import_media
fab import_media:filename=foobar.tar.gz
"""
if not filename:
filename = settings.MEDIA_DUMP_FILENAME
project_root = os.getcwd()
with fab_settings(hide('everything'), warn_only=True):
is_backup_missing = local('test -e "$(echo %s)"' % os.path.join(
project_root, filename)).failed
if is_backup_missing:
abort(red('ERROR: There is no media backup that could be imported in'
' {0}. We need a file called {1} in that folder.'.format(
project_root, filename)))
# copy the dump into the media root folder
with lcd(project_root):
local('cp {0} {1}'.format(filename, settings.MEDIA_ROOT))
# extract and remove media dump
with lcd(settings.MEDIA_ROOT):
local('tar -xvf {0}'.format(filename))
local('rm -rf {0}'.format(filename))
def lessc(responsive=False):
"""
Compiles all less files.
This is useful if you are using the Twitter Bootstrap Framework.
"""
local('lessc {0}/static/css/bootstrap.less'
' {0}/static/css/bootstrap.css'.format(settings.PROJECT_NAME))
if responsive:
local('lessc {0}/static/css/responsive.less'
' {0}/static/css/bootstrap-responsive.css'.format(
settings.PROJECT_NAME))
def reset_passwords():
    """Resets all passwords to `test1234`."""
    local('python{} manage.py set_fake_passwords --password=test1234'.format(
        PYTHON_VERSION))
def test(options=None, integration=1, selenium=1, test_settings=None):
"""
Runs manage.py tests.
Usage::
fab test
fab test:app
fab test:app.tests.forms_tests:TestCaseName
fab test:integration=0
fab test:selenium=0
"""
if test_settings is None:
test_settings = settings.TEST_SETTINGS_PATH
command = ("coverage run --source='.' manage.py test -v 2" +
" --failfast --settings={0} --pattern='*_tests.py'".format(
test_settings))
if int(integration) == 0:
command += " --exclude='integration_tests'"
if int(selenium) == 0:
command += " --exclude='selenium_tests'"
if options:
command += ' {0}'.format(options)
with fab_settings(warn_only=True):
local(command, capture=False)
local('coverage html -d coverage --omit="{}"'.format(
settings.COVERAGE_EXCLUDES))
|
talkincode/toughlib | toughlib/utils.py | fmt_second | python | def fmt_second(time_total):
def _ck(t):
return t < 10 and "0%s" % t or t
times = int(time_total)
h = times / 3600
m = times % 3600 / 60
s = times % 3600 % 60
return "%s:%s:%s" % (_ck(h), _ck(m), _ck(s)) | >>> fmt_second(100)
'00:01:40' | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/utils.py#L190-L203 | [
"def _ck(t):\n return t < 10 and \"0%s\" % t or t\n"
] | #!/usr/bin/env python
#coding:utf-8
import decimal
import datetime
from Crypto.Cipher import AES
from Crypto import Random
import hashlib
import binascii
import hashlib
import base64
import calendar
import random
import os
import time
import uuid
import json
import functools
import logging
import urlparse
random_generator = random.SystemRandom()
decimal.getcontext().prec = 32
decimal.getcontext().rounding = decimal.ROUND_UP
_base_id = 0
_CurrentID = random_generator.randrange(1, 1024)
def CurrentID():
    """Return the next value of a module-wide rolling id as a string.

    The counter starts at a random offset and wraps at 1024.
    NOTE(review): not guarded by a lock -- concurrent callers may observe
    duplicate or skipped ids; confirm single-threaded use.
    """
    global _CurrentID
    _CurrentID = (_CurrentID + 1) % 1024
    return str(_CurrentID)
class AESCipher:
    """AES-256-CBC password cipher with base64 transport encoding.

    The actual 32 byte AES key is derived from the configured key string via
    SHA-256.  When the ``CLOSE_PASSWORD_ENCRYPTION`` environment variable is
    set, :meth:`encrypt` and :meth:`decrypt` become plaintext pass-throughs.
    """

    def __init__(self,key=None):
        # key is optional so a module-level singleton can be created first
        # and configured later via setup()
        if key:self.setup(key)

    def is_pwd_encrypt(self):
        """Return a truthy value when encryption is globally disabled."""
        return os.environ.get("CLOSE_PASSWORD_ENCRYPTION")

    def setup(self, key):
        """Derive and store the AES key from *key*."""
        self.bs = 32  # padding block size used by _pad/_unpad (PKCS#7-style)
        self.ori_key = key
        self.key = hashlib.sha256(key.encode()).digest()

    def encrypt(self, raw):
        """Encrypt *raw* and return ``base64(iv + ciphertext)``.

        Returns *raw* unchanged when encryption is disabled.
        """
        is_encrypt = self.is_pwd_encrypt()
        if is_encrypt:
            return raw
        # safestr comes from the enclosing package -- presumably coerces
        # unicode to a byte string; TODO confirm where it is defined.
        raw = safestr(raw)
        raw = self._pad(raw)
        # fresh random IV per message, prepended to the ciphertext
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(raw))

    def decrypt(self, enc):
        """Reverse :meth:`encrypt`; returns *enc* unchanged when disabled."""
        is_encrypt = self.is_pwd_encrypt()
        if is_encrypt:
            return enc
        enc = base64.b64decode(enc)
        # the IV occupies the first AES block of the decoded payload
        iv = enc[:AES.block_size]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return safeunicode(self._unpad(cipher.decrypt(enc[AES.block_size:])))

    def _pad(self, s):
        # pad up to a multiple of self.bs; the pad char encodes the pad length
        return s + (self.bs - len(s) % self.bs) * chr(self.bs - len(s) % self.bs)

    def _unpad(self,s):
        # strip the padding indicated by the last byte
        return s[:-ord(s[len(s)-1:])]
aescipher = AESCipher()
encrypt = aescipher.encrypt
decrypt = aescipher.decrypt
def update_tz(tz_val, default_val="CST-8"):
    """Point the process timezone at *tz_val*, falling back to *default_val*.

    Best effort: failures (e.g. platforms without ``time.tzset``) are
    silently ignored.
    """
    zone = tz_val if tz_val else default_val
    try:
        os.environ["TZ"] = zone
        time.tzset()
    except:
        pass
def check_ssl(config):
    """Read SSL settings from a config parser.

    Returns a ``(use_ssl, privatekey, certificate)`` tuple.  ``use_ssl`` is
    only True when the ``ssl`` flag is enabled in the DEFAULT section and
    both the key and certificate files exist on disk.
    """
    privatekey = None
    certificate = None
    use_ssl = False
    ssl_enabled = (config.has_option('DEFAULT', 'ssl')
                   and config.getboolean('DEFAULT', 'ssl'))
    if ssl_enabled:
        privatekey = config.get('DEFAULT', 'privatekey')
        certificate = config.get('DEFAULT', 'certificate')
        use_ssl = os.path.exists(privatekey) and os.path.exists(certificate)
    return use_ssl, privatekey, certificate
def get_uuid():
return uuid.uuid1().hex.upper()
def bps2mbps(bps):
_bps = decimal.Decimal(bps or 0)
_mbps = _bps / decimal.Decimal(1024*1024)
return str(_mbps.quantize(decimal.Decimal('1.000')))
def mbps2bps(mbps):
_mbps = decimal.Decimal(mbps or 0)
_kbps = _mbps * decimal.Decimal(1024*1024)
return int(_kbps.to_integral_value())
def bb2mb(ik):
_kb = decimal.Decimal(ik or 0)
_mb = _kb / decimal.Decimal(1024*1024)
return str(_mb.quantize(decimal.Decimal('1.00')))
def bbgb2mb(bb,gb):
bl = decimal.Decimal(bb or 0)/decimal.Decimal(1024*1024)
gl = decimal.Decimal(gb or 0)*decimal.Decimal(4*1024*1024*1024)
tl = bl + gl
return str(tl.quantize(decimal.Decimal('1.00')))
def kb2mb(ik,fmt='1.00'):
_kb = decimal.Decimal(ik or 0)
_mb = _kb / decimal.Decimal(1024)
return str(_mb.quantize(decimal.Decimal(fmt)))
def mb2kb(im=0):
_mb = decimal.Decimal(im or 0)
_kb = _mb * decimal.Decimal(1024)
return int(_kb.to_integral_value())
def kb2gb(ik,fmt='1.00'):
_kb = decimal.Decimal(ik or 0)
_mb = _kb / decimal.Decimal(1024*1024)
return str(_mb.quantize(decimal.Decimal(fmt)))
def gb2kb(im=0):
_mb = decimal.Decimal(im or 0)
_kb = _mb * decimal.Decimal(1024*1024)
return int(_kb.to_integral_value())
def hour2sec(hor=0):
_hor = decimal.Decimal(hor or 0)
_sec = _hor * decimal.Decimal(3600)
return int(_sec.to_integral_value())
def sec2hour(sec=0):
_sec = decimal.Decimal(sec or 0)
_hor = _sec / decimal.Decimal(3600)
return str(_hor.quantize(decimal.Decimal('1.00')))
def fen2yuan(fen=0):
f = decimal.Decimal(fen or 0)
y = f / decimal.Decimal(100)
return str(y.quantize(decimal.Decimal('1.00')))
def yuan2fen(yuan=0):
y = decimal.Decimal(yuan or 0)
f = y * decimal.Decimal(100)
return int(f.to_integral_value())
def get_currtime():
return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
def get_currdate():
return datetime.datetime.now().strftime("%Y-%m-%d")
def get_datetime(second):
return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(second))
def datetime2msec(dtime_str):
_datetime = datetime.datetime.strptime(dtime_str,"%Y-%m-%d %H:%M:%S")
return int(time.mktime(_datetime.timetuple()))
def gen_backup_id():
global _base_id
if _base_id >= 9999:_base_id=0
_base_id += 1
_num = str(_base_id).zfill(4)
return datetime.datetime.now().strftime("%Y%m%d_%H%M%S_") + _num
gen_backep_id = gen_backup_id
def gen_order_id():
global _base_id
if _base_id >= 9999:_base_id=0
_base_id += 1
_num = str(_base_id).zfill(4)
return datetime.datetime.now().strftime("%Y%m%d%H%M%S") + _num
def is_expire(dstr):
if not dstr:
return False
try:
expire_date = datetime.datetime.strptime("%s 23:59:59" % dstr, "%Y-%m-%d %H:%M:%S")
now = datetime.datetime.now()
return expire_date < now
except:
import traceback
traceback.print_exc()
return False
def fmt_online_time(ctime):
if not ctime:
return ''
cdate = datetime.datetime.strptime(ctime, '%Y-%m-%d %H:%M:%S')
nowdate = datetime.datetime.now()
dt = nowdate - cdate
times = dt.total_seconds()
if times <= 60:
return u"%s秒"%int(times)
d = times / (3600 * 24)
h = times % (3600 * 24) / 3600
m = times % (3600 * 24) % 3600 / 60
s = times % (3600 * 24) % 3600 % 60
if int(d) > 0:
return u"%s天%s小时%s分钟%s秒" % (int(d), int(h), int(m),int(s))
elif int(d) == 0 and int(h) > 0:
return u"%s小时%s分钟%s秒" % (int(h), int(m), int(s))
elif int(d) == 0 and int(h) == 0 and int(m) > 0:
return u"%s分钟%s秒" % (int(m),int(s))
def add_months(dt,months, days=0):
month = dt.month - 1 + months
year = dt.year + month / 12
month = month % 12 + 1
day = min(dt.day,calendar.monthrange(year,month)[1])
dt = dt.replace(year=year, month=month, day=day)
return dt + datetime.timedelta(days=days)
def is_connect(timestr, period=600):
if not timestr:
return False
try:
last_ping = datetime.datetime.strptime(timestr, "%Y-%m-%d %H:%M:%S")
now = datetime.datetime.now()
tt = now - last_ping
return tt.seconds < period
except:
return False
def serial_model(mdl):
if not mdl:return
if not hasattr(mdl,'__table__'):return
data = {}
for c in mdl.__table__.columns:
data[c.name] = getattr(mdl, c.name)
return json.dumps(data,ensure_ascii=False)
def safestr(val):
if val is None:
return ''
if isinstance(val, unicode):
try:
return val.encode('utf-8')
except:
return val.encode('gb2312')
elif isinstance(val, str):
return val
elif isinstance(val, int):
return str(val)
elif isinstance(val, float):
return str(val)
elif isinstance(val, (dict,list)):
return json.dumps(val, ensure_ascii=False)
else:
try:
return str(val)
except:
return val
return val
def safeunicode(val):
if val is None:
return u''
if isinstance(val, str):
try:
return val.decode('utf-8')
except:
try:
return val.decode('gb2312')
except:
return val
elif isinstance(val, unicode):
return val
elif isinstance(val, int):
return str(val).decode('utf-8')
elif isinstance(val, float):
return str(val).decode('utf-8')
elif isinstance(val, (dict,list)):
return json.dumps(val)
else:
try:
return str(val).decode('utf-8')
except:
return val
return val
def gen_secret(clen=32):
rg = random.SystemRandom()
r = list('1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
return ''.join([rg.choice(r) for _ in range(clen)])
def timecast(func):
from twisted.python import log
@functools.wraps(func)
def warp(*args,**kargs):
_start = time.clock()
result = func(*args,**kargs)
log.msg("%s cast %.6f second"%(func.__name__,time.clock()-_start))
return result
return warp
def split_mline(src,wd=32,rstr='\r\n'):
_idx = 0
ss = []
for c in src:
if _idx > 0 and _idx%wd == 0:
ss.append(rstr)
ss.append(c)
_idx += 1
return ''.join(ss)
def get_cron_interval(cron_time):
if cron_time:
cron_time = "%s:00"%cron_time
date_now = datetime.datetime.now()
_now_hm = date_now.strftime("%H:%M:%S")
_ymd = get_currdate()
if _now_hm > cron_time:
_ymd = (date_now + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
_interval = datetime.datetime.strptime("%s %s"%(_ymd,cron_time),"%Y-%m-%d %H:%M:%S") - date_now
_itimes = int(_interval.total_seconds())
return _itimes if _itimes > 0 else 86400
else:
return 120
if __name__ == '__main__':
aes = AESCipher("LpWE9AtfDPQ3ufXBS6gJ37WW8TnSF920")
# aa = aes.encrypt(u"中文".encode('utf-8'))
# print aa
# cc = aes.decrypt(aa)
# print cc.encode('utf-8')
# aa = aes.decrypt("+//J9HPYQ+5PccoBZml6ngcLLu1/XQh2KyWakfcExJeb0wyq1C9+okztyaFbspYZ")
# print aa
# print get_cron_interval('09:32')
now = datetime.datetime.now()
mon = now.month + 1
mon = mon if mon <= 12 else 1
timestr = "%s-%s-1 01:00:00" % (now.year,mon)
_date = datetime.datetime.strptime(timestr, "%Y-%m-%d %H:%M:%S")
tt = (time.mktime(_date.timetuple()) - time.time()) /86400
print _date,tt
|
talkincode/toughlib | toughlib/permit.py | Permit.add_route | python | def add_route(self, handle_cls, path, name, category,
handle_params={}, is_menu=False,
order=time.time(),is_open=True, oem=False,**kwargs):
if not path: return
if path in self.routes:
if self.routes[path].get('oem'):
return
self.routes[path] = dict(
path=path, # 权限url路径
name=name, # 权限名称
category=category, # 权限目录
is_menu=is_menu, # 是否在边栏显示为菜单
oprs=[], # 关联的操作员
order=order, # 排序
is_open=is_open, # 是否开放授权
oem=oem #是否定制功能
)
self.routes[path].update(**kwargs)
self.add_handler(handle_cls, path, handle_params) | 注册权限 | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/permit.py#L37-L58 | [
"def add_handler(self, handle_cls, path, handle_params={}):\n self.handlers[path] = (path, handle_cls, handle_params)\n"
] | class Permit():
""" 权限菜单管理
"""
opr_cache = {}
def __init__(self, parent=None):
if parent:
self.routes = parent.routes
self.handlers = parent.handlers
self.free_routes = parent.free_routes
else:
self.routes = {}
self.handlers = {}
self.free_routes = []
def fork(self,opr_name, opr_type=0,rules=[]):
p = Permit.opr_cache.setdefault(opr_name,Permit(self))
if opr_type == 0:
p.bind_super(opr_name)
else:
p.unbind_opr(opr_name)
for path in rules:
p.bind_opr(opr_name, path)
return p
def add_handler(self, handle_cls, path, handle_params={}):
self.handlers[path] = (path, handle_cls, handle_params)
@property
def all_handlers(self):
return self.handlers.values()
def get_route(self, path):
""" 获取一个权限资源
"""
return self.routes.get(path)
def bind_super(self, opr):
""" 为超级管理员授权所有权限
"""
for path in self.routes:
route = self.routes.get(path)
route['oprs'].append(opr)
def bind_opr(self, opr, path):
""" 为操作员授权
"""
if not path or path not in self.routes:
return
oprs = self.routes[path]['oprs']
if opr not in oprs:
oprs.append(opr)
def unbind_opr(self, opr, path=None):
""" 接触操作员与权限关联
"""
if path:
self.routes[path]['oprs'].remove(opr)
else:
for path in self.routes:
route = self.routes.get(path)
if route and opr in route['oprs']:
route['oprs'].remove(opr)
def check_open(self, path):
""" 检查权限是否开放授权
"""
route = self.routes[path]
return 'is_open' in route and route['is_open']
def check_opr_category(self, opr, category):
""" 检查权限是否在指定目录下
"""
for path in self.routes:
route = self.routes[path]
if opr in route['oprs'] and route['category'] == category:
return True
return False
def build_menus(self, order_cats=[]):
""" 生成全局内存菜单"""
menus = [{'category': _cat, 'items': []} for _cat in order_cats]
for path in self.routes:
route = self.routes[path]
for menu in menus:
if route['category'] == menu['category']:
menu['items'].append(route)
return menus
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs']
def suproute(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem, admin=True,**kwargs)
logger.info("add super managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
def route(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
if not menuname:
self.add_handler(cls, url_pattern)
selfobj.free_routes.append(url_pattern)
logger.info("add free route [%s : %s]" % (url_pattern, repr(cls)))
else:
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem,**kwargs)
logger.info("add managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
|
talkincode/toughlib | toughlib/permit.py | Permit.bind_super | python | def bind_super(self, opr):
for path in self.routes:
route = self.routes.get(path)
route['oprs'].append(opr) | 为超级管理员授权所有权限 | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/permit.py#L72-L77 | null | class Permit():
""" 权限菜单管理
"""
opr_cache = {}
def __init__(self, parent=None):
if parent:
self.routes = parent.routes
self.handlers = parent.handlers
self.free_routes = parent.free_routes
else:
self.routes = {}
self.handlers = {}
self.free_routes = []
def fork(self,opr_name, opr_type=0,rules=[]):
p = Permit.opr_cache.setdefault(opr_name,Permit(self))
if opr_type == 0:
p.bind_super(opr_name)
else:
p.unbind_opr(opr_name)
for path in rules:
p.bind_opr(opr_name, path)
return p
def add_route(self, handle_cls, path, name, category,
handle_params={}, is_menu=False,
order=time.time(),is_open=True, oem=False,**kwargs):
""" 注册权限
"""
if not path: return
if path in self.routes:
if self.routes[path].get('oem'):
return
self.routes[path] = dict(
path=path, # 权限url路径
name=name, # 权限名称
category=category, # 权限目录
is_menu=is_menu, # 是否在边栏显示为菜单
oprs=[], # 关联的操作员
order=order, # 排序
is_open=is_open, # 是否开放授权
oem=oem #是否定制功能
)
self.routes[path].update(**kwargs)
self.add_handler(handle_cls, path, handle_params)
def add_handler(self, handle_cls, path, handle_params={}):
self.handlers[path] = (path, handle_cls, handle_params)
@property
def all_handlers(self):
return self.handlers.values()
def get_route(self, path):
""" 获取一个权限资源
"""
return self.routes.get(path)
def bind_opr(self, opr, path):
""" 为操作员授权
"""
if not path or path not in self.routes:
return
oprs = self.routes[path]['oprs']
if opr not in oprs:
oprs.append(opr)
def unbind_opr(self, opr, path=None):
""" 接触操作员与权限关联
"""
if path:
self.routes[path]['oprs'].remove(opr)
else:
for path in self.routes:
route = self.routes.get(path)
if route and opr in route['oprs']:
route['oprs'].remove(opr)
def check_open(self, path):
""" 检查权限是否开放授权
"""
route = self.routes[path]
return 'is_open' in route and route['is_open']
def check_opr_category(self, opr, category):
""" 检查权限是否在指定目录下
"""
for path in self.routes:
route = self.routes[path]
if opr in route['oprs'] and route['category'] == category:
return True
return False
def build_menus(self, order_cats=[]):
""" 生成全局内存菜单"""
menus = [{'category': _cat, 'items': []} for _cat in order_cats]
for path in self.routes:
route = self.routes[path]
for menu in menus:
if route['category'] == menu['category']:
menu['items'].append(route)
return menus
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs']
def suproute(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem, admin=True,**kwargs)
logger.info("add super managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
def route(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
if not menuname:
self.add_handler(cls, url_pattern)
selfobj.free_routes.append(url_pattern)
logger.info("add free route [%s : %s]" % (url_pattern, repr(cls)))
else:
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem,**kwargs)
logger.info("add managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
|
talkincode/toughlib | toughlib/permit.py | Permit.bind_opr | python | def bind_opr(self, opr, path):
if not path or path not in self.routes:
return
oprs = self.routes[path]['oprs']
if opr not in oprs:
oprs.append(opr) | 为操作员授权 | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/permit.py#L79-L86 | null | class Permit():
""" 权限菜单管理
"""
opr_cache = {}
def __init__(self, parent=None):
if parent:
self.routes = parent.routes
self.handlers = parent.handlers
self.free_routes = parent.free_routes
else:
self.routes = {}
self.handlers = {}
self.free_routes = []
def fork(self,opr_name, opr_type=0,rules=[]):
p = Permit.opr_cache.setdefault(opr_name,Permit(self))
if opr_type == 0:
p.bind_super(opr_name)
else:
p.unbind_opr(opr_name)
for path in rules:
p.bind_opr(opr_name, path)
return p
def add_route(self, handle_cls, path, name, category,
handle_params={}, is_menu=False,
order=time.time(),is_open=True, oem=False,**kwargs):
""" 注册权限
"""
if not path: return
if path in self.routes:
if self.routes[path].get('oem'):
return
self.routes[path] = dict(
path=path, # 权限url路径
name=name, # 权限名称
category=category, # 权限目录
is_menu=is_menu, # 是否在边栏显示为菜单
oprs=[], # 关联的操作员
order=order, # 排序
is_open=is_open, # 是否开放授权
oem=oem #是否定制功能
)
self.routes[path].update(**kwargs)
self.add_handler(handle_cls, path, handle_params)
def add_handler(self, handle_cls, path, handle_params={}):
self.handlers[path] = (path, handle_cls, handle_params)
@property
def all_handlers(self):
return self.handlers.values()
def get_route(self, path):
""" 获取一个权限资源
"""
return self.routes.get(path)
def bind_super(self, opr):
""" 为超级管理员授权所有权限
"""
for path in self.routes:
route = self.routes.get(path)
route['oprs'].append(opr)
def unbind_opr(self, opr, path=None):
""" 接触操作员与权限关联
"""
if path:
self.routes[path]['oprs'].remove(opr)
else:
for path in self.routes:
route = self.routes.get(path)
if route and opr in route['oprs']:
route['oprs'].remove(opr)
def check_open(self, path):
""" 检查权限是否开放授权
"""
route = self.routes[path]
return 'is_open' in route and route['is_open']
def check_opr_category(self, opr, category):
""" 检查权限是否在指定目录下
"""
for path in self.routes:
route = self.routes[path]
if opr in route['oprs'] and route['category'] == category:
return True
return False
def build_menus(self, order_cats=[]):
""" 生成全局内存菜单"""
menus = [{'category': _cat, 'items': []} for _cat in order_cats]
for path in self.routes:
route = self.routes[path]
for menu in menus:
if route['category'] == menu['category']:
menu['items'].append(route)
return menus
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs']
def suproute(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem, admin=True,**kwargs)
logger.info("add super managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
def route(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
if not menuname:
self.add_handler(cls, url_pattern)
selfobj.free_routes.append(url_pattern)
logger.info("add free route [%s : %s]" % (url_pattern, repr(cls)))
else:
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem,**kwargs)
logger.info("add managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
|
talkincode/toughlib | toughlib/permit.py | Permit.unbind_opr | python | def unbind_opr(self, opr, path=None):
if path:
self.routes[path]['oprs'].remove(opr)
else:
for path in self.routes:
route = self.routes.get(path)
if route and opr in route['oprs']:
route['oprs'].remove(opr) | 接触操作员与权限关联 | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/permit.py#L88-L97 | null | class Permit():
""" 权限菜单管理
"""
opr_cache = {}
def __init__(self, parent=None):
if parent:
self.routes = parent.routes
self.handlers = parent.handlers
self.free_routes = parent.free_routes
else:
self.routes = {}
self.handlers = {}
self.free_routes = []
def fork(self,opr_name, opr_type=0,rules=[]):
p = Permit.opr_cache.setdefault(opr_name,Permit(self))
if opr_type == 0:
p.bind_super(opr_name)
else:
p.unbind_opr(opr_name)
for path in rules:
p.bind_opr(opr_name, path)
return p
def add_route(self, handle_cls, path, name, category,
handle_params={}, is_menu=False,
order=time.time(),is_open=True, oem=False,**kwargs):
""" 注册权限
"""
if not path: return
if path in self.routes:
if self.routes[path].get('oem'):
return
self.routes[path] = dict(
path=path, # 权限url路径
name=name, # 权限名称
category=category, # 权限目录
is_menu=is_menu, # 是否在边栏显示为菜单
oprs=[], # 关联的操作员
order=order, # 排序
is_open=is_open, # 是否开放授权
oem=oem #是否定制功能
)
self.routes[path].update(**kwargs)
self.add_handler(handle_cls, path, handle_params)
def add_handler(self, handle_cls, path, handle_params={}):
self.handlers[path] = (path, handle_cls, handle_params)
@property
def all_handlers(self):
return self.handlers.values()
def get_route(self, path):
""" 获取一个权限资源
"""
return self.routes.get(path)
def bind_super(self, opr):
""" 为超级管理员授权所有权限
"""
for path in self.routes:
route = self.routes.get(path)
route['oprs'].append(opr)
def bind_opr(self, opr, path):
""" 为操作员授权
"""
if not path or path not in self.routes:
return
oprs = self.routes[path]['oprs']
if opr not in oprs:
oprs.append(opr)
def check_open(self, path):
""" 检查权限是否开放授权
"""
route = self.routes[path]
return 'is_open' in route and route['is_open']
def check_opr_category(self, opr, category):
""" 检查权限是否在指定目录下
"""
for path in self.routes:
route = self.routes[path]
if opr in route['oprs'] and route['category'] == category:
return True
return False
def build_menus(self, order_cats=[]):
""" 生成全局内存菜单"""
menus = [{'category': _cat, 'items': []} for _cat in order_cats]
for path in self.routes:
route = self.routes[path]
for menu in menus:
if route['category'] == menu['category']:
menu['items'].append(route)
return menus
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs']
def suproute(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem, admin=True,**kwargs)
logger.info("add super managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
def route(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
if not menuname:
self.add_handler(cls, url_pattern)
selfobj.free_routes.append(url_pattern)
logger.info("add free route [%s : %s]" % (url_pattern, repr(cls)))
else:
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem,**kwargs)
logger.info("add managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
|
talkincode/toughlib | toughlib/permit.py | Permit.check_opr_category | python | def check_opr_category(self, opr, category):
for path in self.routes:
route = self.routes[path]
if opr in route['oprs'] and route['category'] == category:
return True
return False | 检查权限是否在指定目录下 | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/permit.py#L105-L112 | null | class Permit():
""" 权限菜单管理
"""
opr_cache = {}
def __init__(self, parent=None):
if parent:
self.routes = parent.routes
self.handlers = parent.handlers
self.free_routes = parent.free_routes
else:
self.routes = {}
self.handlers = {}
self.free_routes = []
def fork(self,opr_name, opr_type=0,rules=[]):
p = Permit.opr_cache.setdefault(opr_name,Permit(self))
if opr_type == 0:
p.bind_super(opr_name)
else:
p.unbind_opr(opr_name)
for path in rules:
p.bind_opr(opr_name, path)
return p
def add_route(self, handle_cls, path, name, category,
handle_params={}, is_menu=False,
order=time.time(),is_open=True, oem=False,**kwargs):
""" 注册权限
"""
if not path: return
if path in self.routes:
if self.routes[path].get('oem'):
return
self.routes[path] = dict(
path=path, # 权限url路径
name=name, # 权限名称
category=category, # 权限目录
is_menu=is_menu, # 是否在边栏显示为菜单
oprs=[], # 关联的操作员
order=order, # 排序
is_open=is_open, # 是否开放授权
oem=oem #是否定制功能
)
self.routes[path].update(**kwargs)
self.add_handler(handle_cls, path, handle_params)
def add_handler(self, handle_cls, path, handle_params={}):
self.handlers[path] = (path, handle_cls, handle_params)
@property
def all_handlers(self):
return self.handlers.values()
def get_route(self, path):
""" 获取一个权限资源
"""
return self.routes.get(path)
def bind_super(self, opr):
""" 为超级管理员授权所有权限
"""
for path in self.routes:
route = self.routes.get(path)
route['oprs'].append(opr)
def bind_opr(self, opr, path):
""" 为操作员授权
"""
if not path or path not in self.routes:
return
oprs = self.routes[path]['oprs']
if opr not in oprs:
oprs.append(opr)
def unbind_opr(self, opr, path=None):
""" 接触操作员与权限关联
"""
if path:
self.routes[path]['oprs'].remove(opr)
else:
for path in self.routes:
route = self.routes.get(path)
if route and opr in route['oprs']:
route['oprs'].remove(opr)
def check_open(self, path):
""" 检查权限是否开放授权
"""
route = self.routes[path]
return 'is_open' in route and route['is_open']
def build_menus(self, order_cats=[]):
""" 生成全局内存菜单"""
menus = [{'category': _cat, 'items': []} for _cat in order_cats]
for path in self.routes:
route = self.routes[path]
for menu in menus:
if route['category'] == menu['category']:
menu['items'].append(route)
return menus
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs']
def suproute(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem, admin=True,**kwargs)
logger.info("add super managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
def route(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
if not menuname:
self.add_handler(cls, url_pattern)
selfobj.free_routes.append(url_pattern)
logger.info("add free route [%s : %s]" % (url_pattern, repr(cls)))
else:
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem,**kwargs)
logger.info("add managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
|
talkincode/toughlib | toughlib/permit.py | Permit.build_menus | python | def build_menus(self, order_cats=[]):
menus = [{'category': _cat, 'items': []} for _cat in order_cats]
for path in self.routes:
route = self.routes[path]
for menu in menus:
if route['category'] == menu['category']:
menu['items'].append(route)
return menus | 生成全局内存菜单 | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/permit.py#L114-L122 | null | class Permit():
""" 权限菜单管理
"""
opr_cache = {}
def __init__(self, parent=None):
if parent:
self.routes = parent.routes
self.handlers = parent.handlers
self.free_routes = parent.free_routes
else:
self.routes = {}
self.handlers = {}
self.free_routes = []
def fork(self,opr_name, opr_type=0,rules=[]):
p = Permit.opr_cache.setdefault(opr_name,Permit(self))
if opr_type == 0:
p.bind_super(opr_name)
else:
p.unbind_opr(opr_name)
for path in rules:
p.bind_opr(opr_name, path)
return p
def add_route(self, handle_cls, path, name, category,
handle_params={}, is_menu=False,
order=time.time(),is_open=True, oem=False,**kwargs):
""" 注册权限
"""
if not path: return
if path in self.routes:
if self.routes[path].get('oem'):
return
self.routes[path] = dict(
path=path, # 权限url路径
name=name, # 权限名称
category=category, # 权限目录
is_menu=is_menu, # 是否在边栏显示为菜单
oprs=[], # 关联的操作员
order=order, # 排序
is_open=is_open, # 是否开放授权
oem=oem #是否定制功能
)
self.routes[path].update(**kwargs)
self.add_handler(handle_cls, path, handle_params)
def add_handler(self, handle_cls, path, handle_params={}):
self.handlers[path] = (path, handle_cls, handle_params)
@property
def all_handlers(self):
return self.handlers.values()
def get_route(self, path):
""" 获取一个权限资源
"""
return self.routes.get(path)
def bind_super(self, opr):
""" 为超级管理员授权所有权限
"""
for path in self.routes:
route = self.routes.get(path)
route['oprs'].append(opr)
def bind_opr(self, opr, path):
""" 为操作员授权
"""
if not path or path not in self.routes:
return
oprs = self.routes[path]['oprs']
if opr not in oprs:
oprs.append(opr)
def unbind_opr(self, opr, path=None):
""" 接触操作员与权限关联
"""
if path:
self.routes[path]['oprs'].remove(opr)
else:
for path in self.routes:
route = self.routes.get(path)
if route and opr in route['oprs']:
route['oprs'].remove(opr)
def check_open(self, path):
""" 检查权限是否开放授权
"""
route = self.routes[path]
return 'is_open' in route and route['is_open']
def check_opr_category(self, opr, category):
""" 检查权限是否在指定目录下
"""
for path in self.routes:
route = self.routes[path]
if opr in route['oprs'] and route['category'] == category:
return True
return False
def match(self, opr, path):
""" 检查操作员是否匹配资源
"""
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs']
def suproute(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem, admin=True,**kwargs)
logger.info("add super managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
def route(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
if not menuname:
self.add_handler(cls, url_pattern)
selfobj.free_routes.append(url_pattern)
logger.info("add free route [%s : %s]" % (url_pattern, repr(cls)))
else:
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem,**kwargs)
logger.info("add managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
|
talkincode/toughlib | toughlib/permit.py | Permit.match | python | def match(self, opr, path):
_url = urlparse.urlparse(path)
if not _url.path or not opr:
return False
if _url.path in self.free_routes:
return True
if _url.path not in self.routes:
return False
return opr in self.routes[_url.path]['oprs'] | 检查操作员是否匹配资源 | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/permit.py#L124-L134 | null | class Permit():
""" 权限菜单管理
"""
opr_cache = {}
def __init__(self, parent=None):
if parent:
self.routes = parent.routes
self.handlers = parent.handlers
self.free_routes = parent.free_routes
else:
self.routes = {}
self.handlers = {}
self.free_routes = []
def fork(self,opr_name, opr_type=0,rules=[]):
p = Permit.opr_cache.setdefault(opr_name,Permit(self))
if opr_type == 0:
p.bind_super(opr_name)
else:
p.unbind_opr(opr_name)
for path in rules:
p.bind_opr(opr_name, path)
return p
def add_route(self, handle_cls, path, name, category,
handle_params={}, is_menu=False,
order=time.time(),is_open=True, oem=False,**kwargs):
""" 注册权限
"""
if not path: return
if path in self.routes:
if self.routes[path].get('oem'):
return
self.routes[path] = dict(
path=path, # 权限url路径
name=name, # 权限名称
category=category, # 权限目录
is_menu=is_menu, # 是否在边栏显示为菜单
oprs=[], # 关联的操作员
order=order, # 排序
is_open=is_open, # 是否开放授权
oem=oem #是否定制功能
)
self.routes[path].update(**kwargs)
self.add_handler(handle_cls, path, handle_params)
def add_handler(self, handle_cls, path, handle_params={}):
self.handlers[path] = (path, handle_cls, handle_params)
@property
def all_handlers(self):
return self.handlers.values()
def get_route(self, path):
""" 获取一个权限资源
"""
return self.routes.get(path)
def bind_super(self, opr):
""" 为超级管理员授权所有权限
"""
for path in self.routes:
route = self.routes.get(path)
route['oprs'].append(opr)
def bind_opr(self, opr, path):
""" 为操作员授权
"""
if not path or path not in self.routes:
return
oprs = self.routes[path]['oprs']
if opr not in oprs:
oprs.append(opr)
def unbind_opr(self, opr, path=None):
""" 接触操作员与权限关联
"""
if path:
self.routes[path]['oprs'].remove(opr)
else:
for path in self.routes:
route = self.routes.get(path)
if route and opr in route['oprs']:
route['oprs'].remove(opr)
def check_open(self, path):
""" 检查权限是否开放授权
"""
route = self.routes[path]
return 'is_open' in route and route['is_open']
def check_opr_category(self, opr, category):
""" 检查权限是否在指定目录下
"""
for path in self.routes:
route = self.routes[path]
if opr in route['oprs'] and route['category'] == category:
return True
return False
def build_menus(self, order_cats=[]):
""" 生成全局内存菜单"""
menus = [{'category': _cat, 'items': []} for _cat in order_cats]
for path in self.routes:
route = self.routes[path]
for menu in menus:
if route['category'] == menu['category']:
menu['items'].append(route)
return menus
def suproute(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem, admin=True,**kwargs)
logger.info("add super managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
def route(self, url_pattern, menuname=None, category=None,
is_menu=False, order=0, is_open=True,oem=False,**kwargs):
selfobj = self
def handler_wapper(cls):
if not menuname:
self.add_handler(cls, url_pattern)
selfobj.free_routes.append(url_pattern)
logger.info("add free route [%s : %s]" % (url_pattern, repr(cls)))
else:
selfobj.add_route(cls, url_pattern, menuname, category,
order=order, is_menu=is_menu, is_open=is_open,oem=oem,**kwargs)
logger.info("add managed route [%s : %s]" % (url_pattern, repr(cls)))
return cls
return handler_wapper
|
talkincode/toughlib | toughlib/apiutils.py | make_sign | python | def make_sign(api_secret, params=[]):
_params = [utils.safeunicode(p) for p in params if p is not None]
_params.sort()
# print 'sorted params:',_params
_params.insert(0, api_secret)
strs = ''.join(_params)
# print 'sign params:',strs
mds = md5(strs.encode('utf-8')).hexdigest()
return mds.upper() | >>> make_sign("123456",[1,'2',u'中文'])
'33C9065427EECA3490C5642C99165145' | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/apiutils.py#L29-L41 | null | #!/usr/bin/env python
# coding:utf-8
import time
import json
from hashlib import md5
from toughlib import utils, httpclient
from toughlib import logger
from toughlib.storage import Storage
from collections import namedtuple
ApiStatus = namedtuple('ApiStatus', 'code desc msg')
apistatus = Storage(
success = ApiStatus(code=0,desc='success',msg=u"处理成功"),
sign_err = ApiStatus(code=90001,desc='message sign error',msg=u"消息签名错误"),
parse_err = ApiStatus(code=90002,desc='param parse error',msg=u"参数解析失败"),
verify_err = ApiStatus(code=90003,desc='message verify error',msg=u"消息校验错误"),
timeout = ApiStatus(code=90004,desc='request timeout',msg=u"请求超时"),
limit_err = ApiStatus(code=90005,desc='api limit',msg=u"频率限制"),
server_err = ApiStatus(code=90006,desc='server process failure',msg=u"服务器处理失败"),
unknow = ApiStatus(code=99999,desc='unknow error',msg=u"未知错误")
)
class SignError(Exception):
pass
class ParseError(Exception):
pass
def check_sign(api_secret, msg):
"""
>>> check_sign("123456",dict(code=1,s='2',msg=u'中文',sign='33C9065427EECA3490C5642C99165145'))
True
"""
if "sign" not in msg:
return False
sign = msg['sign']
params = [utils.safestr(msg[k]) for k in msg if k != 'sign' and msg[k] is not None]
local_sign = make_sign(api_secret, params)
result = (sign == local_sign)
if not result:
logger.error("check_sign failure, sign:%s != local_sign:%s" %(sign,local_sign))
return result
def make_message(api_secret, enc_func=False, **params):
"""
>>> json.loads(make_message("123456",**dict(code=1,msg=u"中文",nonce=1451122677)))['sign']
u'58BAF40309BC1DC51D2E2DC43ECCC1A1'
"""
if 'nonce' not in params:
params['nonce' ] = str(int(time.time()))
params['sign'] = make_sign(api_secret, params.values())
msg = json.dumps(params, ensure_ascii=False)
if callable(enc_func):
return enc_func(msg)
else:
return msg
def make_error(api_secret, msg=None, enc_func=False):
return make_message(api_secret,code=1,msg=msg, enc_func=enc_func)
def parse_request(api_secret, reqbody, dec_func=False):
"""
>>> parse_request("123456",'{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"}')
<Storage {u'nonce': 1451122677, u'msg': u'helllo', u'code': 0, u'sign': u'DB30F4D1112C20DFA736F65458F89C64'}>
"""
try:
if type(reqbody) == type(dict):
return self.parse_form_request(reqbody)
if callable(dec_func):
req_msg = json.loads(dec_func(reqbody))
else:
req_msg = json.loads(reqbody)
except Exception as err:
raise ParseError(u"parse params error")
if not check_sign(api_secret, req_msg):
raise SignError(u"message sign error")
return Storage(req_msg)
def parse_form_request(api_secret, request):
"""
>>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}>
"""
if not check_sign(api_secret, request):
raise SignError(u"message sign error")
return Storage(request)
def request(apiurl, data=None, **kwargs):
headers = {"Content-Type": ["application/json"]}
return httpclient.post(apiurl, data=data, **kwargs)
if __name__ == "__main__":
# print apistatus
# import doctest
# doctest.testmod()
#isp_name=123&isp_email=222@222.com&isp_idcard=&isp_desc=&isp_phone=22222222222&sign=9E5139D66E8E5C10634A3E96631BCF
params = ['123','222@222.com','22222222222']
print make_sign('LpWE9AtfDPQ3ufXBS6gJ37WW8TnSF920',params)
|
talkincode/toughlib | toughlib/apiutils.py | check_sign | python | def check_sign(api_secret, msg):
if "sign" not in msg:
return False
sign = msg['sign']
params = [utils.safestr(msg[k]) for k in msg if k != 'sign' and msg[k] is not None]
local_sign = make_sign(api_secret, params)
result = (sign == local_sign)
if not result:
logger.error("check_sign failure, sign:%s != local_sign:%s" %(sign,local_sign))
return result | >>> check_sign("123456",dict(code=1,s='2',msg=u'中文',sign='33C9065427EECA3490C5642C99165145'))
True | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/apiutils.py#L44-L58 | [
"def error(message,**kwargs):\n if not isinstance(message, unicode):\n message = safeunicode(message)\n if EVENT_ERROR in dispatch.dispatch.callbacks:\n dispatch.pub(EVENT_ERROR,message,**kwargs)\n if EVENT_TRACE in dispatch.dispatch.callbacks:\n dispatch.pub(EVENT_TRACE,\"erro... | #!/usr/bin/env python
# coding:utf-8
import time
import json
from hashlib import md5
from toughlib import utils, httpclient
from toughlib import logger
from toughlib.storage import Storage
from collections import namedtuple
ApiStatus = namedtuple('ApiStatus', 'code desc msg')
apistatus = Storage(
success = ApiStatus(code=0,desc='success',msg=u"处理成功"),
sign_err = ApiStatus(code=90001,desc='message sign error',msg=u"消息签名错误"),
parse_err = ApiStatus(code=90002,desc='param parse error',msg=u"参数解析失败"),
verify_err = ApiStatus(code=90003,desc='message verify error',msg=u"消息校验错误"),
timeout = ApiStatus(code=90004,desc='request timeout',msg=u"请求超时"),
limit_err = ApiStatus(code=90005,desc='api limit',msg=u"频率限制"),
server_err = ApiStatus(code=90006,desc='server process failure',msg=u"服务器处理失败"),
unknow = ApiStatus(code=99999,desc='unknow error',msg=u"未知错误")
)
class SignError(Exception):
pass
class ParseError(Exception):
pass
def make_sign(api_secret, params=[]):
"""
>>> make_sign("123456",[1,'2',u'中文'])
'33C9065427EECA3490C5642C99165145'
"""
_params = [utils.safeunicode(p) for p in params if p is not None]
_params.sort()
# print 'sorted params:',_params
_params.insert(0, api_secret)
strs = ''.join(_params)
# print 'sign params:',strs
mds = md5(strs.encode('utf-8')).hexdigest()
return mds.upper()
def make_message(api_secret, enc_func=False, **params):
"""
>>> json.loads(make_message("123456",**dict(code=1,msg=u"中文",nonce=1451122677)))['sign']
u'58BAF40309BC1DC51D2E2DC43ECCC1A1'
"""
if 'nonce' not in params:
params['nonce' ] = str(int(time.time()))
params['sign'] = make_sign(api_secret, params.values())
msg = json.dumps(params, ensure_ascii=False)
if callable(enc_func):
return enc_func(msg)
else:
return msg
def make_error(api_secret, msg=None, enc_func=False):
return make_message(api_secret,code=1,msg=msg, enc_func=enc_func)
def parse_request(api_secret, reqbody, dec_func=False):
"""
>>> parse_request("123456",'{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"}')
<Storage {u'nonce': 1451122677, u'msg': u'helllo', u'code': 0, u'sign': u'DB30F4D1112C20DFA736F65458F89C64'}>
"""
try:
if type(reqbody) == type(dict):
return self.parse_form_request(reqbody)
if callable(dec_func):
req_msg = json.loads(dec_func(reqbody))
else:
req_msg = json.loads(reqbody)
except Exception as err:
raise ParseError(u"parse params error")
if not check_sign(api_secret, req_msg):
raise SignError(u"message sign error")
return Storage(req_msg)
def parse_form_request(api_secret, request):
"""
>>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}>
"""
if not check_sign(api_secret, request):
raise SignError(u"message sign error")
return Storage(request)
def request(apiurl, data=None, **kwargs):
headers = {"Content-Type": ["application/json"]}
return httpclient.post(apiurl, data=data, **kwargs)
if __name__ == "__main__":
# print apistatus
# import doctest
# doctest.testmod()
#isp_name=123&isp_email=222@222.com&isp_idcard=&isp_desc=&isp_phone=22222222222&sign=9E5139D66E8E5C10634A3E96631BCF
params = ['123','222@222.com','22222222222']
print make_sign('LpWE9AtfDPQ3ufXBS6gJ37WW8TnSF920',params)
|
talkincode/toughlib | toughlib/apiutils.py | make_message | python | def make_message(api_secret, enc_func=False, **params):
if 'nonce' not in params:
params['nonce' ] = str(int(time.time()))
params['sign'] = make_sign(api_secret, params.values())
msg = json.dumps(params, ensure_ascii=False)
if callable(enc_func):
return enc_func(msg)
else:
return msg | >>> json.loads(make_message("123456",**dict(code=1,msg=u"中文",nonce=1451122677)))['sign']
u'58BAF40309BC1DC51D2E2DC43ECCC1A1' | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/apiutils.py#L60-L72 | [
"def make_sign(api_secret, params=[]):\n \"\"\"\n >>> make_sign(\"123456\",[1,'2',u'中文'])\n '33C9065427EECA3490C5642C99165145'\n \"\"\"\n _params = [utils.safeunicode(p) for p in params if p is not None]\n _params.sort()\n # print 'sorted params:',_params\n _params.insert(0, api_secr... | #!/usr/bin/env python
# coding:utf-8
import time
import json
from hashlib import md5
from toughlib import utils, httpclient
from toughlib import logger
from toughlib.storage import Storage
from collections import namedtuple
ApiStatus = namedtuple('ApiStatus', 'code desc msg')
apistatus = Storage(
success = ApiStatus(code=0,desc='success',msg=u"处理成功"),
sign_err = ApiStatus(code=90001,desc='message sign error',msg=u"消息签名错误"),
parse_err = ApiStatus(code=90002,desc='param parse error',msg=u"参数解析失败"),
verify_err = ApiStatus(code=90003,desc='message verify error',msg=u"消息校验错误"),
timeout = ApiStatus(code=90004,desc='request timeout',msg=u"请求超时"),
limit_err = ApiStatus(code=90005,desc='api limit',msg=u"频率限制"),
server_err = ApiStatus(code=90006,desc='server process failure',msg=u"服务器处理失败"),
unknow = ApiStatus(code=99999,desc='unknow error',msg=u"未知错误")
)
class SignError(Exception):
pass
class ParseError(Exception):
pass
def make_sign(api_secret, params=[]):
"""
>>> make_sign("123456",[1,'2',u'中文'])
'33C9065427EECA3490C5642C99165145'
"""
_params = [utils.safeunicode(p) for p in params if p is not None]
_params.sort()
# print 'sorted params:',_params
_params.insert(0, api_secret)
strs = ''.join(_params)
# print 'sign params:',strs
mds = md5(strs.encode('utf-8')).hexdigest()
return mds.upper()
def check_sign(api_secret, msg):
"""
>>> check_sign("123456",dict(code=1,s='2',msg=u'中文',sign='33C9065427EECA3490C5642C99165145'))
True
"""
if "sign" not in msg:
return False
sign = msg['sign']
params = [utils.safestr(msg[k]) for k in msg if k != 'sign' and msg[k] is not None]
local_sign = make_sign(api_secret, params)
result = (sign == local_sign)
if not result:
logger.error("check_sign failure, sign:%s != local_sign:%s" %(sign,local_sign))
return result
def make_error(api_secret, msg=None, enc_func=False):
return make_message(api_secret,code=1,msg=msg, enc_func=enc_func)
def parse_request(api_secret, reqbody, dec_func=False):
"""
>>> parse_request("123456",'{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"}')
<Storage {u'nonce': 1451122677, u'msg': u'helllo', u'code': 0, u'sign': u'DB30F4D1112C20DFA736F65458F89C64'}>
"""
try:
if type(reqbody) == type(dict):
return self.parse_form_request(reqbody)
if callable(dec_func):
req_msg = json.loads(dec_func(reqbody))
else:
req_msg = json.loads(reqbody)
except Exception as err:
raise ParseError(u"parse params error")
if not check_sign(api_secret, req_msg):
raise SignError(u"message sign error")
return Storage(req_msg)
def parse_form_request(api_secret, request):
"""
>>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}>
"""
if not check_sign(api_secret, request):
raise SignError(u"message sign error")
return Storage(request)
def request(apiurl, data=None, **kwargs):
headers = {"Content-Type": ["application/json"]}
return httpclient.post(apiurl, data=data, **kwargs)
if __name__ == "__main__":
# print apistatus
# import doctest
# doctest.testmod()
#isp_name=123&isp_email=222@222.com&isp_idcard=&isp_desc=&isp_phone=22222222222&sign=9E5139D66E8E5C10634A3E96631BCF
params = ['123','222@222.com','22222222222']
print make_sign('LpWE9AtfDPQ3ufXBS6gJ37WW8TnSF920',params)
|
talkincode/toughlib | toughlib/apiutils.py | parse_request | python | def parse_request(api_secret, reqbody, dec_func=False):
try:
if type(reqbody) == type(dict):
return self.parse_form_request(reqbody)
if callable(dec_func):
req_msg = json.loads(dec_func(reqbody))
else:
req_msg = json.loads(reqbody)
except Exception as err:
raise ParseError(u"parse params error")
if not check_sign(api_secret, req_msg):
raise SignError(u"message sign error")
return Storage(req_msg) | >>> parse_request("123456",'{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"}')
<Storage {u'nonce': 1451122677, u'msg': u'helllo', u'code': 0, u'sign': u'DB30F4D1112C20DFA736F65458F89C64'}> | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/apiutils.py#L77-L96 | [
"def check_sign(api_secret, msg):\n \"\"\"\n >>> check_sign(\"123456\",dict(code=1,s='2',msg=u'中文',sign='33C9065427EECA3490C5642C99165145'))\n True\n\n \"\"\"\n if \"sign\" not in msg:\n return False\n sign = msg['sign']\n params = [utils.safestr(msg[k]) for k in msg if k != 'sig... | #!/usr/bin/env python
# coding:utf-8
import time
import json
from hashlib import md5
from toughlib import utils, httpclient
from toughlib import logger
from toughlib.storage import Storage
from collections import namedtuple
ApiStatus = namedtuple('ApiStatus', 'code desc msg')
apistatus = Storage(
success = ApiStatus(code=0,desc='success',msg=u"处理成功"),
sign_err = ApiStatus(code=90001,desc='message sign error',msg=u"消息签名错误"),
parse_err = ApiStatus(code=90002,desc='param parse error',msg=u"参数解析失败"),
verify_err = ApiStatus(code=90003,desc='message verify error',msg=u"消息校验错误"),
timeout = ApiStatus(code=90004,desc='request timeout',msg=u"请求超时"),
limit_err = ApiStatus(code=90005,desc='api limit',msg=u"频率限制"),
server_err = ApiStatus(code=90006,desc='server process failure',msg=u"服务器处理失败"),
unknow = ApiStatus(code=99999,desc='unknow error',msg=u"未知错误")
)
class SignError(Exception):
pass
class ParseError(Exception):
pass
def make_sign(api_secret, params=[]):
"""
>>> make_sign("123456",[1,'2',u'中文'])
'33C9065427EECA3490C5642C99165145'
"""
_params = [utils.safeunicode(p) for p in params if p is not None]
_params.sort()
# print 'sorted params:',_params
_params.insert(0, api_secret)
strs = ''.join(_params)
# print 'sign params:',strs
mds = md5(strs.encode('utf-8')).hexdigest()
return mds.upper()
def check_sign(api_secret, msg):
"""
>>> check_sign("123456",dict(code=1,s='2',msg=u'中文',sign='33C9065427EECA3490C5642C99165145'))
True
"""
if "sign" not in msg:
return False
sign = msg['sign']
params = [utils.safestr(msg[k]) for k in msg if k != 'sign' and msg[k] is not None]
local_sign = make_sign(api_secret, params)
result = (sign == local_sign)
if not result:
logger.error("check_sign failure, sign:%s != local_sign:%s" %(sign,local_sign))
return result
def make_message(api_secret, enc_func=False, **params):
"""
>>> json.loads(make_message("123456",**dict(code=1,msg=u"中文",nonce=1451122677)))['sign']
u'58BAF40309BC1DC51D2E2DC43ECCC1A1'
"""
if 'nonce' not in params:
params['nonce' ] = str(int(time.time()))
params['sign'] = make_sign(api_secret, params.values())
msg = json.dumps(params, ensure_ascii=False)
if callable(enc_func):
return enc_func(msg)
else:
return msg
def make_error(api_secret, msg=None, enc_func=False):
return make_message(api_secret,code=1,msg=msg, enc_func=enc_func)
def parse_request(api_secret, reqbody, dec_func=False):
"""
>>> parse_request("123456",'{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"}')
<Storage {u'nonce': 1451122677, u'msg': u'helllo', u'code': 0, u'sign': u'DB30F4D1112C20DFA736F65458F89C64'}>
"""
try:
if type(reqbody) == type(dict):
return self.parse_form_request(reqbody)
if callable(dec_func):
req_msg = json.loads(dec_func(reqbody))
else:
req_msg = json.loads(reqbody)
except Exception as err:
raise ParseError(u"parse params error")
if not check_sign(api_secret, req_msg):
raise SignError(u"message sign error")
return Storage(req_msg)
def parse_form_request(api_secret, request):
"""
>>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}>
"""
if not check_sign(api_secret, request):
raise SignError(u"message sign error")
return Storage(request)
def request(apiurl, data=None, **kwargs):
headers = {"Content-Type": ["application/json"]}
return httpclient.post(apiurl, data=data, **kwargs)
if __name__ == "__main__":
# print apistatus
# import doctest
# doctest.testmod()
#isp_name=123&isp_email=222@222.com&isp_idcard=&isp_desc=&isp_phone=22222222222&sign=9E5139D66E8E5C10634A3E96631BCF
params = ['123','222@222.com','22222222222']
print make_sign('LpWE9AtfDPQ3ufXBS6gJ37WW8TnSF920',params)
|
talkincode/toughlib | toughlib/apiutils.py | parse_form_request | python | def parse_form_request(api_secret, request):
if not check_sign(api_secret, request):
raise SignError(u"message sign error")
return Storage(request) | >>> parse_form_request("123456",{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"})
<Storage {'nonce': 1451122677, 'msg': 'helllo', 'code': 0, 'sign': 'DB30F4D1112C20DFA736F65458F89C64'}> | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/apiutils.py#L98-L106 | [
"def check_sign(api_secret, msg):\n \"\"\"\n >>> check_sign(\"123456\",dict(code=1,s='2',msg=u'中文',sign='33C9065427EECA3490C5642C99165145'))\n True\n\n \"\"\"\n if \"sign\" not in msg:\n return False\n sign = msg['sign']\n params = [utils.safestr(msg[k]) for k in msg if k != 'sig... | #!/usr/bin/env python
# coding:utf-8
import time
import json
from hashlib import md5
from toughlib import utils, httpclient
from toughlib import logger
from toughlib.storage import Storage
from collections import namedtuple
ApiStatus = namedtuple('ApiStatus', 'code desc msg')
apistatus = Storage(
success = ApiStatus(code=0,desc='success',msg=u"处理成功"),
sign_err = ApiStatus(code=90001,desc='message sign error',msg=u"消息签名错误"),
parse_err = ApiStatus(code=90002,desc='param parse error',msg=u"参数解析失败"),
verify_err = ApiStatus(code=90003,desc='message verify error',msg=u"消息校验错误"),
timeout = ApiStatus(code=90004,desc='request timeout',msg=u"请求超时"),
limit_err = ApiStatus(code=90005,desc='api limit',msg=u"频率限制"),
server_err = ApiStatus(code=90006,desc='server process failure',msg=u"服务器处理失败"),
unknow = ApiStatus(code=99999,desc='unknow error',msg=u"未知错误")
)
class SignError(Exception):
pass
class ParseError(Exception):
pass
def make_sign(api_secret, params=[]):
"""
>>> make_sign("123456",[1,'2',u'中文'])
'33C9065427EECA3490C5642C99165145'
"""
_params = [utils.safeunicode(p) for p in params if p is not None]
_params.sort()
# print 'sorted params:',_params
_params.insert(0, api_secret)
strs = ''.join(_params)
# print 'sign params:',strs
mds = md5(strs.encode('utf-8')).hexdigest()
return mds.upper()
def check_sign(api_secret, msg):
"""
>>> check_sign("123456",dict(code=1,s='2',msg=u'中文',sign='33C9065427EECA3490C5642C99165145'))
True
"""
if "sign" not in msg:
return False
sign = msg['sign']
params = [utils.safestr(msg[k]) for k in msg if k != 'sign' and msg[k] is not None]
local_sign = make_sign(api_secret, params)
result = (sign == local_sign)
if not result:
logger.error("check_sign failure, sign:%s != local_sign:%s" %(sign,local_sign))
return result
def make_message(api_secret, enc_func=False, **params):
"""
>>> json.loads(make_message("123456",**dict(code=1,msg=u"中文",nonce=1451122677)))['sign']
u'58BAF40309BC1DC51D2E2DC43ECCC1A1'
"""
if 'nonce' not in params:
params['nonce' ] = str(int(time.time()))
params['sign'] = make_sign(api_secret, params.values())
msg = json.dumps(params, ensure_ascii=False)
if callable(enc_func):
return enc_func(msg)
else:
return msg
def make_error(api_secret, msg=None, enc_func=False):
return make_message(api_secret,code=1,msg=msg, enc_func=enc_func)
def parse_request(api_secret, reqbody, dec_func=False):
"""
>>> parse_request("123456",'{"nonce": 1451122677, "msg": "helllo", "code": 0, "sign": "DB30F4D1112C20DFA736F65458F89C64"}')
<Storage {u'nonce': 1451122677, u'msg': u'helllo', u'code': 0, u'sign': u'DB30F4D1112C20DFA736F65458F89C64'}>
"""
try:
if type(reqbody) == type(dict):
return self.parse_form_request(reqbody)
if callable(dec_func):
req_msg = json.loads(dec_func(reqbody))
else:
req_msg = json.loads(reqbody)
except Exception as err:
raise ParseError(u"parse params error")
if not check_sign(api_secret, req_msg):
raise SignError(u"message sign error")
return Storage(req_msg)
def request(apiurl, data=None, **kwargs):
headers = {"Content-Type": ["application/json"]}
return httpclient.post(apiurl, data=data, **kwargs)
if __name__ == "__main__":
# print apistatus
# import doctest
# doctest.testmod()
#isp_name=123&isp_email=222@222.com&isp_idcard=&isp_desc=&isp_phone=22222222222&sign=9E5139D66E8E5C10634A3E96631BCF
params = ['123','222@222.com','22222222222']
print make_sign('LpWE9AtfDPQ3ufXBS6gJ37WW8TnSF920',params)
|
talkincode/toughlib | toughlib/btforms/net.py | validipaddr | python | def validipaddr(address):
try:
octets = address.split('.')
if len(octets) != 4:
return False
for x in octets:
if not (0 <= int(x) <= 255):
return False
except ValueError:
return False
return True | Returns True if `address` is a valid IPv4 address.
>>> validipaddr('192.168.1.1')
True
>>> validipaddr('192.168.1.800')
False
>>> validipaddr('192.168.1')
False | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/btforms/net.py#L14-L34 | null | #!/usr/bin/env python
#coding:utf-8
__all__ = [
"validipaddr", "validipport", "validip", "validaddr",
"urlquote",
"httpdate", "parsehttpdate",
"htmlquote", "htmlunquote", "websafe",
]
import urllib, time
try: import datetime
except ImportError: pass
def validipaddr(address):
"""
Returns True if `address` is a valid IPv4 address.
>>> validipaddr('192.168.1.1')
True
>>> validipaddr('192.168.1.800')
False
>>> validipaddr('192.168.1')
False
"""
try:
octets = address.split('.')
if len(octets) != 4:
return False
for x in octets:
if not (0 <= int(x) <= 255):
return False
except ValueError:
return False
return True
def validipport(port):
"""
Returns True if `port` is a valid IPv4 port.
>>> validipport('9000')
True
>>> validipport('foo')
False
>>> validipport('1000000')
False
"""
try:
if not (0 <= int(port) <= 65535):
return False
except ValueError:
return False
return True
def validip(ip, defaultaddr="0.0.0.0", defaultport=8080):
"""Returns `(ip_address, port)` from string `ip_addr_port`"""
addr = defaultaddr
port = defaultport
ip = ip.split(":", 1)
if len(ip) == 1:
if not ip[0]:
pass
elif validipaddr(ip[0]):
addr = ip[0]
elif validipport(ip[0]):
port = int(ip[0])
else:
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
elif len(ip) == 2:
addr, port = ip
if not validipaddr(addr) and validipport(port):
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
port = int(port)
else:
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
return (addr, port)
def validaddr(string_):
"""
Returns either (ip_address, port) or "/path/to/socket" from string_
>>> validaddr('/path/to/socket')
'/path/to/socket'
>>> validaddr('8000')
('0.0.0.0', 8000)
>>> validaddr('127.0.0.1')
('127.0.0.1', 8080)
>>> validaddr('127.0.0.1:8000')
('127.0.0.1', 8000)
>>> validaddr('fff')
Traceback (most recent call last):
...
ValueError: fff is not a valid IP address/port
"""
if '/' in string_:
return string_
else:
return validip(string_)
def urlquote(val):
"""
Quotes a string for use in a URL.
>>> urlquote('://?f=1&j=1')
'%3A//%3Ff%3D1%26j%3D1'
>>> urlquote(None)
''
>>> urlquote(u'\u203d')
'%E2%80%BD'
"""
if val is None: return ''
if not isinstance(val, unicode): val = str(val)
else: val = val.encode('utf-8')
return urllib.quote(val)
def httpdate(date_obj):
"""
Formats a datetime object for use in HTTP headers.
>>> import datetime
>>> httpdate(datetime.datetime(1970, 1, 1, 1, 1, 1))
'Thu, 01 Jan 1970 01:01:01 GMT'
"""
return date_obj.strftime("%a, %d %b %Y %H:%M:%S GMT")
def parsehttpdate(string_):
"""
Parses an HTTP date into a datetime object.
>>> parsehttpdate('Thu, 01 Jan 1970 01:01:01 GMT')
datetime.datetime(1970, 1, 1, 1, 1, 1)
"""
try:
t = time.strptime(string_, "%a, %d %b %Y %H:%M:%S %Z")
except ValueError:
return None
return datetime.datetime(*t[:6])
def htmlquote(text):
r"""
Encodes `text` for raw use in HTML.
>>> htmlquote(u"<'&\">")
u'<'&">'
"""
text = text.replace(u"&", u"&") # Must be done first!
text = text.replace(u"<", u"<")
text = text.replace(u">", u">")
text = text.replace(u"'", u"'")
text = text.replace(u'"', u""")
return text
def htmlunquote(text):
r"""
Decodes `text` that's HTML quoted.
>>> htmlunquote(u'<'&">')
u'<\'&">'
"""
text = text.replace(u""", u'"')
text = text.replace(u"'", u"'")
text = text.replace(u">", u">")
text = text.replace(u"<", u"<")
text = text.replace(u"&", u"&") # Must be done last!
return text
def websafe(val):
r"""Converts `val` so that it is safe for use in Unicode HTML.
>>> websafe("<'&\">")
u'<'&">'
>>> websafe(None)
u''
>>> websafe(u'\u203d')
u'\u203d'
>>> websafe('\xe2\x80\xbd')
u'\u203d'
"""
if val is None:
return u''
elif isinstance(val, str):
val = val.decode('utf-8')
elif not isinstance(val, unicode):
val = unicode(val)
return htmlquote(val)
if __name__ == "__main__":
import doctest
doctest.testmod()
|
talkincode/toughlib | toughlib/btforms/net.py | validip | python | def validip(ip, defaultaddr="0.0.0.0", defaultport=8080):
addr = defaultaddr
port = defaultport
ip = ip.split(":", 1)
if len(ip) == 1:
if not ip[0]:
pass
elif validipaddr(ip[0]):
addr = ip[0]
elif validipport(ip[0]):
port = int(ip[0])
else:
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
elif len(ip) == 2:
addr, port = ip
if not validipaddr(addr) and validipport(port):
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
port = int(port)
else:
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
return (addr, port) | Returns `(ip_address, port)` from string `ip_addr_port` | train | https://github.com/talkincode/toughlib/blob/1c2f7dde3a7f101248f1b5f5d428cc85466995cf/toughlib/btforms/net.py#L54-L76 | [
"def validipaddr(address):\n \"\"\"\n Returns True if `address` is a valid IPv4 address.\n\n >>> validipaddr('192.168.1.1')\n True\n >>> validipaddr('192.168.1.800')\n False\n >>> validipaddr('192.168.1')\n False\n \"\"\"\n try:\n octets = address.split('... | #!/usr/bin/env python
#coding:utf-8
__all__ = [
"validipaddr", "validipport", "validip", "validaddr",
"urlquote",
"httpdate", "parsehttpdate",
"htmlquote", "htmlunquote", "websafe",
]
import urllib, time
try: import datetime
except ImportError: pass
def validipaddr(address):
"""
Returns True if `address` is a valid IPv4 address.
>>> validipaddr('192.168.1.1')
True
>>> validipaddr('192.168.1.800')
False
>>> validipaddr('192.168.1')
False
"""
try:
octets = address.split('.')
if len(octets) != 4:
return False
for x in octets:
if not (0 <= int(x) <= 255):
return False
except ValueError:
return False
return True
def validipport(port):
"""
Returns True if `port` is a valid IPv4 port.
>>> validipport('9000')
True
>>> validipport('foo')
False
>>> validipport('1000000')
False
"""
try:
if not (0 <= int(port) <= 65535):
return False
except ValueError:
return False
return True
def validip(ip, defaultaddr="0.0.0.0", defaultport=8080):
"""Returns `(ip_address, port)` from string `ip_addr_port`"""
addr = defaultaddr
port = defaultport
ip = ip.split(":", 1)
if len(ip) == 1:
if not ip[0]:
pass
elif validipaddr(ip[0]):
addr = ip[0]
elif validipport(ip[0]):
port = int(ip[0])
else:
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
elif len(ip) == 2:
addr, port = ip
if not validipaddr(addr) and validipport(port):
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
port = int(port)
else:
raise ValueError, ':'.join(ip) + ' is not a valid IP address/port'
return (addr, port)
def validaddr(string_):
"""
Returns either (ip_address, port) or "/path/to/socket" from string_
>>> validaddr('/path/to/socket')
'/path/to/socket'
>>> validaddr('8000')
('0.0.0.0', 8000)
>>> validaddr('127.0.0.1')
('127.0.0.1', 8080)
>>> validaddr('127.0.0.1:8000')
('127.0.0.1', 8000)
>>> validaddr('fff')
Traceback (most recent call last):
...
ValueError: fff is not a valid IP address/port
"""
if '/' in string_:
return string_
else:
return validip(string_)
def urlquote(val):
"""
Quotes a string for use in a URL.
>>> urlquote('://?f=1&j=1')
'%3A//%3Ff%3D1%26j%3D1'
>>> urlquote(None)
''
>>> urlquote(u'\u203d')
'%E2%80%BD'
"""
if val is None: return ''
if not isinstance(val, unicode): val = str(val)
else: val = val.encode('utf-8')
return urllib.quote(val)
def httpdate(date_obj):
"""
Formats a datetime object for use in HTTP headers.
>>> import datetime
>>> httpdate(datetime.datetime(1970, 1, 1, 1, 1, 1))
'Thu, 01 Jan 1970 01:01:01 GMT'
"""
return date_obj.strftime("%a, %d %b %Y %H:%M:%S GMT")
def parsehttpdate(string_):
"""
Parses an HTTP date into a datetime object.
>>> parsehttpdate('Thu, 01 Jan 1970 01:01:01 GMT')
datetime.datetime(1970, 1, 1, 1, 1, 1)
"""
try:
t = time.strptime(string_, "%a, %d %b %Y %H:%M:%S %Z")
except ValueError:
return None
return datetime.datetime(*t[:6])
def htmlquote(text):
r"""
Encodes `text` for raw use in HTML.
>>> htmlquote(u"<'&\">")
u'<'&">'
"""
text = text.replace(u"&", u"&") # Must be done first!
text = text.replace(u"<", u"<")
text = text.replace(u">", u">")
text = text.replace(u"'", u"'")
text = text.replace(u'"', u""")
return text
def htmlunquote(text):
r"""
Decodes `text` that's HTML quoted.
>>> htmlunquote(u'<'&">')
u'<\'&">'
"""
text = text.replace(u""", u'"')
text = text.replace(u"'", u"'")
text = text.replace(u">", u">")
text = text.replace(u"<", u"<")
text = text.replace(u"&", u"&") # Must be done last!
return text
def websafe(val):
r"""Converts `val` so that it is safe for use in Unicode HTML.
>>> websafe("<'&\">")
u'<'&">'
>>> websafe(None)
u''
>>> websafe(u'\u203d')
u'\u203d'
>>> websafe('\xe2\x80\xbd')
u'\u203d'
"""
if val is None:
return u''
elif isinstance(val, str):
val = val.decode('utf-8')
elif not isinstance(val, unicode):
val = unicode(val)
return htmlquote(val)
if __name__ == "__main__":
import doctest
doctest.testmod()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.