code
stringlengths 1
199k
|
|---|
"""
This Bot gives you information about projects and other Goteo stuff by using its
api: https://api.goteo.org/
First, a few handler functions are defined. Then, those functions are passed to
the Dispatcher and registered at their respective places.
Then, the bot is started and runs until we press Ctrl-C on the command line.
Usage:
...TODO...
Press Ctrl-C on the command line or send a signal to the process to stop the
bot.
"""
from telegram.ext import Updater
from telegram import ParseMode
import logging
import requests
from email.utils import parsedate_to_datetime
from datetime import datetime, timedelta
import config
# Basic logging setup: timestamped records at INFO level.
logging.basicConfig(
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    level=logging.INFO)
logger = logging.getLogger(__name__)

# Module-level state shared by the handlers below.
job_queue = None  # set in main() from the Updater's job queue
last_id = {}  # highest invest id seen so far, keyed by project id
last_date = datetime.now() - timedelta(days=1)  # start polling one day back
def get_project(project):
    """Fetch a single project from the Goteo API.

    :param project: project id as used by the API
    :return: the decoded JSON dict for the project, or None on any
             non-200 response
    """
    url = config.API_URL + '/projects/' + project
    response = requests.get(url, auth=(config.API_USER, config.API_KEY))
    logger.info("REQUEST %s %s" % (url, response))
    if response.status_code != 200:
        return None
    return response.json()
def get_invests(project_id):
    """Query the Goteo API for the latest invests since ``last_date``.

    :param project_id: project to filter by, or the literal '*' for all
                       projects (no filter is sent in that case)
    :return: list of invest items, oldest first, or None when the request
             fails or returns no items
    """
    payload = {
        'limit': 5,
        'from_date': last_date.strftime('%Y-%m-%d')
    }
    # Fixed two bugs here: the original used `is not '*'` (identity
    # comparison against a string literal, which is unreliable), and left a
    # trailing comma that turned the parameter value into a 1-tuple.
    if project_id != '*':
        payload['project'] = project_id
    r = requests.get(config.API_URL + '/invests/', params=payload, auth=(config.API_USER, config.API_KEY))
    logger.info("REQUEST %s %s PARAMS %s" % (config.API_URL + '/invests/', r, payload))
    if r.status_code == 200:
        invests = r.json()
        if 'items' in invests and invests['items']:
            # API returns newest first; reverse so messages go out in order.
            return list(reversed(invests['items']))
    return None
def msg_yell(amount=0):
    """Pick an enthusiasm exclamation matching the contribution amount."""
    if amount > 100:
        return "Yabadabadu!"
    # Ordered thresholds, highest first; first match wins.
    for limit, phrase in ((100, "Amazing!"),
                          (50, "Great!"),
                          (20, "Good!"),
                          (10, "Cool!"),
                          (5, "Not bad!")):
        if amount >= limit:
            return phrase
    return 'Ahem, that could be better...'
def msg_invest(invest):
    """Build the Markdown notification message for a single invest dict."""
    parts = ["*%s*" % msg_yell(invest['amount'])]
    parts.append(" A new *%i %s* contribution" % (invest['amount'], invest['currency']))
    if invest['region']:
        parts.append(" from _%s_" % invest['region'])
    if invest['project']:
        prj = get_project(invest['project'])
        if prj:
            parts.append(" to [%s](%s)" % (prj['name'], prj['project-url']))
        else:
            parts.append(" to *unknown project*")
    else:
        # Invests without a project go to the user's virtual wallet.
        parts.append(" to the *virtual wallet*")
    return ''.join(parts)
def filter_new_invests(invests):
    """Keep only invests newer than the last id seen for their project.

    Side effects: advances the module-level ``last_id`` map and
    ``last_date`` so the next poll only fetches newer invests.
    """
    global last_date
    fresh = []
    for invest in invests:
        logger.info("INVEST %s of %i %s" % (invest['id'], invest['amount'], invest['currency']))
        project = invest['project']
        last_seen = last_id.setdefault(project, 0)
        if invest['id'] > last_seen:
            fresh.append(invest)
            logger.info("ADDING INVEST %s" % invest['id'])
            last_id[project] = invest['id']
            last_date = parsedate_to_datetime(invest['date-invested'])
    return fresh
def start(bot, update):
    """Handler for /start and /help: reply with a short usage hint."""
    hint = ('Hi! Use /subscribe <project-id> to '
            'receive updates on every contribution')
    bot.sendMessage(update.message.chat_id, text=hint)
def subscribe(bot, update, args):
    """Handler for /subscribe <project-id>.

    Validates the project (the literal '*' subscribes to all projects),
    registers a repeating job that polls the API and pushes every new
    invest to this chat, then confirms the subscription to the user.
    """
    chat_id = update.message.chat_id
    try:
        # args[0] should contain the project to follow
        project_id = args[0]
        project = None
        if project_id != '*':
            project = get_project(project_id)
            if project is None:
                bot.sendMessage(chat_id, text='Sorry, project not found!')
                # Reuse the ValueError branch below to print the usage hint.
                raise ValueError;
        logger.info("NEW SUBSCRIPTION FOR [%s]" % project_id)
        def updates(bot):
            """ Inner function to send the updates message """
            invests = get_invests(project_id)
            if not invests:
                logger.info("NO INVESTS FOUND FOR [%s]" % project_id)
                return
            invests = filter_new_invests(invests)
            logger.info("TOTAL INVEST FOUND: %i" % len(invests))
            for invest in invests:
                logger.info("INVEST [%s] OF [%i %s] UPDATED [%s] FOR PROJECT [%s]" % (invest['id'],
                                                                                      invest['amount'],
                                                                                      invest['currency'],
                                                                                      invest['date-invested'],
                                                                                      invest['project']))
                bot.sendMessage(chat_id,
                                text=msg_invest(invest),
                                parse_mode=ParseMode.MARKDOWN)
        # Add job to queue; `updates` runs every POLL_FREQUENCY seconds
        # (old python-telegram-bot job-queue API — confirm against version).
        job_queue.put(updates, config.POLL_FREQUENCY)
        t = "Subscribed to all projects!"
        if project:
            t = "Subscribed for project %s" % project['name']
        bot.sendMessage(chat_id, text=t)
    except IndexError:
        # No project argument was given.
        bot.sendMessage(chat_id, text='Usage: /subscribe <project>')
    except ValueError:
        bot.sendMessage(chat_id, text='Usage: /subscribe <project>')
def error(bot, update, error):
    """Log any error the dispatcher reports for an update."""
    # Logger.warn() is a deprecated alias; warning() is the supported name.
    logger.warning('Update "%s" caused error "%s"' % (update, error))
def main():
    """Wire up the Telegram bot: handlers, error logging, polling loop."""
    global job_queue
    updater = Updater(config.BOT_TOKEN)
    # Expose the job queue so subscribe() can register polling jobs.
    job_queue = updater.job_queue
    # Get the dispatcher to register handlers
    dp = updater.dispatcher
    # on different commands - answer in Telegram
    dp.addTelegramCommandHandler("start", start)
    dp.addTelegramCommandHandler("help", start)
    dp.addTelegramCommandHandler("subscribe", subscribe)
    # log all errors
    dp.addErrorHandler(error)
    # Start the Bot
    updater.start_polling()
    # Block until Ctrl-C is pressed or the process receives SIGINT,
    # SIGTERM or SIGABRT. This should be used most of the time, since
    # start_polling() is non-blocking and will stop the bot gracefully.
    updater.idle()
# Run the bot when executed as a script.
if __name__ == '__main__':
    main()
|
from Tank import Tank, TankType
from Config import TANK_WIDTH, TANK_HEIGHT, BASIC_TANK_SPEED, BASIC_TANK_ARMOR, BASIC_TANK_ATTACK
from Resources import BASIC_TANK_UP, BASIC_TANK_RIGHT, BASIC_TANK_DOWN, BASIC_TANK_LEFT
class BasicTank(Tank):
    """Standard tank: default size/speed/armor/attack, one life.

    Each move_* method switches the sprite to the matching facing
    direction before delegating the actual movement to Tank.
    """

    def __init__(self, x, y):
        # Tank.__init__ argument order:
        # self, x, y, width, height, resources, speed, armor, attack, object_id, lives, type
        super(BasicTank, self).__init__(x, y, TANK_WIDTH, TANK_HEIGHT,
                                        BASIC_TANK_DOWN, BASIC_TANK_SPEED, BASIC_TANK_ARMOR, BASIC_TANK_ATTACK, 0, 1, TankType.BASIC)

    def move_up(self):
        # Face up, then move.
        self.resources = BASIC_TANK_UP
        super(BasicTank, self).move_up()

    def move_right(self):
        # Face right, then move.
        self.resources = BASIC_TANK_RIGHT
        super(BasicTank, self).move_right()

    def move_down(self):
        # Face down, then move.
        self.resources = BASIC_TANK_DOWN
        super(BasicTank, self).move_down()

    def move_left(self):
        # Face left, then move.
        self.resources = BASIC_TANK_LEFT
        super(BasicTank, self).move_left()
|
import numpy
import scipy.linalg
from sandbox.util.Parameter import Parameter
from apgl.kernel import *
from sandbox.util.Util import Util
"""
An implementation of the primal-dual Canonincal Correlation Analysis algorithm.
"""
class PrimalDualCCA(object):
    """Primal-dual Canonical Correlation Analysis.

    Learns dual directions (alpha) for X through a kernel and primal
    directions (V) for Y, with regularization parameters tau1/tau2.
    """

    def __init__(self, kernelX, tau1, tau2):
        """
        :param kernelX: kernel used on X (an AbstractKernel instance)
        :param tau1: regularization for the X view, in [0, 1]
        :param tau2: regularization for the Y view, in [0, 1]
        """
        Parameter.checkFloat(tau1, 0.0, 1.0)
        Parameter.checkFloat(tau2, 0.0, 1.0)
        Parameter.checkClass(kernelX, AbstractKernel)
        self.kernelX = kernelX
        self.tau1 = tau1
        self.tau2 = tau2

    def learnModel(self, X, Y):
        """
        Learn the CCA primal-dual directions.

        Builds the generalized eigenproblem A w = lambda B w from the
        kernel matrix of X and the covariance of Y, keeps eigenvectors
        with positive eigenvalues and normalizes them.
        """
        self.trainX = X
        self.trainY = Y
        numExamples = X.shape[0]
        numFeatures = Y.shape[1]

        a = 10**-5  # small ridge to keep the matrices well-conditioned
        I = numpy.eye(numExamples)
        I2 = numpy.eye(numFeatures)
        Kx = self.kernelX.evaluate(X, X) + a*I
        Kxx = numpy.dot(Kx, Kx)
        Kxy = numpy.dot(Kx, Y)
        Cyy = numpy.dot(Y.T, Y) + a*I2

        Z1 = numpy.zeros((numExamples, numExamples))
        Z2 = numpy.zeros((numFeatures, numFeatures))
        Z3 = numpy.zeros((numExamples, numFeatures))

        #Note we add a small value to the diagonal of A and B to deal with low-rank
        A = numpy.c_[Z1, Kxy]
        A1 = numpy.c_[Kxy.T, Z2]
        A = numpy.r_[A, A1]
        A = (A+A.T)/2  # symmetrize to guard against rounding asymmetry

        B = numpy.c_[(1-self.tau1)*Kxx - self.tau1*Kx, Z3]
        B1 = numpy.c_[Z3.T, (1-self.tau2)*Cyy - self.tau2*I2]
        B = numpy.r_[B, B1]
        B = (B+B.T)/2

        (D, W) = scipy.linalg.eig(A, B)

        #Only select eigenvalues which are greater than zero
        W = W[:, D>0]
        #We need to return those eigenvectors corresponding to positive eigenvalues
        self.alpha = W[0:numExamples, :]
        self.V = W[numExamples:, :]
        self.lmbdas = D[D>0]

        # Fixed: numpy.int was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin int is the documented replacement.
        alphaDiag = Util.mdot(self.alpha.T, Kxx, self.alpha)
        alphaDiag = alphaDiag + numpy.array(alphaDiag < 0, int)
        vDiag = Util.mdot(self.V.T, Cyy, self.V)
        vDiag = vDiag + numpy.array(vDiag < 0, int)
        self.alpha = numpy.dot(self.alpha, numpy.diag(1/numpy.sqrt(numpy.diag(alphaDiag))))
        self.V = numpy.dot(self.V, numpy.diag(1/numpy.sqrt(numpy.diag(vDiag))))

        return self.alpha, self.V, self.lmbdas

    def project(self, testX, testY, k=None):
        """Project test data onto the first k learned directions (all by default)."""
        if k is None:  # fixed: comparison with None must use `is`, not ==
            k = self.alpha.shape[1]
        testTrainKx = self.kernelX.evaluate(testX, self.trainX)
        return numpy.dot(testTrainKx, self.alpha[:, 0:k]), numpy.dot(testY, self.V[:, 0:k])
|
import urllib,urllib2,sys,platform,os,re
class bcolors:
    # ANSI escape sequences for coloured terminal output.
    HEADER = '\033[95m'
    OKGREEN = '\033[92m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset all attributes
    BOLD = '\033[1m'
# Python 2 script: compares the local githubcli version against the one on
# GitHub and offers to update. The version string lives on line 3 of the
# file, embedded between '#' markers as e.g. "#v 1.2#".
ghc=open("githubcli","r")
code=ghc.read()
ghc.close()
ln=1
for line in code.splitlines():
    if ln==3:
        # Extract the local version from line 3.
        av=''.join(re.findall(r'#(.*?)#',line)).replace("v ","")
        break
    else:
        ln=ln+1
vl="https://raw.githubusercontent.com/Bytezz/GitHub-CLI/master/githubcli"
print "Connecting..."
try:
    urllib2.urlopen(vl)
    print "Success."
    site=urllib.urlopen(vl)
    page=site.read()
    ln=1
    for line in page.splitlines():
        if ln==3:
            # Extract the remote version the same way and compare.
            line=line.replace("#","").replace("v ","")
            print "Actual version:",bcolors.BOLD+bcolors.OKGREEN+line+bcolors.ENDC
            if line==av:
                print "Your version:",bcolors.BOLD+bcolors.OKGREEN+av
                print "Your software is at the latest version."+bcolors.ENDC
            else:
                print "Your version:",bcolors.BOLD+bcolors.FAIL+av+bcolors.ENDC
                # Prompt until the user gives a valid yes/no answer.
                while True:
                    up=raw_input("Update? [y or n]: ")
                    if up.upper()=="Y" or up.upper()=="YES":
                        if platform.system()=="Linux":
                            try:
                                print "Update..."
                                os.system("git clone https://github.com/Bytezz/GitHub-CLI temp && cd temp/ && mv ../temp/* .. && rm -rf ../temp && make reinstall")
                                print "(If reinstall not completed, type:)"
                                print "sudo make reinstall"
                                print "Completed."
                            except:
                                print "Git not installed."
                                print "Go here for download:"
                                print "https://github.com/Bytezz/GitHub-CLI"
                            sys.exit()
                        else:
                            # Non-Linux systems: manual download only.
                            print "Go here for download:"
                            print "https://github.com/Bytezz/GitHub-CLI"
                            sys.exit()
                    elif up.upper()=="N" or up.upper()=="NO":
                        print "Don't update."
                        sys.exit()
                    else:
                        print "Error. Retry."
        elif ln>3:
            break
        ln+=1
except urllib2.HTTPError, e:
    print "Error:"
    print(e.code)
except urllib2.URLError, e:
    print bcolors.FAIL+bcolors.BOLD+"Error:"+bcolors.ENDC
    print(e.args)
|
from application import db
from application.model.operational import Ambulance
from application.model.workforce import ParamedicTeam
def most_dispatched_ambulance():
    """Return (Ambulance, dispatch_count) for the most-dispatched ambulance."""
    query = "SELECT TOP 1 id_ambulance, COUNT(*) FROM dispatch GROUP BY id_ambulance ORDER BY count(*) DESC"
    rows = db.engine.execute(query).fetchall()
    ambulance_id, dispatch_count = rows[0]
    return Ambulance.query.get(ambulance_id), dispatch_count
def most_profitable_ambulance():
    """Return (Ambulance, total_fees) for the ambulance with the highest fee sum."""
    query = "SELECT TOP 1 id_ambulance, SUM(fee) FROM dispatch GROUP BY id_ambulance ORDER BY SUM(fee) DESC"
    rows = db.engine.execute(query).fetchall()
    ambulance_id, total_fees = rows[0]
    return Ambulance.query.get(ambulance_id), total_fees
def most_dispatched_paramedic_team():
    """Return (ParamedicTeam, dispatch_count) for the most-dispatched team."""
    query = "SELECT TOP 1 id_params_team, COUNT(*) FROM dispatch GROUP BY id_params_team ORDER BY count(*) DESC"
    rows = db.engine.execute(query).fetchall()
    team_id, dispatch_count = rows[0]
    return ParamedicTeam.query.get(team_id), dispatch_count
def most_profitable_paramedic_team():
    """Return (ParamedicTeam, total_fees) for the team with the highest fee sum."""
    query = "SELECT TOP 1 id_params_team, SUM(fee) FROM dispatch GROUP BY id_params_team ORDER BY SUM(fee) DESC"
    rows = db.engine.execute(query).fetchall()
    team_id, total_fees = rows[0]
    return ParamedicTeam.query.get(team_id), total_fees
def top_ambulances():
    """Top 5 ambulances by total fees as (plate_number, dispatches, fees) rows.

    Only rows with status = 4 are counted — presumably "completed"
    dispatches; confirm against the dispatch status enum.
    """
    query = "SELECT TOP 5 plate_number, COUNT(*), SUM(fee) " \
            "FROM dispatch " \
            "INNER JOIN ambulance " \
            "ON dispatch.id_ambulance=ambulance.id_ambulance " \
            "GROUP BY plate_number, status HAVING status = 4 ORDER BY SUM(fee) DESC"
    return db.engine.execute(query).fetchall()
def top_teams():
    """Top 5 paramedic teams by total fees as (team_id, type, fees) rows."""
    query = "SELECT TOP 5 dispatch.id_params_team, type, SUM(fee) " \
            "FROM dispatch " \
            "INNER JOIN paramedics_team " \
            "ON dispatch.id_params_team=paramedics_team.id_params_team " \
            "GROUP BY dispatch.id_params_team, type ORDER BY SUM(fee) DESC"
    return db.engine.execute(query).fetchall()
|
import datetime
import sys
import numpy as np
import tensorflow as tf
import tensorflow.contrib.layers as tf_layers
import tensorflow.contrib.losses as tf_losses
import forecast_dataset
class Network:
    """TF1 feed-forward regressor for consumption forecasting.

    Embeds product and project ids, concatenates them with the numeric
    features and regresses the gold consumption, optimizing either MSE or
    an absolute-difference ("quantile"/median) loss.
    """

    def __init__(self, args, data_train):
        self.args = args

        # Create an empty graph and a session
        graph = tf.Graph()
        graph.seed = args.seed
        self.session = tf.Session(graph=graph, config=tf.ConfigProto(
            inter_op_parallelism_threads=args.threads,
            intra_op_parallelism_threads=args.threads))

        # Construct the graph
        with self.session.graph.as_default():
            self.global_step = tf.Variable(0, dtype=tf.int64, trainable=False, name="global_step")
            # [None] is the shape, gold is a 1-dimensional tensor, the dimension is the batch size
            self.gold = tf.placeholder(tf.float32, [None], "gold_consumption")
            self.prod_id = tf.placeholder(tf.int32, [None], "product_id")
            self.proj_id = tf.placeholder(tf.int32, [None], "project_id")
            # consumption from previous months etc
            self.features = tf.placeholder(tf.float32, [None, data_train.features_size()],
                                           "features")

            # Trainable embedding tables for products and projects.
            n_prods = len(data_train.products)
            n_projs = len(data_train.projects)
            prod_embeddings_matrix = tf.get_variable("prod_embeddings_matrix",
                                                     [n_prods, args.prod_dim], dtype=tf.float32)
            proj_embeddings_matrix = tf.get_variable("proj_embeddings_matrix",
                                                     [n_projs, args.proj_dim], dtype=tf.float32)
            # [batch_size, prod_dim] = float32
            prod_embeddings = tf.nn.embedding_lookup(prod_embeddings_matrix, self.prod_id)
            proj_embeddings = tf.nn.embedding_lookup(proj_embeddings_matrix, self.proj_id)
            # concat all the inputs along the 1st dimension (keep the 0th dimension as batch_size)
            layer = tf.concat(1, [prod_embeddings, proj_embeddings, self.features])
            # TODO try more layers, other activation_fns, dropout,...
            if args.hidden_dim:
                layer = tf_layers.fully_connected(layer, num_outputs=args.hidden_dim,
                                                  activation_fn=tf.nn.relu, scope="hidden_layer")
            layer = tf_layers.linear(layer, num_outputs=1, scope="output_layer")
            self.predictions = tf.reshape(layer, [-1])

            differences = self.predictions - self.gold
            self.mse = tf.reduce_mean(tf.square(differences))
            # TODO real quantile_loss with an argparse parameter tau, now it is just the median
            self.quantile = tf_losses.absolute_difference(self.predictions, self.gold)

            # TODO try optimizing a different loss
            optimizer = tf.train.AdamOptimizer()
            loss = self.mse if args.loss == 'mse' else self.quantile
            #self.gradient = optimizer.compute_gradients(loss)
            # TODO: report the gradient to summaries, do gradient clipping
            #self.training = optimizer.apply_gradients(self.gradient, global_step=self.global_step)
            self.training = optimizer.minimize(loss, global_step=self.global_step)

            # Separate summary ops so train/dev metrics land in distinct tags.
            self.train_summary = tf.summary.merge(
                [tf.summary.scalar("train/mse", self.mse),
                 tf.summary.scalar("train/quantile", self.quantile)])
            self.dev_summary = tf.summary.merge(
                [tf.summary.scalar("dev/mse", self.mse),
                 tf.summary.scalar("dev/quantile", self.quantile)])

            # Initialize variables
            self.session.run(tf.global_variables_initializer())

        # Derive a default experiment name from the hyper-parameters.
        timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S")
        if args.expname is None:
            args.expname = "proj{}-prod{}-hid{}-{}-bs{}-epochs{}".format(
                args.proj_dim, args.prod_dim, args.hidden_dim, args.loss, args.batch_size,
                args.epochs)
        self.summary_writer = tf.summary.FileWriter("{}/{}-{}".format(
            args.logdir, timestamp, args.expname), graph=self.session.graph, flush_secs=10)

    @property
    def training_step(self):
        """Current value of the global step counter."""
        return self.session.run(self.global_step)

    def train(self, data_train):
        """Run one optimizer step on the next training batch and log summaries."""
        features, prod_id, proj_id, gold = data_train.next_batch(self.args.batch_size)
        feed_dict = {self.prod_id: prod_id, self.proj_id: proj_id, self.gold: gold,
                     self.features: features}
        _, summ = self.session.run([self.training, self.train_summary], feed_dict)
        self.summary_writer.add_summary(summ, self.training_step)

    def evaluate(self, data_dev):
        """Evaluate on the whole dev set; returns (mse, quantile_loss)."""
        features, prod_id, proj_id, gold = data_dev.whole_data_as_batch()
        feed_dict = {self.prod_id: prod_id, self.proj_id: proj_id, self.gold: gold,
                     self.features: features}
        mse, quantile, summ = self.session.run([self.mse, self.quantile, self.dev_summary],
                                               feed_dict)
        self.summary_writer.add_summary(summ, self.training_step)
        return mse, quantile

    def predict(self, data_test):
        """Return predictions for the whole test set (gold values are ignored)."""
        features, prod_id, proj_id, _ = data_test.whole_data_as_batch()
        feed_dict = {self.prod_id: prod_id, self.proj_id: proj_id,
                     self.features: features}
        return self.session.run(self.predictions, feed_dict)
def main():
    """Parse arguments, train the network, and print test predictions."""
    import argparse
    parser = argparse.ArgumentParser()
    # General arguments
    parser.add_argument("--batch_size", default=32, type=int, help="Batch size.")
    parser.add_argument("--data_train", default="data/tableTrainingImputed.csv",
                        type=str, help="Training data file.")
    parser.add_argument("--data_dev", default="data/tableTestingImputed.csv",
                        type=str, help="Development data file.")
    parser.add_argument("--data_test", default="data/tableTestingImputed.csv",
                        type=str, help="Test data file.")
    parser.add_argument("--epochs", default=10, type=int, help="Number of epochs.")
    parser.add_argument("--logdir", default="logs", type=str, help="Logdir name.")
    parser.add_argument("--threads", default=0, type=int, help="Number of threads to use. 0=all.")
    parser.add_argument("--seed", default=42, type=int, help="Random seed.")
    parser.add_argument("--expname", default=None, type=str, help="Experiment name.")
    # Project-specific arguments
    parser.add_argument("--loss", default='mse', type=str, help="Optimization loss (mse,quantile).")
    parser.add_argument("--proj_dim", default=20, type=int, help="Project embeddings dimension.")
    parser.add_argument("--prod_dim", default=20, type=int, help="Product embeddings dimension.")
    parser.add_argument("--hidden_dim", default=20, type=int, help="Hidden layer dimension.")
    args = parser.parse_args()

    # Fix random seed
    np.random.seed(args.seed)

    print("Loading the data.", file=sys.stderr)
    train_set = forecast_dataset.ForecastDataset(args.data_train)
    dev_set = forecast_dataset.ForecastDataset(args.data_dev, train_set)
    test_set = forecast_dataset.ForecastDataset(args.data_test, train_set)

    print("Constructing the network.", file=sys.stderr)
    network = Network(args, data_train=train_set)

    # Train for the requested number of epochs, keeping the test predictions
    # from the epoch with the best dev MSE.
    best_dev_mse = float('Inf')
    test_predictions = None
    print("Training...", file=sys.stderr)
    for epoch in range(args.epochs):
        while not train_set.epoch_finished():
            network.train(train_set)
        dev_mse, dev_quantile = network.evaluate(dev_set)
        print("Epoch {:3d}: dev mse={:.4f} quantile_loss={:.4f}".format(
            epoch + 1, dev_mse, dev_quantile), file=sys.stderr)
        if dev_mse < best_dev_mse:
            best_dev_mse = dev_mse
            test_predictions = network.predict(test_set)

    # Print test predictions
    for i in range(len(test_set.data)):
        print("{:.6f}".format(test_predictions[i]))
|
import sigrokdecode as srd
class SamplerateError(Exception):
    """Raised when decoding is attempted without a samplerate configured."""
class Decoder(srd.Decoder):
    """S/PDIF protocol decoder.

    First recovers the bit clock by sampling three distinct pulse widths,
    then decodes preambles (B/M/W) and 28-bit subframes into aux data,
    audio samples, validity, subcode, channel-status and parity bits.
    """
    api_version = 3
    id = 'spdif'
    name = 'S/PDIF'
    longname = 'Sony/Philips Digital Interface Format'
    desc = 'Serial bus for connecting digital audio devices.'
    license = 'gplv2+'
    inputs = ['logic']
    outputs = ['spdif']
    channels = (
        {'id': 'data', 'name': 'Data', 'desc': 'Data line'},
    )
    annotations = (
        ('bitrate', 'Bitrate / baudrate'),
        ('preamble', 'Preamble'),
        ('bits', 'Bits'),
        ('aux', 'Auxillary-audio-databits'),
        ('samples', 'Audio Samples'),
        ('validity', 'Data Valid'),
        ('subcode', 'Subcode data'),
        ('chan_stat', 'Channnel Status'),
        ('parity', 'Parity Bit'),
    )
    annotation_rows = (
        ('info', 'Info', (0, 1, 3, 5, 6, 7, 8)),
        ('bits', 'Bits', (2,)),
        ('samples', 'Samples', (4,)),
    )

    def putx(self, ss, es, data):
        # Emit an annotation over an explicit sample range.
        self.put(ss, es, self.out_ann, data)

    def puty(self, data):
        # Emit an annotation from the stored start edge to the current sample.
        self.put(self.ss_edge, self.samplenum, self.out_ann, data)

    def __init__(self):
        # Clock-recovery state machine starts by collecting pulse widths.
        self.state = 'GET FIRST PULSE WIDTH'
        self.ss_edge = None
        self.first_edge = True
        self.samplenum_prev_edge = 0
        self.pulse_width = 0
        # Three reference pulse widths and the derived decision thresholds.
        self.clocks = []
        self.range1 = 0
        self.range2 = 0
        self.preamble_state = 0
        self.preamble = []
        self.seen_preamble = False
        self.last_preamble = 0
        self.first_one = True
        self.subframe = []

    def start(self):
        self.out_ann = self.register(srd.OUTPUT_ANN)
        # Assume that the initial pin state is logic 0.
        self.initial_pins = [0]

    def metadata(self, key, value):
        if key == srd.SRD_CONF_SAMPLERATE:
            self.samplerate = value

    def get_pulse_type(self):
        # Classify the current pulse width: 2 = long, 0 = medium, 1 = short,
        # -1 while the thresholds have not been calibrated yet.
        if self.range1 == 0 or self.range2 == 0:
            return -1
        if self.pulse_width >= self.range2:
            return 2
        elif self.pulse_width >= self.range1:
            return 0
        else:
            return 1

    def find_first_pulse_width(self):
        # Record the first non-zero pulse width as the initial reference.
        if self.pulse_width != 0:
            self.clocks.append(self.pulse_width)
            self.state = 'GET SECOND PULSE WIDTH'

    def find_second_pulse_width(self):
        # Accept only a width clearly different (>30%) from the first one.
        if self.pulse_width > (self.clocks[0] * 1.3) or \
                self.pulse_width < (self.clocks[0] * 0.7):
            self.clocks.append(self.pulse_width)
            self.state = 'GET THIRD PULSE WIDTH'

    def find_third_pulse_width(self):
        # Accept only a width clearly different from both previous ones.
        if not ((self.pulse_width > (self.clocks[0] * 1.3) or \
                self.pulse_width < (self.clocks[0] * 0.7)) \
                and (self.pulse_width > (self.clocks[1] * 1.3) or \
                self.pulse_width < (self.clocks[1] * 0.7))):
            return

        self.clocks.append(self.pulse_width)
        self.clocks.sort()
        # Midpoints between the sorted short/medium/long widths become the
        # classification thresholds used by get_pulse_type().
        self.range1 = (self.clocks[0] + self.clocks[1]) / 2
        self.range2 = (self.clocks[1] + self.clocks[2]) / 2
        spdif_bitrate = int(self.samplerate / (self.clocks[2] / 1.5))
        self.ss_edge = 0

        self.puty([0, ['Signal Bitrate: %d Mbit/s (=> %d kHz)' % \
                  (spdif_bitrate, (spdif_bitrate/ (2 * 32)))]])

        # NOTE(review): clock_period_nsec is computed but never used.
        clock_period_nsec = 1000000000 / spdif_bitrate

        self.last_preamble = self.samplenum

        # We are done recovering the clock, now let's decode the data stream.
        self.state = 'DECODE STREAM'

    def decode_stream(self):
        pulse = self.get_pulse_type()

        if not self.seen_preamble:
            # This is probably the start of a preamble, decode it.
            if pulse == 2:
                self.preamble.append(self.get_pulse_type())
                self.state = 'DECODE PREAMBLE'
                self.ss_edge = self.samplenum - self.pulse_width - 1
            return

        # We've seen a preamble.
        # Biphase-mark coding: two consecutive short pulses form a '1',
        # a single medium pulse is a '0'.
        if pulse == 1 and self.first_one:
            self.first_one = False
            self.subframe.append([pulse, self.samplenum - \
                self.pulse_width - 1, self.samplenum])
        elif pulse == 1 and not self.first_one:
            self.subframe[-1][2] = self.samplenum
            self.putx(self.subframe[-1][1], self.samplenum, [2, ['1']])
            self.bitcount += 1
            self.first_one = True
        else:
            self.subframe.append([pulse, self.samplenum - \
                self.pulse_width - 1, self.samplenum])
            self.putx(self.samplenum - self.pulse_width - 1,
                      self.samplenum, [2, ['0']])
            self.bitcount += 1

        if self.bitcount == 28:
            # A complete subframe: 4 aux bits, 20 sample bits, then the
            # validity, subcode, channel-status and parity bits.
            aux_audio_data = self.subframe[0:4]
            sam, sam_rot = '', ''
            for a in aux_audio_data:
                sam = sam + str(a[0])
                sam_rot = str(a[0]) + sam_rot
            sample = self.subframe[4:24]
            for s in sample:
                sam = sam + str(s[0])
                sam_rot = str(s[0]) + sam_rot
            validity = self.subframe[24:25]
            subcode_data = self.subframe[25:26]
            channel_status = self.subframe[26:27]
            parity = self.subframe[27:28]

            self.putx(aux_audio_data[0][1], aux_audio_data[3][2], \
                      [3, ['Aux 0x%x' % int(sam, 2), '0x%x' % int(sam, 2)]])
            self.putx(sample[0][1], sample[19][2], \
                      [3, ['Sample 0x%x' % int(sam, 2), '0x%x' % int(sam, 2)]])
            self.putx(aux_audio_data[0][1], sample[19][2], \
                      [4, ['Audio 0x%x' % int(sam_rot, 2), '0x%x' % int(sam_rot, 2)]])
            if validity[0][0] == 0:
                self.putx(validity[0][1], validity[0][2], [5, ['V']])
            else:
                self.putx(validity[0][1], validity[0][2], [5, ['E']])
            self.putx(subcode_data[0][1], subcode_data[0][2],
                      [6, ['S: %d' % subcode_data[0][0]]])
            self.putx(channel_status[0][1], channel_status[0][2],
                      [7, ['C: %d' % channel_status[0][0]]])
            self.putx(parity[0][1], parity[0][2], [8, ['P: %d' % parity[0][0]]])

            # Reset for the next subframe.
            self.subframe = []
            self.seen_preamble = False
            self.bitcount = 0

    def decode_preamble(self):
        # Collect three more pulses after the initial long one, then match
        # the 4-pulse pattern against the known W/M/B preambles.
        if self.preamble_state == 0:
            self.preamble.append(self.get_pulse_type())
            self.preamble_state = 1
        elif self.preamble_state == 1:
            self.preamble.append(self.get_pulse_type())
            self.preamble_state = 2
        elif self.preamble_state == 2:
            self.preamble.append(self.get_pulse_type())
            self.preamble_state = 0
            self.state = 'DECODE STREAM'
            if self.preamble == [2, 0, 1, 0]:
                self.puty([1, ['Preamble W', 'W']])
            elif self.preamble == [2, 2, 1, 1]:
                self.puty([1, ['Preamble M', 'M']])
            elif self.preamble == [2, 1, 1, 2]:
                self.puty([1, ['Preamble B', 'B']])
            else:
                self.puty([1, ['Unknown Preamble', 'Unknown Prea.', 'U']])
            self.preamble = []
            self.seen_preamble = True
            self.bitcount = 0
            self.first_one = True

        self.last_preamble = self.samplenum

    def decode(self):
        if not self.samplerate:
            raise SamplerateError('Cannot decode without samplerate.')

        # Throw away first detected edge as it might be mangled data.
        self.wait({0: 'e'})

        while True:
            # Wait for any edge (rising or falling).
            (data,) = self.wait({0: 'e'})
            self.pulse_width = self.samplenum - self.samplenum_prev_edge - 1
            self.samplenum_prev_edge = self.samplenum

            # Dispatch on the current state-machine state.
            if self.state == 'GET FIRST PULSE WIDTH':
                self.find_first_pulse_width()
            elif self.state == 'GET SECOND PULSE WIDTH':
                self.find_second_pulse_width()
            elif self.state == 'GET THIRD PULSE WIDTH':
                self.find_third_pulse_width()
            elif self.state == 'DECODE STREAM':
                self.decode_stream()
            elif self.state == 'DECODE PREAMBLE':
                self.decode_preamble()
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds a nullable host_name varchar(200) column via raw SQL; the second
    # argument to RunSQL is the reverse statement applied on rollback.

    dependencies = [
        ('ldap_people', '0005_rename_ldapperson_to_ldap_people_ldapperson'),
    ]

    operations = [
        migrations.RunSQL("ALTER TABLE ldap_people_ldapperson ADD COLUMN host_name varchar(200);",
                          "ALTER TABLE ldap_people_ldapperson DROP COLUMN host_name;"),
    ]
|
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class GetCharactersCharacterIdMailForbidden(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.

    Swagger model for the 403 (Forbidden) response body, which carries a
    single string attribute, ``error``.
    """

    def __init__(self, error=None):
        """
        GetCharactersCharacterIdMailForbidden - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
            and the value is attribute type.
        :param dict attributeMap: The key is attribute name
            and the value is json key in definition.
        """
        # attribute name -> swagger type
        self.swagger_types = {
            'error': 'str'
        }
        # attribute name -> JSON key in the API definition
        self.attribute_map = {
            'error': 'error'
        }
        self._error = error

    @property
    def error(self):
        """
        Gets the error of this GetCharactersCharacterIdMailForbidden.
        Forbidden message

        :return: The error of this GetCharactersCharacterIdMailForbidden.
        :rtype: str
        """
        return self._error

    @error.setter
    def error(self, error):
        """
        Sets the error of this GetCharactersCharacterIdMailForbidden.
        Forbidden message

        :param error: The error of this GetCharactersCharacterIdMailForbidden.
        :type: str
        """
        self._error = error

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        def _convert(value):
            # Recursively serialize nested models, lists and dicts.
            if isinstance(value, list):
                return [item.to_dict() if hasattr(item, "to_dict") else item
                        for item in value]
            if hasattr(value, "to_dict"):
                return value.to_dict()
            if isinstance(value, dict):
                return {key: val.to_dict() if hasattr(val, "to_dict") else val
                        for key, val in value.items()}
            return value

        return {attr: _convert(getattr(self, attr))
                for attr in self.swagger_types}

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        if not isinstance(other, GetCharactersCharacterIdMailForbidden):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
|
from ..paginated import Paginated
from ..utils import URL
class Top(Paginated):
    """
    Top torrents featuring category management.
    """

    base_path = '/top'

    def __init__(self, base_url, use_tor, category='0'):
        super(Top, self).__init__(use_tor=use_tor)
        # The category is a named URL segment so pagination preserves it.
        self.url = URL(
            base=base_url,
            path=self.base_path,
            segments=['category'],
            defaults=[str(category)]
        )

    def load_torrents(self, callback):
        # Thin delegation to Paginated.load_torrents, unchanged behavior.
        super(Top, self).load_torrents(callback)

    def category(self, category=None):
        """
        If category is given, modify the URL correspondingly, return the
        current category otherwise.
        """
        if category is None:
            return int(self.url.category)
        self.url.category = str(category)
|
import sys, time
from daemon import Daemon
class MyDaemon(Daemon):
    """Minimal example daemon: sleeps forever until stopped."""

    def run(self):
        # Daemon.start() calls this after daemonizing the process.
        while True:
            time.sleep(1)
# Python 2 command-line entry point: start/stop/restart the example daemon
# based on the single positional argument.
if __name__ == "__main__":
    daemon = MyDaemon('/tmp/daemon-example.pid')
    if len(sys.argv) == 2:
        if 'start' == sys.argv[1]:
            daemon.start()
        elif 'stop' == sys.argv[1]:
            daemon.stop()
        elif 'restart' == sys.argv[1]:
            daemon.restart()
        else:
            print "Unknown command"
            sys.exit(2)
        sys.exit(0)
    else:
        print "usage: %s start|stop|restart" % sys.argv[0]
        sys.exit(2)
|
import rospy, math
from geometry_msgs.msg import Twist
from pses_basis.msg import Command
from std_msgs.msg import Bool
def convert_motor_level(v):
    """Map a velocity command to the hardware motor level.

    Forward and reverse use separate calibration ranges (the *Back globals);
    zero velocity maps to level 0.
    """
    global maxSpeedCmd
    global minSpeedCmd
    global maxSpeedCmdBack
    global minSpeedCmdBack
    global maxVel
    global minVel
    if v == 0:
        return 0
    magnitude = math.fabs(v)
    if v < 0:
        # Reverse: scale the magnitude into the backward command range,
        # then negate the result.
        scale = (maxSpeedCmdBack - minSpeedCmdBack) / (maxVel - minVel)
        return -1 * (scale * (magnitude - minVel) + minSpeedCmdBack)
    # Forward: scale the magnitude into the forward command range.
    scale = (maxSpeedCmd - minSpeedCmd) / (maxVel - minVel)
    return scale * (magnitude - minVel) + minSpeedCmd
def convert_steering_level(angle):
    """Convert a steering angle in radians to the hardware steering level.

    The angle is converted to degrees, normalized by the maximum steering
    angle and scaled to the command range, plus a calibration offset.
    """
    # The `global` declarations were removed: they are unnecessary for
    # read-only access, and `straight_command` was declared but never used.
    return (180/math.pi) * angle / maxSteeringAngle * maxSteeringCmd + offsetSteeringCmd
def cmd_callback(data):
    """Translate an incoming Twist into a pses_basis Command and publish it."""
    global pub
    command = Command()
    command.header.stamp = rospy.Time.now()
    command.header.frame_id = 'odom'
    command.steering_level = convert_steering_level(data.angular.z)
    command.motor_level = convert_motor_level(data.linear.x)
    pub.publish(command)
def straight_cmd_callback(data):
    # Cache the latest "drive straight" message; it is only stored here —
    # no other function in this file reads it back.
    global straight_command
    straight_command = data
# Node entry point: subscribe to velocity commands, publish hardware
# commands, and read the calibration parameters from the parameter server.
if __name__ == "__main__":
    try:
        rospy.init_node("cmd_vel_to_pses_basis_command")
        rospy.Subscriber("/teb_cmd_vel", Twist, cmd_callback, queue_size=1)
        pub = rospy.Publisher("/pses_basis/command", Command, queue_size=1)
        # Calibration parameters; the *_back values fall back to the
        # forward values when not configured.
        maxSpeedCmd = rospy.get_param("/mephobia_controller/wall_follower/max_speed_cmd")
        maxSpeedCmdBack = rospy.get_param("/mephobia_controller/wall_follower/max_speed_cmd_back", maxSpeedCmd)
        maxVel = rospy.get_param("/mephobia_controller/wall_follower/max_vel")
        minSpeedCmd = rospy.get_param("/mephobia_controller/wall_follower/min_speed_cmd")
        minSpeedCmdBack = rospy.get_param("/mephobia_controller/wall_follower/min_speed_cmd_back", minSpeedCmd)
        minVel = rospy.get_param("/mephobia_controller/wall_follower/min_vel")
        maxSteeringCmd = rospy.get_param("/mephobia_controller/wall_follower/max_steering_cmd")
        maxSteeringAngle = rospy.get_param("/mephobia_controller/wall_follower/max_steering_angle")
        offsetSteeringCmd = rospy.get_param("/mephobia_controller/wall_follower/offset_steering_cmd")
        rospy.loginfo("Node 'cmd_vel_to_pses_basis_command' started.")
        rospy.spin()
    except rospy.ROSInterruptException:
        pass
|
import operator
from copy import deepcopy
import numpy as np
import scipy.optimize
from charge_method import ChargeMethodSkeleton
from charges import Charges
from statistics import calculate_statistics
from structures.molecule_set import MoleculeSet
def run_one_iter(data: np.ndarray, molecules: MoleculeSet, method: ChargeMethodSkeleton, ref_charges: Charges):
    """Objective function for the optimizer.

    Loads the packed parameter vector into the method, recomputes charges
    for every molecule and returns the RMSD against the reference charges.
    """
    method.parameters.load_packed(data)
    per_molecule = {molecule.name: method.calculate_charges(molecule)
                    for molecule in molecules}
    new_charges: Charges = Charges(per_molecule)
    return calculate_statistics(molecules, ref_charges, new_charges)
def one_process(molecules: MoleculeSet, method: ChargeMethodSkeleton, ref_charges: Charges):
    """Randomize the method's parameters and run one bounded L-BFGS-B fit."""
    method.parameters.set_random_values()
    method.parameters.print_parameters()
    initial_guess = method.parameters.pack_values()
    return scipy.optimize.minimize(
        run_one_iter,
        initial_guess,
        args=(molecules, method, ref_charges),
        method='L-BFGS-B',
        options={'maxiter': 10})
def parameterize(molecules: MoleculeSet, method: ChargeMethodSkeleton, ref_charges: Charges,
                 population_size: int = 1):
    """Fit the method's parameters against reference charges.

    Runs ``population_size`` independent random-restart optimizations on
    deep copies of the method and loads the best-scoring parameter set back
    into ``method`` (mutated in place; nothing is returned).

    :param population_size: number of random restarts; generalized from the
        previously hard-coded value of 1 (default keeps old behavior).
    """
    population = [deepcopy(method) for _ in range(population_size)]
    results = [one_process(molecules, m, ref_charges) for m in population]
    scores = [result.fun for result in results]
    index, _ = min(enumerate(scores), key=operator.itemgetter(1))
    method.parameters.load_packed(population[index].parameters.pack_values())
|
from vcsn_cxx import polynomial
from vcsn.tools import _is_equal, _right_mult
# Graft Python operator syntax onto the C++ polynomial bindings.
polynomial.__add__ = polynomial.sum
polynomial.__and__ = polynomial.conjunction
polynomial.__eq__ = _is_equal
polynomial.__mul__ = _right_mult
polynomial.__repr__ = lambda self: self.format('text')
polynomial._repr_latex_ = lambda self: '$' + self.format('latex') + '$'
def _polynomial_pow(p, k):
# FIXME: cannot generate the unit polynomial with the current API.
assert 0 < k
res = p
for i in range(1, k):
res *= p
return res
# Install the helper above as the ** operator for polynomials.
polynomial.__pow__ = _polynomial_pow
|
from collections import defaultdict
def read_dict(filename, token_field, tag_field):
    """Read the tagset and a token->tags dictionary from a TSV corpus.

    Returns ``(tags, norm_tags)`` where *tags* is the set of all tags seen
    and *norm_tags* maps each lowercased token to the set of its tags.
    Lines with too few tab-separated fields are skipped.
    """
    tagset = set()
    token_tags = defaultdict(set)
    needed_fields = max(token_field, tag_field)
    with open(filename, 'r', encoding='utf-8') as corpus:
        for raw_line in corpus:
            columns = raw_line.rstrip('\n').split('\t')
            if len(columns) <= needed_fields:
                continue
            tag = columns[tag_field]
            tagset.add(tag)
            token_tags[columns[token_field].lower()].add(tag)
    return tagset, token_tags
|
import typing
import attr
import numpy as np
from ._fwdpy11 import (GeneticValueIsTrait, GeneticValueNoise, _ll_Additive,
_ll_GaussianNoise, _ll_GBR, _ll_GSSmo,
_ll_Multiplicative, _ll_MultivariateGSSmo, _ll_NoNoise,
_ll_Optimum, _ll_PleiotropicOptima,
_ll_StrictAdditiveMultivariateEffects)
from .class_decorators import (attr_add_asblack, attr_class_pickle_with_super,
attr_class_to_from_dict,
attr_class_to_from_dict_no_recurse)
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class Optimum(_ll_Optimum):
    """
    Parameters for a trait optimum.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param optimum: The trait value
    :type optimum: float
    :param VS: Strength of stabilizing selection
    :type VS: float
    :param when: The time when the optimum shifts
    :type when: int or None
    .. note::
        When used to model a stable optimum (e.g.,
        :class:`fwdpy11.GSS`), the `when` parameter is omitted.
        The `when` parameter is used for moving optima
        (:class:`fwdpy11.GSSmo`).
    .. versionadded:: 0.7.1
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    optimum: float = attr.ib(validator=attr.validators.instance_of(float))
    VS: float = attr.ib(validator=attr.validators.instance_of(float))
    # Optional shift time; only validated when a value is given.
    when: typing.Optional[int] = attr.ib(default=None)
    @when.validator
    def validate_when(self, attribute, value):
        # attrs cannot express "int or None" with a bare instance_of
        # validator, so apply it manually for non-None values only.
        if value is not None:
            attr.validators.instance_of(int)(self, attribute, value)
    def __attrs_post_init__(self):
        # Forward the validated attrs values to the low-level C++ base.
        super(Optimum, self).__init__(self.optimum, self.VS, self.when)
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11", eq=False)
class PleiotropicOptima(_ll_PleiotropicOptima):
    """
    Parameters for multiple trait optima
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param optima: The trait values
    :type optima: List[float]
    :param VS: Strength of stabilizing selection
    :type VS: float
    :param when: The time when the optimum shifts
    :type when: int or None
    .. note::
        When used to model stable optima (e.g.,
        :class:`fwdpy11.MultivariateGSS`), the `when` parameter is omitted.
        The `when` parameter is used for moving optima
        (:class:`fwdpy11.MultivariateGSSmo`).
    .. versionadded:: 0.7.1
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    optima: typing.List[float]
    VS: float = attr.ib(validator=attr.validators.instance_of(float))
    when: typing.Optional[int] = attr.ib(default=None)
    @when.validator
    def validate_when(self, attribute, value):
        # `when` is optional; when present it must be an int.
        if value is not None:
            attr.validators.instance_of(int)(self, attribute, value)
    def __attrs_post_init__(self):
        # Forward validated values to the low-level C++ base class.
        super(PleiotropicOptima, self).__init__(self.optima, self.VS, self.when)
    def __eq__(self, other):
        # attrs equality is disabled (eq=False) so that numpy arrays in
        # `optima` can be compared element-wise.
        optima_equal = np.array_equal(self.optima, other.optima)
        VS_equal = self.VS == other.VS
        if self.when is None and other.when is None:
            # Bug fix: two stable-optima instances (both `when` unset)
            # previously compared unequal even with identical optima/VS.
            when_equal = True
        elif self.when is not None and other.when is not None:
            when_equal = self.when == other.when
        else:
            # One has a shift time and the other does not.
            when_equal = False
        return optima_equal and VS_equal and when_equal
@attr_add_asblack
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class GSS(_ll_GSSmo):
    """
    Gaussian stabilizing selection on a single trait.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param optimum: The optimal trait value
    :type optimum: float or fwdpy11.Optimum
    :param VS: Inverse strength of stabilizing selection
    :type VS: float or None
    .. note::
        VS should be None if optimum is an instance
        of :class:`fwdpy11.Optimum`
    .. versionchanged:: 0.7.1
        Allow instances of fwdpy11.Optimum for initialization
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    optimum: typing.Union[Optimum, float]
    VS: typing.Optional[float] = None
    def _init_base(self):
        # Normalize (optimum, VS) to a single stable Optimum (when=0)
        # and forward it to the low-level C++ base.  Shared by
        # __attrs_post_init__ and __setstate__, which previously
        # duplicated this logic.
        if self.VS is None:
            # `optimum` already is a fwdpy11.Optimum carrying its own VS.
            opt = Optimum(optimum=self.optimum.optimum, VS=self.optimum.VS, when=0)
        else:
            opt = Optimum(optimum=self.optimum, VS=self.VS, when=0)
        super(GSS, self).__init__([opt])
    def __attrs_post_init__(self):
        self._init_base()
    def __getstate__(self):
        return self.asdict()
    def __setstate__(self, d):
        self.__dict__.update(d)
        # Re-initialize the C++ base, whose state is not in asdict().
        self._init_base()
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class GSSmo(_ll_GSSmo):
    """
    Gaussian stabilizing selection on a single trait with a moving optimum.

    Attribute names double as `kwargs` for initialization and fix the
    order of positional arguments:
    :param optima: The optimal trait values
    :type optima: list[fwdpy11.Optimum]
    .. note::
        Every fwdpy11.Optimum must carry a valid (non-None) `when`.
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    optima: typing.List[Optimum] = attr.ib()
    @optima.validator
    def validate_optima(self, attribute, value):
        # Reject empty lists and any optimum that lacks a shift time.
        if not value:
            raise ValueError("list of optima cannot be empty")
        if any(opt.when is None for opt in value):
            raise ValueError("Optimum.when is None")
    def __attrs_post_init__(self):
        super(GSSmo, self).__init__(self.optima)
@attr_add_asblack
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class MultivariateGSS(_ll_MultivariateGSSmo):
    """
    Multivariate gaussian stablizing selection.
    Maps a multidimensional trait to fitness using the Euclidian
    distance of a vector of trait values to a vector of optima.
    Essentially, this is Equation 1 of
    Simons, Yuval B., Kevin Bullaughey, Richard R. Hudson, and Guy Sella. 2018.
    "A Population Genetic Interpretation of GWAS Findings for Human Quantitative Traits."
    PLoS Biology 16 (3): e2002985.
    For the case of moving optima, see :class:`fwdpy11.MultivariateGSSmo`.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param optima: The optimum value for each trait over time
    :type optima: numpy.ndarray or list[fwdpy11.PleiotropicOptima]
    :param VS: Inverse strength of stablizing selection
    :type VS: float or None
    .. note::
        `VS` should be `None` if `optima` is list[fwdpy11.PleiotropicOptima]
        `VS` is :math:`\\omega^2` in the Simons et al. notation
    .. versionchanged:: 0.7.1
        Allow initialization with list of fwdpy11.PleiotropicOptima
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    optima: typing.Union[PleiotropicOptima, typing.List[float]]
    VS: typing.Optional[float] = None
    def _init_base(self):
        # Normalize (optima, VS) into a one-element list of
        # PleiotropicOptima and forward it to the C++ base.  Shared by
        # __attrs_post_init__ and __setstate__, which previously
        # duplicated this logic.
        if self.VS is None:
            # `optima` already is a PleiotropicOptima instance.
            super(MultivariateGSS, self).__init__([self.optima])
        else:
            super(MultivariateGSS, self).__init__(self._convert_to_list())
    def __attrs_post_init__(self):
        self._init_base()
    def __getstate__(self):
        return self.asdict()
    def __setstate__(self, d):
        self.__dict__.update(d)
        # Re-initialize the C++ base, whose state is not in asdict().
        self._init_base()
    def _convert_to_list(self):
        # Wrap raw optima + VS into a stable (when=0) PleiotropicOptima.
        if self.VS is None:
            raise ValueError("VS must not be None")
        return [PleiotropicOptima(optima=self.optima, VS=self.VS, when=0)]
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class MultivariateGSSmo(_ll_MultivariateGSSmo):
    """
    Multivariate gaussian stablizing selection with moving optima.

    Attribute names double as `kwargs` for initialization and fix the
    order of positional arguments:
    :param optima: list of optima over time
    :type optima: list[fwdpy11.PleiotropicOptima]
    .. versionchanged:: 0.7.1
        Allow initialization with list of fwdpy11.PleiotropicOptima
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    optima: typing.List[PleiotropicOptima] = attr.ib()
    @optima.validator
    def validate_optima(self, attribute, value):
        # Reject empty lists and any optima set lacking a shift time.
        if not value:
            raise ValueError("list of optima cannot be empty")
        if any(opt.when is None for opt in value):
            raise ValueError("PleiotropicOptima.when is None")
    def __attrs_post_init__(self):
        super(MultivariateGSSmo, self).__init__(self.optima)
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class NoNoise(_ll_NoNoise):
    """
    No random effects on genetic values
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    # Stateless marker class: no attributes.
    def __attrs_post_init__(self):
        # The low-level C++ base takes no arguments.
        super(NoNoise, self).__init__()
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class GaussianNoise(_ll_GaussianNoise):
    """
    Gaussian noise added to genetic values.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param sd: Standard deviation
    :type sd: float
    :param mean: Mean value
    :type mean: float
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    # Standard deviation of the noise term.
    sd: float
    # Mean of the noise term; defaults to unbiased noise.
    mean: float = 0.0
    def __attrs_post_init__(self):
        # Forward to the low-level C++ base.
        super(GaussianNoise, self).__init__(self.sd, self.mean)
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class Additive(_ll_Additive):
    """
    Additive effects on genetic values.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param scaling: How to treat mutant homozygotes.
    :type scaling: float
    :param gvalue_to_fitness: How to map trait value to fitness
    :type gvalue_to_fitness: fwdpy11.GeneticValueIsTrait
    :param noise: Random effects on trait values
    :type noise: fwdpy11.GeneticValueNoise
    :param ndemes: Number of demes to allocate genetic values for
    :type ndemes: int
    When `gvalue_to_fitness` is `None`, then we are
    modeling additive effects on fitness.
    For a model of fitness, the genetic value is 1, 1+e*h,
    1+`scaling`*e for genotypes AA, Aa, and aa, respectively,
    where `e` and `h` are the effect size and dominance, respectively.
    For a model of a trait (phenotype), meaning `gvalue_to_fitness`
    is not `None`, the values for the three genotypes are 0, e*h,
    and e, respectively.
    """
    scaling: float
    # None => additive effects on fitness itself (no trait mapping).
    gvalue_to_fitness: typing.Optional[GeneticValueIsTrait] = None
    # None => no random effects added to genetic values.
    noise: typing.Optional[GeneticValueNoise] = None
    ndemes: int = 1
    def __attrs_post_init__(self):
        # Forward to the low-level C++ base.
        super(Additive, self).__init__(
            self.scaling, self.gvalue_to_fitness, self.noise, self.ndemes
        )
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class Multiplicative(_ll_Multiplicative):
    """
    Multiplicative effects on genetic values.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param scaling: How to treat mutant homozygotes.
    :type scaling: float
    :param gvalue_to_fitness: How to map trait value to fitness
    :type gvalue_to_fitness: fwdpy11.GeneticValueIsTrait
    :param noise: Random effects on trait values
    :type noise: fwdpy11.GeneticValueNoise
    :param ndemes: Number of demes to allocate genetic values for
    :type ndemes: int
    When `gvalue_to_fitness` is `None`, then we are
    modeling multiplicative effects on fitness.
    For a model of fitness, the genetic value is 1, 1+e*h,
    1+`scaling`*e for genotypes AA, Aa, and aa, respectively,
    where `e` and `h` are the effect size and dominance, respectively.
    For a model of a trait (phenotype), meaning `gvalue_to_fitness`
    is not `None`, the values for the three genotypes are 0, e*h,
    and e, respectively.
    """
    scaling: float
    # None => multiplicative effects on fitness itself (no trait mapping).
    gvalue_to_fitness: typing.Optional[GeneticValueIsTrait] = None
    # None => no random effects added to genetic values.
    noise: typing.Optional[GeneticValueNoise] = None
    ndemes: int = 1
    def __attrs_post_init__(self):
        # Forward to the low-level C++ base.
        super(Multiplicative, self).__init__(
            self.scaling, self.gvalue_to_fitness, self.noise, self.ndemes
        )
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class GBR(_ll_GBR):
    """
    The "gene-based recessive" trait model described in Thornton et al.
    2013 http://dx.doi.org/10.1371/journal.pgen.1003258 and Sanjak et al. 2017
    http://dx.doi.org/10.1371/journal.pgen.1006573.
    The trait value is the geometric mean of the sum of effect sizes on each haplotype.
    It is undefined for the case where these sums are negative.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param gvalue_to_fitness: How to map trait value to fitness
    :type gvalue_to_fitness: fwdpy11.GeneticValueIsTrait
    :param noise: Random effects on trait values
    :type noise: fwdpy11.GeneticValueNoise
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    # Required: GBR always models a trait, so a fitness mapping is needed.
    gvalue_to_fitness: object
    # None => no random effects added to trait values.
    noise: object = None
    def __attrs_post_init__(self):
        # Forward to the low-level C++ base.
        super(GBR, self).__init__(self.gvalue_to_fitness, self.noise)
@attr_add_asblack
@attr_class_pickle_with_super
@attr_class_to_from_dict_no_recurse
@attr.s(auto_attribs=True, frozen=True, repr_ns="fwdpy11")
class StrictAdditiveMultivariateEffects(_ll_StrictAdditiveMultivariateEffects):
    """
    Multivariate trait values under strictly additive effects.
    Calculate the trait value for a diploid in a :class:`fwdpy11.DiploidPopulation`
    for a multidimensional trait.
    This class is restricted to the case of simple additive effects, meaning
    that any dominance terms associated with mutations are ignored.
    During a simulation, :attr:`fwdpy11.DiploidMetadata.g` is filled with the
    genetic value corresponding to a "focal" trait specified upon object construction.
    This class has the following attributes, whose names
    are also `kwargs` for initialization. The attribute names
    also determine the order of positional arguments:
    :param ndimensions: Number of trait dimensions
    :type ndimensions: int
    :param focal_trait: Index of the focal trait
    :type focal_trait: int
    :param gvalue_to_fitness: Function mapping trait value to fitness
    :type gvalue_to_fitness: :class:`fwdpy11.GeneticValueToFitnessMap`
    :param noise: Function adding random additive noise to trait value
    :type noise: :class:`fwdpy11.GeneticValueNoise`
    .. versionchanged:: 0.8.0
        Refactored to use attrs and inherit from
        low-level C++ class
    """
    # Dimensionality of the trait vector.
    ndimensions: int
    # Which trait index fills DiploidMetadata.g during a simulation.
    focal_trait: int
    gvalue_to_fitness: object
    # None => no random effects added to trait values.
    noise: object = None
    def __attrs_post_init__(self):
        # Forward to the low-level C++ base.
        super(StrictAdditiveMultivariateEffects, self).__init__(
            self.ndimensions, self.focal_trait, self.gvalue_to_fitness, self.noise
        )
|
__author__ = 'benjaminsmith'
class UnitModel(object):
    """A named numeric unit; a "layer" is simply a list of these.

    A layer has to be a list in order to be indexable.  We could extend it
    later to be a class (possibly one inheriting from a dictionary with at
    least ``name`` and ``value`` keys), but for now the interchange pattern
    is a name/value dictionary, optionally carrying extra keys.
    """
    def __init__(self, name, value):
        self.name = name
        self.value = value

    @classmethod
    def GetLayerOfUnits(cls, layer, layer_type="UnspecifiedLayer"):
        """Build a list of UnitModel from any supported layer description.

        Accepts, in order of dispatch:
        - an int ``n``: ``n`` zero-valued units named ``layer_type + index``;
        - a mapping of name -> numeric value;
        - an iterable of ``{"name": ..., "value": ...}`` dicts (returned
          entries keep any extra keys they carry);
        - an iterable of numbers, auto-named ``layer_type + index``.

        The original implementation dispatched by matching CPython 2's
        TypeError message strings (and used dict.iteritems()), which breaks
        on every Python 3 release; this version dispatches on types.
        """
        if isinstance(layer, int):
            # A bare count: create that many zero-valued units.
            ret_val = [{"name": layer_type + str(i), "value": 0.0}
                       for i in range(layer)]
        elif isinstance(layer, dict):
            # Mapping of name -> numeric value.
            ret_val = [{"name": k, "value": float(v)} for k, v in layer.items()]
        else:
            try:
                # Probe: an iterable of {"name", "value"} mappings is passed
                # through as-is so additional keys survive.
                if all("name" in e and "value" in e for e in layer):
                    ret_val = layer
                else:
                    raise TypeError
            except TypeError:
                # Fall back: an iterable of plain numbers.
                ret_val = [{"name": layer_type + str(x), "value": float(i)}
                           for x, i in enumerate(layer)]
        return [cls(name=v["name"], value=v["value"]) for v in ret_val]

    @classmethod
    def get_list_names(cls, list):
        # This is the wrong place for this, but it avoids creating a special
        # list class for as long as possible; in the meantime, this will do.
        return [li.name for li in list]

    @classmethod
    def get_list_vals(cls, list):
        # Same caveat as get_list_names: a stop-gap list helper.
        return [li.value for li in list]

    def __repr__(self):
        return "UnitModel " + self.name + ", value=" + str(self.value)
|
import numpy as np
def ccc(l1, l2):
    '''
    Concordance correlation coefficient.
    See: https://en.wikipedia.org/wiki/Concordance_correlation_coefficient

    Uses population (ddof=0) statistics throughout, so ccc(x, x) == 1.
    '''
    l1, l2 = np.asarray(l1), np.asarray(l2)
    # bias=True -> population covariance, matching np.var's default ddof=0.
    # The original mixed the sample covariance (N-1 denominator) with
    # population variances (N denominator), so even identical inputs did
    # not yield a CCC of 1.
    cov = np.cov(l1, l2, bias=True)[0, 1]
    ccc_val = 2 * cov / (np.var(l1) + np.var(l2) +
                         (np.mean(l1) - np.mean(l2)) ** 2)
    return ccc_val
def m1_ccc_map(dep_dist_i_PA_vals, dist_3d_kpc):
    """
    Vectorized CCC between each (i, PA) row of a 3-D grid and a fixed
    1-D reference vector; returns the flattened grid of coefficients.
    Source: https://stackoverflow.com/a/47225031/1391441
    """
    grid = np.asarray(dep_dist_i_PA_vals)
    ref = np.asarray(dist_3d_kpc)
    n_clusters = len(ref)
    grid_centered = grid - grid.mean(axis=-1, keepdims=True)
    # Dividing the centered reference by (N - 1) folds the covariance
    # normalization into the dot product below.
    ref_centered = (ref - ref.mean()) / (n_clusters - 1)
    cov = np.dot(grid_centered, ref_centered)
    denom = (np.var(grid, axis=2) + np.var(ref) +
             (np.mean(grid, axis=2) - np.mean(ref)) ** 2)
    return (2 * cov / denom).flatten()
|
import numpy as np
# One output and one input axis are required when reducing with numpy.
if len(AxisOut) != 1:
    Msg.Error(3, "Number of output axis has to be one, when using numpy")
if len(AxisIn) != 1:
    Msg.Error(3, "Number of input axis has to be one, when using numpy")
Func = Analyse["Routine"]
ValuesOut = list()
for RowValues in ValuesIn:
    # Look the routine up on the numpy module directly instead of eval()ing
    # a constructed source string: same result for any valid attribute name,
    # but no arbitrary-code-execution hazard and a clearer AttributeError
    # when the routine does not exist.
    ValuesOut.append([getattr(np, Func)(RowValues)])
|
import bz2
import gnupg
import hashlib
import os
import datadecorator
class GPGBZ2Decorator(datadecorator.DataDecorator):
    """Data decorator that bzip2-compresses and/or GPG-encrypts payloads.

    Port notes: the original was Python 2 (``file()`` builtin, text-mode
    I/O on compressed bytes, the since-removed ``BZ2File(buffering=...)``
    keyword); all file handling is now binary-safe and context-managed.
    """

    def __init__(self, gpghome, gpgkeys, compresslevel=5, compressed=True, encrypted=True, DEBUG=False):
        """Initialize the GPG keyring.

        `gpghome` is the home directory for the gnupg encryption library;
        `gpgkeys` is the key file.  If the key file does not exist yet, a
        fresh key pair is generated and exported to it.
        """
        gpg = gnupg.GPG(gnupghome=gpghome)
        try:
            # Key file exists: import it.
            with open(gpgkeys, 'r') as f:
                key = gpg.import_keys(f.read())
        except IOError:
            # No key file yet: generate a key and export both halves.
            input_data = gpg.gen_key_input()
            key = gpg.gen_key(input_data)
            ascii_armored_public_keys = gpg.export_keys(key)
            ascii_armored_private_keys = gpg.export_keys(key, True)
            with open(gpgkeys, 'w') as f:
                f.write(ascii_armored_public_keys)
                f.write(ascii_armored_private_keys)
        self.gpg = gpg
        self.key = key.fingerprints
        self.compresslevel = compresslevel
        self.compressed = compressed
        self.encrypted = encrypted

    def decorate(self, data):
        """Return `data` compressed and/or encrypted per the constructor flags."""
        if self.compressed:
            compressed = bz2.compress(data, self.compresslevel)
        else:
            compressed = data
        if self.encrypted:
            # Encrypt with the public key(s) set up in __init__; Crypt
            # objects stringify to the ASCII-armored ciphertext.
            return str(self.gpg.encrypt(compressed, self.key))
        # Bug fix: str() on raw bytes would return their repr ("b'...'"),
        # which undecorate() could not reverse; hand back the bytes as-is.
        return compressed

    def undecorate(self, data):
        """Reverse decorate(): decrypt and/or decompress `data`."""
        if self.encrypted:
            decrypted = self.gpg.decrypt(data).data
        else:
            decrypted = data
        if self.compressed:
            decompressed = bz2.decompress(decrypted)
        else:
            decompressed = decrypted
        return decompressed

    def decorate_file(self, ifname, ofname):
        """Compress and/or encrypt the file `ifname` into `ofname`."""
        buffer_size = 512
        if self.encrypted:
            # Use the extension to discriminate the intermediate file.
            com_tmp = ofname + '.bz2'
        else:
            com_tmp = ofname
        if self.compressed:
            # Note: the Python-3 BZ2File no longer accepts `buffering`.
            sink = bz2.BZ2File(com_tmp, mode='w',
                               compresslevel=self.compresslevel)
        else:
            sink = open(com_tmp, 'wb')
        # Binary modes throughout: compressed data is not text.
        with open(ifname, 'rb') as src, sink:
            while True:
                data = src.read(buffer_size)
                if not data:
                    break
                sink.write(data)
        if self.encrypted:
            with open(com_tmp, 'rb') as staged:
                self.gpg.encrypt_file(staged, self.key, output=ofname)
            if com_tmp != ofname:
                os.unlink(com_tmp)

    def undecorate_file(self, ifname, ofname):
        """Decrypt and/or decompress `ifname` into `ofname` (reverse of decorate_file)."""
        buffer_size = 512
        if self.encrypted:
            # Decrypt into a temporary .bz2 when decompression still has to
            # happen, otherwise decrypt straight into the destination.
            # (Bug fix: the original decrypted over -- and later deleted --
            # the *input* file in the encrypted-but-uncompressed case.)
            decrypted_file = ofname + '.bz2' if self.compressed else ofname
            with open(ifname, 'rb') as f:
                self.gpg.decrypt_file(f, output=decrypted_file)
        else:
            decrypted_file = ifname
        if self.compressed:
            src = bz2.BZ2File(decrypted_file)
        elif decrypted_file == ofname:
            # Decrypted directly into place; nothing left to copy.
            return
        else:
            src = open(decrypted_file, 'rb')
        with src, open(ofname, 'wb') as output:
            while True:
                data = src.read(buffer_size)
                if not data:
                    break
                output.write(data)
        if decrypted_file not in (ifname, ofname):
            os.unlink(decrypted_file)
|
from starcluster.clustersetup import ClusterSetup
from starcluster.logger import log
import re
# qconf attributes for a node-local parallel environment: effectively
# unlimited slots, allocation restricted to a single node ($pe_slots),
# with slave-task accounting summarized into the master task.
local_pe_attrs = {
    'pe_name': 'local',
    'slots': '999',
    'user_lists': 'NONE',
    'xuser_lists': 'NONE',
    'start_proc_args': 'NONE',
    'stop_proc_args': 'NONE',
    'allocation_rule': '$pe_slots',
    'control_slaves': 'TRUE',
    'job_is_first_task': 'TRUE',
    'urgency_slots': 'min',
    'accounting_summary': 'TRUE',
    }
# qconf global scheduler settings: faster load reports, quicker detection
# of unresponsive hosts, and automatic rescheduling of their slave tasks.
global_attrs = {
    'qmaster_params': 'ENABLE_RESCHEDULE_SLAVE=1',
    'load_report_time': '00:00:40',
    'max_unheard': '00:02:00',
    'reschedule_unknown': '00:05:00',
    }
class SGEConfig(ClusterSetup):
    """Apply additional configuration to a running SGE instance.
    This plugin is meant to run after the built-in SGE plugin of StarCluster.
    """
    def __init__(self):
        pass
    def run (self, nodes, master, user, user_shell, volumes):
        # All qconf administration is performed over SSH on the master node.
        sge = SGE(master)
        if not sge.is_installed():
            log.error("SGE is not installed on this AMI, skipping...")
            return
        log.info("Applying additional SGE configuration...")
        # Install the node-local PE on all.q and apply global tweaks
        # (attribute dictionaries are defined at module level).
        sge.create_or_update_pe('local', local_pe_attrs, ['all.q'])
        sge.update_global_config(global_attrs)
        sge.cleanup()
    def on_add_node(self, node, nodes, master, user, user_shell, volumes):
        # This code configures a specific user and group id for the user that
        # you wish to run the jobs under (if it's not sgeadmin).
        # Enable and customize as needed
        #mgroup = 'mygroup'
        #myuser = 'myuser'
        #node.ssh.execute('addgroup --system --gid 1014 %s' % mygroup)
        #node.ssh.execute('adduser --gid 1014 --uid 1014 %s --system' % myuser)
        pass
class SGE(object):
    """Thin wrapper around SGE's `qconf` administration tool on the master node.

    Fixes vs. the original: ``dict.iteritems()`` (Python 2 only) replaced
    with ``items()``, the sed template made a raw string (it contains regex
    escapes such as ``\\s``), and locals no longer shadow the ``dir``/``file``
    builtins.
    """

    def __init__(self, master):
        self.mssh = master.ssh
        # Temp directories created on the master, remembered for cleanup.
        self.cleanup_dirs = []

    def is_installed(self):
        """Heuristic check: the StarCluster SGE AMI ships /opt/sge6-fresh."""
        return self.mssh.isdir("/opt/sge6-fresh")

    def cleanup(self):
        # NOTE(review): directories are only logged here, never removed.
        log.debug("Need to cleanup %s", self.cleanup_dirs)

    def exists_pe(self, pe_name):
        """Check if parallel environment exists"""
        spl = self.mssh.execute("qconf -spl")
        return pe_name in spl

    def create_or_update_pe(self, name, attrs, queues=None):
        """Create or update parallel environment with the specified attributes.
        Any attributes of an existing PE are replaced with the provided dict.
        """
        conf_file = self._stage_attrs(name, attrs)
        if self.exists_pe(name):
            mode = "M"  # qconf: modify existing PE from file
            verb = 'Updating'
        else:
            mode = "A"  # qconf: add new PE from file
            verb = 'Creating'
        log.info("%s SGE parallel environment '%s'" % (verb, name))
        self.mssh.execute("qconf -{mode}p {file}".format(mode=mode, file=conf_file))
        if queues:
            qs = ','.join(queues)
            log.info("Adding parallel environment '%s' to queues '%s'", name, qs)
            self.mssh.execute('qconf -mattr queue pe_list "%s" %s' % (name, qs))

    def update_global_config(self, attrsDict):
        """Update global config with specified attributes."""
        tmp_dir = self._create_tmp_dir()
        conf_file = "{dir}/{name}".format(dir=tmp_dir, name='global')
        # Raw string: contains the regex escapes \s, \1, \2 consumed by sed.
        sed_cmd_template = r"s/^({key})(\s+)(.*)/\1\2{value}/"
        sed_cmd = ""
        for k, v in attrsDict.items():
            frag = sed_cmd_template.format(key=k, value=re.escape(v))
            sed_cmd += ' -e "%s"' % frag
        # Rewrite the dumped global config in place, then re-import it.
        self.mssh.execute("qconf -sconf global | sed -r %s > %s" % (sed_cmd, conf_file))
        self.mssh.execute("qconf -Mconf %s" % conf_file)

    def _stage_attrs(self, fileName, attrsDict):
        """Write the attrs to a temp file on the master; return its path."""
        tmp_dir = self._create_tmp_dir()
        conf_file = "{dir}/{name}".format(dir=tmp_dir, name=fileName)
        log.debug("Checking for file %s", conf_file)
        f = self.mssh.remote_file(conf_file, mode="w")
        f.writelines(self._format_attrs(attrsDict))
        f.close()
        return conf_file

    def _format_attrs(self, attrsDict):
        """Format dictionary of attributes into a list of lines in the sge_config format.
        """
        return ["%s\t\t\t%s\n" % (k, v) for k, v in attrsDict.items()]

    def _create_tmp_dir(self):
        """mktemp a directory on the master and remember it for cleanup."""
        out = self.mssh.execute("mktemp --tmpdir=/tmp --directory sgeconf.XXXXXXX")
        if not out:
            raise Exception("Failed to create temp directory")
        # execute() returns a list of output lines; keep the original
        # behavior of remembering that list and returning its first entry.
        self.cleanup_dirs.append(out)
        return out[0]
|
from __future__ import division, print_function, absolute_import
import numpy as np
import scipy.stats
import pandas
import tqdm
from sklearn.model_selection import StratifiedKFold
from streams import utils
from streams.metrics.classifiers import MatchToSampleClassifier
from streams.envs import objectome
def internal_cons(df, kind='dprime', time=False, niter=100,
                  corr_method='pearson', cons_kind='i1'):
    """
    Spearman-Brown corrected split-half behavioral consistency.
    :Kwargs:
        - kind (dprime or acc default: dprime)
            Compute consistency using accuracy or d'
        - time (bool, default: False)
            Over time or using the 70-170 time window.
        - niter (int, default: 100)
            How many times to split in half.
        - corr_method (pearson or spearman, default: pearson)
            Correlation used between the two halves.
        - cons_kind (i1, or i2, default: i1)
            The kind of consistency.
    :Returns:
        `pandas.DataFrame` with split number and Spearman-Brown corrected consistency
    """
    # i1 is a per-image measure ('id'); otherwise it is per-worker ('uuid').
    index = 'id' if cons_kind == 'i1' else 'uuid'
    groupby = [index, 'obj', 'distractor', 'imgno']
    if time:
        groupby += ['stim_dur']
    # Randomly split trials in half within each group.
    # NOTE(review): `niter` is never forwarded to utils.splithalf here —
    # confirm the helper's default matches the documented niter semantics.
    sdf = utils.splithalf(df, groupby=groupby)
    if kind in ["d'", 'dprime']:
        groups = ['split', 'iterno']
        if time: groups += ['stim_dur']
        sdf, _ = _human_dprime_i1(sdf, groups=groups)
        # _human_dprime_i1 stores its result in the "d'" column.
        kind = "d'"
    else:
        kind = 'acc'
    cols = ['split', 'iterno']
    if time:
        cols += ['stim_dur']
    # Rows: images (or workers); columns: (split, iterno[, stim_dur]).
    pv_all = sdf.pivot_table(index=index, columns=cols, values=kind)
    if corr_method == 'pearson':
        # Correlate split 0 against split 1 across all iterations at once.
        r_all = pv_all[0].corrwith(pv_all[1]).reset_index()
        r_all.rename(columns={0: 'r'}, inplace=True)
    else:
        # Spearman has no corrwith shortcut; correlate column by column.
        r_all = []
        for col in pv_all[0]:
            corr = pv_all[0][col].corr(pv_all[1][col], method='spearman')
            if pv_all.columns.nlevels > 2:
                r_all.append(list(col) + [corr])
            else:
                r_all.append([col, corr])
        r_all = pandas.DataFrame(r_all, columns=['iterno', 'r'])
    # Spearman-Brown correction for the half-length data.
    r_all.r = utils.spearman_brown(r_all.r)
    return r_all
def i1(human_data, model_conf, kind='dprime', time=None, niter=10,
       corr_method='pearson'):
    """Correlate model confidence with human I1 signatures, noise-corrected.

    The per-iteration correlation is divided by the square root of the
    human split-half internal consistency.

    Fixes vs. the original: `internal_cons` is now called with its required
    trial dataframe; `ic.r` replaces the bogus attribute lookup
    `internal_cons.r` on the function object; the undefined names `self`
    and `df` are gone; a stray pivot that crashed the 'acc' path (it used
    a name only bound on the dprime path) was removed, as was a leftover
    ipdb breakpoint.
    """
    if not isinstance(model_conf, pandas.Series):
        # NOTE(review): the original indexed this by an undefined
        # ``self.meta['id']``; without that metadata a plain Series is the
        # best we can construct — confirm callers pass an id-indexed Series.
        model_conf = pandas.Series(model_conf)
    ic = internal_cons(human_data, kind=kind, time=time,
                       niter=niter, corr_method=corr_method,
                       cons_kind='i1')
    if kind == 'acc':
        pv = human_data.pivot_table(index='id', columns='stim_dur', values='acc')
        groups = ['stim_dur'] if time else []
    elif kind in ["d'", 'dprime']:
        kind = "d'"
        groups = ['stim_dur'] if time else []
        human, _ = _human_dprime_i1(human_data, groups=groups, ikind='i1')
        pv = human.pivot_table(index='id', columns=groups, values=kind)
    else:
        raise ValueError("'kind' %s not recognized." % kind)
    if len(groups) == 0:
        if isinstance(pv, pandas.Series):
            r = pv.corr(model_conf, method=corr_method)
        else:
            # Multiple columns can appear here (e.g. several durations);
            # keep the original choice of the first column.
            r = pv[pv.columns[0]].corr(model_conf, method=corr_method)
        # Normalize by the human internal-consistency ceiling.
        r = pandas.DataFrame(r / np.sqrt(ic.r))
        r['iterno'] = ic.iterno
    else:
        corr_acc = []
        for dur in pv:
            r = pv[dur].corr(model_conf, method=corr_method)
            relh = ic[ic.stim_dur == dur]
            r = pandas.DataFrame(r / np.sqrt(relh.r))
            r['iterno'] = relh.iterno
            r['stim_dur'] = dur
            corr_acc.append(r)
        r = pandas.concat(corr_acc, ignore_index=True)
    return r
def _human_dprime_i1(df, ikind='i1', groups=None, ceiling=5):
    """Per-image (i1) or per-worker d' and bias from trial-level data.

    I1 hits are the average accuracy for each image (each object & imgno
    pair); false alarms are one minus the mean accuracy on trials whose
    distractor is that object.

    Fixes vs. the original: the mutable default ``groups=[]`` is replaced
    with ``None``, and ``_dprime`` is called positionally (the original
    passed ``ceiling=`` as a keyword that ``_dprime``'s signature did not
    accept).
    """
    # Fresh list per call; also copy any caller-provided sequence.
    groups = [] if groups is None else list(groups)
    # 'uuid' is the per-worker grouping; it is a convenience key for i1.
    cons_gr = 'id' if ikind == 'i1' else 'uuid'
    hits_gr = ['obj', 'imgno'] + [cons_gr] + groups
    hits = df.groupby(hits_gr).acc.mean()
    # Seed the Series with the right index, then overwrite each object's
    # entry with its false-alarm rate.
    distr = df.groupby(['obj'] + groups).acc.mean()
    for obj in df.obj.unique():
        if len(groups) > 0:
            distr[obj] = 1 - df[df.distractor == obj].groupby(groups).acc.mean()
        else:
            distr[obj] = 1 - df[df.distractor == obj].acc.mean()
    if len(groups) > 0:
        def f(x):
            # Look up by (obj, *group values) in the grouped Series.
            sel = tuple([x.obj] + [x[n] for n in x.index if n in groups])
            return distr[sel]
    else:
        def f(x):
            return distr.loc[x.obj]
    # Broadcast false alarms back onto the hits index.
    fas_tmp = hits.reset_index().apply(f, axis='columns')
    fas = hits.copy()
    fas[:] = fas_tmp.values
    # Positional call stays compatible regardless of _dprime's
    # cap-parameter name.
    return _dprime(hits, fas, ceiling)
def _human_dprime_i2(df, ceiling=5):
    """
    NOTE: NOT WORKING!!
    """
    raise NotImplementedError
    # Everything below is unreachable scratch code, kept for reference
    # until the i2 computation is finished.
    # I2 hits are the average accuracy for each image (that is, each object & imgno pair) for each distractor.
    groups = ['obj', 'distractor', 'imgno', 'uuid']
    if 'stim_dur' in df: groups += ['stim_dur']
    hits = df.groupby(groups).acc.mean()
    # I2 false alarms are one minus the average accuracy for images that have the same object & distractor pair but the correct answer is distractor
    groups = ['distractor', 'obj']
    if 'stim_dur' in df: groups += ['stim_dur']
    distr = df.groupby(groups).acc.mean()
    if 'stim_dur' in df:
        f = lambda x: distr[(x.name[1], x.name[0], x.name[2])]
    else:
        f = lambda x: distr[(x.name[1], x.name[0])]
    groups = ['obj', 'distractor']
    if 'stim_dur' in df: groups += ['stim_dur']
    fas_tmp = 1 - hits.reset_index().groupby(groups).acc.transform(f)
    fas = hits.copy()
    fas[:] = fas_tmp.values
    dprime = hits.apply(scipy.stats.norm.ppf) - fas.apply(scipy.stats.norm.ppf)
    dprime = dprime.reset_index()
    dprime.rename(columns={'acc': "d'"}, inplace=True)
    dprime.loc[dprime["d'"] > ceiling, "d'"] = ceiling
    c = .5 * (hits.apply(scipy.stats.norm.ppf) + fas.apply(scipy.stats.norm.ppf))
    c = c.reset_index()
    c.rename(columns={'acc': 'bias'}, inplace=True)
    return dprime, c
def _dprime(hits, fas, cap=5):
# dprime = bangmetric.dprime(hits.values, fas.values, mode='rate', max_value=ceiling)
# dprime = pandas.Series(dprime, index=hits.index)
# dprime = dprime.reset_index()
# dprime.rename(columns={0: "d'"}, inplace=True)
dprime = hits.apply(scipy.stats.norm.ppf) - fas.apply(scipy.stats.norm.ppf)
dprime = dprime.reset_index()
dprime.rename(columns={'acc': "d'"}, inplace=True)
# bias
c = .5 * (hits.apply(scipy.stats.norm.ppf) + fas.apply(scipy.stats.norm.ppf))
c = c.reset_index()
c.rename(columns={'acc': 'bias'}, inplace=True)
return dprime, c
def human_acc(df):
    """Pivot trial data into an image-id x stimulus-duration accuracy table."""
    return df.pivot_table(values='acc', index='id', columns='stim_dur')
def _to_c(model_feats, labels, order):
df = pandas.DataFrame(model_feats, index=labels, columns=order)
out = np.zeros_like(model_feats)
for (i,j), hit in np.ndenumerate(df.values):
target = labels[i]
distr = order[j]
if target == distr:
c = np.nan
else:
c = np.mean(df.loc[df.index == target, distr])
out[i,j] = model_feats[i,j] - c
return out
def o1():
    """Build a per-object accuracy table from 2-way match-to-sample scores.

    NOTE(review): this function takes no arguments and reads `preds`,
    `labels`, `test_idx` and `order` from an enclosing or module scope not
    visible here -- confirm those names exist wherever it is called.
    It computes `acc` but returns None; presumably an unfinished draft.
    """
    df = pandas.DataFrame(preds, index=labels[test_idx], columns=order)
    # Wide -> long: one row per (obj, distr) pair with the 2-way accuracy.
    df = df.stack().reset_index()
    df = df.rename(columns={'level_1': 'distr', 0: 'acc'})
    # NOTE(review): astype('category', ordered=..., categories=...) is the
    # legacy pandas signature (removed in modern pandas); newer code in this
    # file uses pandas.api.types.CategoricalDtype instead.
    df.obj = df.obj.astype('category', ordered=True, categories=order)
    df.distr = df.distr.astype('category', ordered=True, categories=order)
    acc = df.groupby('obj').acc.mean()
    # The block below is an abandoned draft of the d'/normalization logic
    # that now lives (working) inside objectome_cons.hitrate_to_dprime.
    # dfi = df.set_index(indices[metric])
    # out = pandas.Series(np.zeros(len(dfi)), index=dfi.index)
    # for idx, row in dfi.iterrows():
    #     hit_rate = row[value]
    #     if metric == 'o1':  # idx: target
    #         rej = df.loc[(df[target] != idx) & (df[distr] == idx), value]
    #     elif metric == 'o2':  # idx: (target, distr)
    #         rej = df.loc[(df[target] == idx[1]) & (df[distr] == idx[0]), value]
    #     elif metric == 'i1':  # idx: (target, imid)
    #         rej = df.loc[(df[target] != idx[0]) & (df[distr] == idx[0]), value]
    #         import ipdb; ipdb.set_trace()
    #     elif metric == 'i2':  # idx: (target, imid, distr)
    #         rej = df.loc[(df[target] == idx[2]) & (df[distr] == idx[0]), value]
    #     fa_rate = 1 - np.nanmean(rej)
    #     if kind == 'dprime':
    #         dprime = scipy.stats.norm.ppf(hit_rate) - scipy.stats.norm.ppf(fa_rate)
    #         out.loc[idx] = np.clip(dprime, -cap, cap)
    #     elif kind == 'acc':
    #         raise NotImplementedError
    # if normalize:
    #     if metric == 'i1':
    #         by = target
    #     elif metric == 'i2':
    #         by = [target, distr]
    #     else:
    #         raise ValueError(f'normalization only defined for i1 and i2, got {metric}')
    #     out[value] -= out.groupby(by)[value].transform(lambda x: x.mean())
    # return out#.reset_index()
def objectome_cons(model_feats, metric='i2n', kind='dprime',
                   target='obj', distr='distr', imid='id', value='acc', cap=20):
    """Model-vs-human consistency on the objectome metrics (o1/o2/i1/i2).

    Parameters
    ----------
    model_feats : array-like
        Model features for all objectome images; rows aligned with obj.meta.
    metric : str
        'o1', 'o2', 'i1' or 'i2', optionally suffixed with 'n' (e.g. 'i2n')
        to request normalized (mean-centered) d' values.
    kind : str
        Only 'dprime' is implemented; 'acc' raises NotImplementedError.
    target, distr, imid, value : str
        Column names used in the internal long-format trials table.
    cap : float
        d' values are clipped to [-cap, cap].

    Returns
    -------
    pandas.DataFrame with one row per human split-half: columns
    ['split', 'internal_cons', 'r', 'cons'], where cons = r / sqrt(ic).
    """
    normalize = metric[-1] == 'n'
    # Strip the 'n' suffix; safe since no base metric name ends in 'n'.
    metric = metric.rstrip('n')
    obj = objectome.Objectome()
    obj24 = objectome.Objectome24s10()
    # Human reference data key, e.g. 'I2_dprime_C' for normalized i2.
    if normalize:
        hkind = f'{metric.upper()}_{kind}_C'
    else:
        hkind = f'{metric.upper()}_{kind}'
    human_data = obj24.human_data(kind=hkind)
    # import ipdb; ipdb.set_trace()
    test_idx = pandas.read_pickle(obj24.datapath('sel240'))
    clf = MatchToSampleClassifier(C=1e-3)
    # Train on every objectome image not in the 240-image test selection.
    train_idx = [i for i in range(len(obj.meta.obj)) if i not in test_idx]
    clf.fit(model_feats[train_idx], obj.meta.obj.iloc[train_idx], order=obj.OBJS)
    preds = clf.predict_proba(model_feats[test_idx],
                              targets=obj.meta.obj.iloc[test_idx], kind='2-way')
    # Build a long-format trials table: one row per (obj, id, distr) with
    # the 2-way accuracy in the 'acc' column.
    df = pandas.DataFrame(preds, index=obj.meta.obj.iloc[test_idx], columns=obj.OBJS).reset_index()
    df['id'] = obj.meta.id.iloc[test_idx].values
    df = df.set_index(['obj', 'id'])
    df = df.stack().reset_index()
    df = df.rename(columns={'level_2': 'distr', 0: 'acc'})
    df = df[['obj', 'id', 'distr', 'acc']]
    # obj_order = ['lo_poly_animal_RHINO_2', 'calc01', 'womens_shorts_01M', 'zebra', 'MB27346', 'build51', 'weimaraner', 'interior_details_130_2', 'lo_poly_animal_CHICKDEE', 'kitchen_equipment_knife2', 'interior_details_103_4', 'lo_poly_animal_BEAR_BLK', 'MB30203', 'antique_furniture_item_18', 'lo_poly_animal_ELE_AS1', 'MB29874', 'womens_stockings_01M', 'Hanger_02', 'dromedary', 'MB28699', 'lo_poly_animal_TRANTULA', 'flarenut_spanner', 'MB30758', '22_acoustic_guitar']
    # df.obj = df.obj.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=obj_order))
    # df.distr = df.distr.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=obj.OBJS))
    # mm = obj.meta.iloc[test_idx]
    # id_order = np.concatenate([mm[mm.obj == o].id for o in obj_order])
    # df.id = df.id.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=id_order))
    # df = df.sort_values('id')
    # indices = {
    #     'o1': ['obj'],
    #     'o2': ['obj', 'distr'],
    #     'i1': ['obj', 'id'],
    #     'i2': ['obj', 'id', 'distr']
    # }
    # Grouping columns that define one entry of each metric.
    indices = {
        'o1': [target],
        'o2': [target, distr],
        'i1': [target, imid],
        'i2': [target, imid, distr]
    }

    def hitrate_to_dprime(x):
        # x: the 'acc' values of one group; x.name is the group key tuple.
        idx = x.name
        hit_rate = np.nanmean(x)
        # False-alarm trials are the complementary rows where the current
        # target plays the distractor role (metric-specific selection).
        if metric == 'o1': # idx: target
            rej = df.loc[(df[target] != idx) & (df[distr] == idx), value]
        elif metric == 'o2': # idx: (target, distr)
            rej = df.loc[(df[target] == idx[1]) & (df[distr] == idx[0]), value]
        elif metric == 'i1': # idx: (target, imid)
            rej = df.loc[(df[target] != idx[0]) & (df[distr] == idx[0]), value]
            # import ipdb; ipdb.set_trace()
        elif metric == 'i2': # idx: (target, imid, distr)
            rej = df.loc[(df[target] == idx[2]) & (df[distr] == idx[0]), value]
        fa_rate = 1 - np.nanmean(rej)
        if kind == 'dprime':
            output = scipy.stats.norm.ppf(hit_rate) - scipy.stats.norm.ppf(fa_rate)
            output = np.clip(output, -cap, cap)
        elif kind == 'acc':
            raise NotImplementedError
        return output

    dprime = df.groupby(indices[metric])['acc'].apply(hitrate_to_dprime)
    dprime = dprime.reset_index()
    if normalize:
        # Center d' within each image (i1) or image/distractor pair (i2).
        if metric == 'i1':
            by = target
        elif metric == 'i2':
            by = [target, distr]
        else:
            raise ValueError(f'normalization only defined for i1 and i2, got {metric}')
        # idx = dprime.index
        # import ipdb; ipdb.set_trace()
        # dprime = dprime.sort_values(imid)
        dprime[value] = dprime.groupby(by)[value].transform(lambda x: x - x.mean())
    # dprime = dprime.set_index(indices[metric])
    # dprime = hitrate_to_dprime(df, metric=metric, kind=kind,
    #                            target='obj', distr='distr',
    #                            imid='id', value='acc', normalize=normalize)
    # Canonical object ordering used by the human objectome dataset.
    obj_order = ['lo_poly_animal_RHINO_2', 'calc01', 'womens_shorts_01M', 'zebra', 'MB27346', 'build51', 'weimaraner', 'interior_details_130_2', 'lo_poly_animal_CHICKDEE', 'kitchen_equipment_knife2', 'interior_details_103_4', 'lo_poly_animal_BEAR_BLK', 'MB30203', 'antique_furniture_item_18', 'lo_poly_animal_ELE_AS1', 'MB29874', 'womens_stockings_01M', 'Hanger_02', 'dromedary', 'MB28699', 'lo_poly_animal_TRANTULA', 'flarenut_spanner', 'MB30758', '22_acoustic_guitar']
    # Impose the human data's row/column ordering via ordered categoricals.
    if metric in ['o1', 'o2']:
        dprime.obj = dprime.obj.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=obj.OBJS))
    else:
        dprime.obj = dprime.obj.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=obj_order))
    if metric in ['o2', 'i2']:
        dprime.distr = dprime.distr.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=obj.OBJS))
    if metric in ['i1', 'i2']:
        mm = obj.meta.iloc[test_idx]
        id_order = np.concatenate([mm[mm.obj == o].id for o in obj_order])
        dprime.id = dprime.id.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=id_order))
    # Reshape to the matrix layout the human split-half data uses.
    if metric == 'o1':
        dprime = dprime.sort_values('obj')
        preds = pandas.DataFrame(dprime.set_index(['obj']))
    elif metric == 'o2':
        dprime = dprime.sort_values('obj')
        preds = dprime.set_index(['obj','distr']).unstack('distr')
    elif metric == 'i1':
        dprime = dprime.sort_values('id')
        preds = pandas.DataFrame(dprime.set_index(['id','obj']))
    elif metric == 'i2':
        dprime = dprime.sort_values('id')
        preds = dprime.set_index(['id','obj','distr']).unstack('distr')
    # if metric in ['o2', 'i2']:
    #     preds = preds.unstack(distr)
    # else:
    #     preds = pandas.DataFrame(preds)
    preds = preds.fillna(np.nan).values
    # Noise-corrected consistency against each human split-half.
    df = []
    for iterno, split in enumerate(tqdm.tqdm(human_data)):
        # Only compare cells finite in the model and in both human halves.
        inds = np.isfinite(split[0]) & np.isfinite(split[1]) & np.isfinite(preds)
        c0 = np.corrcoef(preds[inds], split[0][inds])[0,1]
        c1 = np.corrcoef(preds[inds], split[1][inds])[0,1]
        corr = (c0 + c1) / 2
        ic = np.corrcoef(split[0][inds], split[1][inds])[0,1]
        df.append([iterno, ic, corr, corr / np.sqrt(ic)])
    # import ipdb; ipdb.set_trace()
    df = pandas.DataFrame(df, columns=['split', 'internal_cons', 'r', 'cons'])
    # import ipdb; ipdb.set_trace()
    return df
def objectome_i2(model_feats, human_acc, meta, test_idx, order=None,
                 kind='i2'):
    """Model-vs-human consistency on the objectome i2 metric.

    Fits a 2-way match-to-sample classifier on the non-test images,
    converts its per-image/per-distractor hit rates to normalized d',
    and correlates the result with each human split-half.

    Parameters
    ----------
    model_feats : array-like
        Model features for all images; rows aligned with `meta`.
    human_acc : iterable
        Human split-half data; each element is a pair (split0, split1)
        of arrays shaped like the model i2 matrix.
    meta : pandas.DataFrame-like
        Image metadata with `obj` and `id` columns.
    test_idx : sequence of int
        Row indices of the held-out test images.
    order : sequence, optional
        Object order passed to the classifier.
    kind : str
        Kept for interface compatibility; only 'i2' is computed here.

    Returns
    -------
    pandas.DataFrame with one row per human split: columns
    ['split', 'internal_cons', 'r', 'cons'], where cons = r / sqrt(ic).
    """
    # BUG FIX: removed a leftover `import ipdb; ipdb.set_trace()` that
    # halted every call just before returning, plus unused matplotlib/os
    # imports and large blocks of dead commented-out code.
    clf = MatchToSampleClassifier()
    # Train on every image not in the held-out test set.
    train_idx = [i for i in range(len(meta.obj)) if i not in test_idx]
    clf.fit(model_feats[train_idx], meta.obj.iloc[train_idx], order=order)
    preds = clf.predict_proba(model_feats[test_idx],
                              targets=meta.obj.iloc[test_idx], kind='2-way')
    # Long-format table: one row per (obj, id, distr) with 2-way accuracy.
    df = pandas.DataFrame(preds, index=meta.obj.iloc[test_idx], columns=order).reset_index()
    df['id'] = meta.id.iloc[test_idx].values
    df = df.set_index(['obj', 'id'])
    df = df.stack().reset_index()
    df = df.rename(columns={'level_2': 'distr', 0: 'acc'})
    df = df[['obj', 'id', 'distr', 'acc']]
    # Hit rates -> normalized d' per (image, distractor).
    dprime = utils.hitrate_to_dprime(df, kind='i2', target='obj', distr='distr',
                                     imid='id', value='acc', normalize=True)
    # Canonical object ordering used by the human objectome dataset.
    obj_order = ['lo_poly_animal_RHINO_2', 'calc01', 'womens_shorts_01M', 'zebra', 'MB27346', 'build51', 'weimaraner', 'interior_details_130_2', 'lo_poly_animal_CHICKDEE', 'kitchen_equipment_knife2', 'interior_details_103_4', 'lo_poly_animal_BEAR_BLK', 'MB30203', 'antique_furniture_item_18', 'lo_poly_animal_ELE_AS1', 'MB29874', 'womens_stockings_01M', 'Hanger_02', 'dromedary', 'MB28699', 'lo_poly_animal_TRANTULA', 'flarenut_spanner', 'MB30758', '22_acoustic_guitar']
    mm = meta.iloc[test_idx]
    id_order = np.concatenate([mm[mm.obj == o].id for o in obj_order])
    # Impose the human data's row/column ordering via ordered categoricals.
    dprime.distr = dprime.distr.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=order))
    dprime.id = dprime.id.astype(pandas.api.types.CategoricalDtype(ordered=True, categories=id_order))
    dprime = dprime.sort_values('id')
    preds = dprime.set_index(['id', 'obj', 'distr']).unstack('distr').values
    # Noise-corrected consistency against each human split-half.
    df = []
    for iterno, split in enumerate(tqdm.tqdm(human_acc)):
        # Only compare cells finite in the model and in both human halves.
        inds = np.isfinite(split[0]) & np.isfinite(split[1]) & np.isfinite(preds)
        c0 = np.corrcoef(preds[inds], split[0][inds])[0, 1]
        c1 = np.corrcoef(preds[inds], split[1][inds])[0, 1]
        corr = (c0 + c1) / 2
        ic = np.corrcoef(split[0][inds], split[1][inds])[0, 1]
        df.append([iterno, ic, corr, corr / np.sqrt(ic)])
    df = pandas.DataFrame(df, columns=['split', 'internal_cons', 'r', 'cons'])
    return df
if __name__ == "__main__":
    # NOTE(review): `hvm` and `time` are not defined anywhere visible in this
    # module -- presumably imported/assigned elsewhere; confirm before running.
    df = hvm.human_data if not time else hvm.human_data_timing
|
from FilmSpecimenGenerator import Config

# (name, C++ type, default value, description) for every generator parameter.
_PARAMETERS = (
    ("sizeX", "real_t", "0", "Specimen length (x direction)"),
    ("sizeY", "real_t", "0", "Specimen length (y direction)"),
    ("sizeZ", "real_t", "0", "Specimen length (z direction)"),
    ("oopp", "bool", "false", "Specimen out-of-plane periodicity (0 - film, 1 - cnt material)"),
    ("numBlocksX", "uint_t", "1", "Number of blocks in x direction"),
    ("numBlocksY", "uint_t", "1", "Number of blocks in y direction"),
    ("numBlocksZ", "uint_t", "1", "Number of blocks in z direction"),
    ("min_OOP", "real_t", "0", "Out-of-plane angle minimum"),
    ("max_OOP", "real_t", "0", "Out-of-plane angle maximum"),
    ("numCNTs", "int", "0", "Number of CNTs"),
    ("numSegs", "int", "0", "Number of segments in a CNT"),
    ("spacing", "real_t", "0", "Segment half-spacing"),
    ("localDamping", "real_t", "0", "Local damping coefficient"),
    ("viscousDamping", "real_t", "0", "Viscous damping coefficient"),
    ("seed", "uint_t", "0", "random generator seed"),
    ("vdW", "int", "0", "type of vdW interaction model"),
    ("simulationSteps", "int", "0", "Relaxation duration"),
    ("saveVTKEveryNthStep", "int", "0", "timesteps between saving VTK outputs"),
    ("saveEnergyEveryNthStep", "int", "0", "timesteps between saving energies"),
    ("saveConfEveryNthStep", "int", "0", "timesteps between saving confs"),
    ("vtkFolder", "std::string", '"."', "Folder for VTK files"),
    ("energyFolder", "std::string", '"."', "Folder for energy files"),
    ("confFolder", "std::string", '"."', "Folder for conf files"),
    ("useMPIIO", "bool", "false", "Write a single file instead of one file per process"),
    ("sqlFile", "std::string", '"cnt.sqlite"', "database file"),
    ("initialConfigurationFile", "std::string", '""', "restart from checkpoint"),
)

# Register every parameter in declaration order, then emit the config.
cfg = Config()
for name, ctype, default, description in _PARAMETERS:
    cfg.add_parameter(name, ctype, default, description)
cfg.generate()
|
from fife import fife
import base
from base import BaseBehaviour
class MovingAgentBehaviour (BaseBehaviour):
    """Fife agent listener for agents that can move around the map."""

    def __init__(self):
        BaseBehaviour.__init__(self)
        self.speed = 0
        self.idle_counter = 1

    def onNewMap(self, layer):
        """Sets the agent onto the new layer and resets the idle counter."""
        BaseBehaviour.onNewMap(self, layer)
        self.idle_counter = 1

    def approach(self, location_or_agent, action=None):
        """Approaches a location or another agent, then performs an action
        (if set).
        @type location_or_agent: fife.Location or fife.Instance
        @param location_or_agent: the location or agent to approach
        @type action: Action
        @param action: The action to schedule for execution after the
        approach.
        @return: None"""
        self.state = base._AGENT_STATE_APPROACH
        self.nextAction = action
        if isinstance(location_or_agent, fife.Instance):
            # Another agent: follow it at running speed.
            self.agent.follow('run', location_or_agent, self.speed + 1)
        else:
            # A coordinate sequence: run to that layer cell.
            cell = tuple(int(float(coord)) for coord in location_or_agent)
            destination = fife.Location(self.getLocation())
            destination.setLayerCoordinates(fife.ModelCoordinate(*cell))
            self.agent.move('run', destination, self.speed + 1)

    def onInstanceActionFinished(self, instance, action):
        """Tracks consecutive 'stand' actions in the idle counter.
        @type instance: ???
        @param instance: ???
        @type action: ???
        @param action: ???
        @return: None"""
        BaseBehaviour.onInstanceActionFinished(self, instance, action)
        if action.getId() == 'stand':
            self.idle_counter += 1
        else:
            self.idle_counter = 1

    def idle(self):
        """Plays the standing animation.
        @return: None"""
        BaseBehaviour.idle(self)
        self.animate('stand')

    def run(self, location):
        """Makes the PC run to a certain location
        @type location: fife.ScreenPoint
        @param location: Screen position to run to.
        @return: None"""
        self.state = base._AGENT_STATE_RUN
        self.clear_animations()
        self.nextAction = None
        self.agent.move('run', location, self.speed + 1)

    def walk(self, location):
        """Makes the PC walk to a certain location.
        @type location: fife.ScreenPoint
        @param location: Screen position to walk to.
        @return: None"""
        self.state = base._AGENT_STATE_RUN
        self.clear_animations()
        self.nextAction = None
        self.agent.move('walk', location, self.speed - 1)
|
from PySide import QtCore
class Interface(QtCore.QObject):
    """Qt bridge exposing application signals and slots to the UI layer."""

    highlightedPostcardIndexChangedSignal = QtCore.Signal(int)
    screenStateChangedSignal = QtCore.Signal(str)
    forceScreenChangeSignal = QtCore.Signal()
    forceCreateStampSignal = QtCore.Signal()

    @QtCore.Slot(int)
    def highlightedPostcardIndexChanged(self, index):
        """Relay a highlighted-postcard change to signal listeners."""
        self.highlightedPostcardIndexChangedSignal.emit(index)

    @QtCore.Slot(str)
    def screenStateChanged(self, state):
        """Relay a screen-state change to signal listeners."""
        self.screenStateChangedSignal.emit(state)

    def forceScreenChange(self, screen):
        # NOTE(review): `screen` is accepted but not forwarded -- the signal
        # carries no payload; confirm whether listeners need it.
        self.forceScreenChangeSignal.emit()

    def createStamp(self, name, icon):
        """Stash the stamp data for later retrieval and notify listeners."""
        self.pendingStampIcon = icon
        self.pendingStampName = name
        self.forceCreateStampSignal.emit()

    @QtCore.Slot(result=str)
    def getPendingStampName(self):
        """Return the name stashed by the last createStamp call."""
        return self.pendingStampName

    @QtCore.Slot(result=str)
    def getPendingStampIcon(self):
        """Return the icon stashed by the last createStamp call."""
        return self.pendingStampIcon
|
"""
Mail services.
This module is still a work in progress.
Its pieces should be moved to their respective packages once the code stabilizes.
"""
import json
import os
from collections import defaultdict
from collections import namedtuple
from twisted.application import service
from twisted.internet import defer
from twisted.python import log
from leap.bonafide import config
from leap.common.service_hooks import HookableService
from leap.keymanager import KeyManager
from leap.keymanager.errors import KeyNotFound
from leap.keymanager.validation import ValidationLevels
from leap.soledad.client.api import Soledad
from leap.mail.constants import INBOX_NAME
from leap.mail.mail import Account
from leap.mail.imap.service import imap
from leap.mail.incoming.service import IncomingMail, INCOMING_CHECK_PERIOD
from leap.mail import smtp
from leap.bitmask.core.uuid_map import UserMap
from leap.bitmask.core.configurable import DEFAULT_BASEDIR
class Container(object):
    """Minimal registry mapping a key (typically a userid) to an instance.

    Subclasses use it to track one per-user service object (Soledad,
    KeyManager, ...). An optional parent `service` is attached so
    containers can trigger hooks on it.
    """

    def __init__(self, service=None):
        # A plain dict is intended here: the previous defaultdict(None)
        # behaved exactly like a dict (None is not a factory), so the
        # misleading wrapper is dropped.
        self._instances = {}
        if service is not None:
            self.service = service

    def get_instance(self, key):
        """Return the instance registered under `key`, or None."""
        return self._instances.get(key, None)

    def add_instance(self, key, data):
        """Register `data` under `key`, replacing any previous instance."""
        self._instances[key] = data
class ImproperlyConfigured(Exception):
    """Raised when a service is used without the configuration it needs."""
class SoledadContainer(Container):
    """Container that builds and tracks one Soledad instance per userid."""

    def __init__(self, service=None, basedir=DEFAULT_BASEDIR):
        self._basedir = os.path.expanduser(basedir)
        # Maps userid <-> uuid, persisted across sessions.
        self._usermap = UserMap()
        super(SoledadContainer, self).__init__(service=service)

    def add_instance(self, userid, passphrase, uuid=None, token=None):
        """Create a Soledad instance for `userid` and register it.

        NOTE(review): the signature intentionally differs from
        Container.add_instance(key, data) -- this method constructs the
        instance itself before delegating registration to the base class.
        Triggers the 'on_new_soledad_instance' hook on the parent service.
        """
        if not uuid:
            # Try to recover the uuid from a previous bootstrap; bail out
            # silently if this user was never seen before.
            bootstrapped_uuid = self._usermap.lookup_uuid(userid, passphrase)
            uuid = bootstrapped_uuid
            if not uuid:
                return
        else:
            self._usermap.add(userid, uuid, passphrase)

        user, provider = userid.split('@')

        soledad_path = os.path.join(self._basedir, 'soledad')
        soledad_url = _get_soledad_uri(self._basedir, provider)
        cert_path = _get_ca_cert_path(self._basedir, provider)

        soledad = self._create_soledad_instance(
            uuid, passphrase, soledad_path, soledad_url,
            cert_path, token)

        super(SoledadContainer, self).add_instance(userid, soledad)

        data = {'user': userid, 'uuid': uuid, 'token': token,
                'soledad': soledad}
        self.service.trigger_hook('on_new_soledad_instance', **data)

    def _create_soledad_instance(self, uuid, passphrase, soledad_path,
                                 server_url, cert_file, token):
        # setup soledad info
        secrets_path = os.path.join(soledad_path, '%s.secret' % uuid)
        local_db_path = os.path.join(soledad_path, '%s.db' % uuid)

        # Without an auth token the instance is created offline-only.
        if token is None:
            syncable = False
            token = ''
        else:
            syncable = True

        # NOTE: `unicode` implies this module targets Python 2.
        return Soledad(
            uuid,
            unicode(passphrase),
            secrets_path=secrets_path,
            local_db_path=local_db_path,
            server_url=server_url,
            cert_file=cert_file,
            auth_token=token,
            defer_encryption=True,
            syncable=syncable)

    def set_remote_auth_token(self, userid, token):
        """Pass a (new) SRP auth token to the user's Soledad instance."""
        self.get_instance(userid).token = token

    def set_syncable(self, userid, state):
        # TODO should check that there's a token!
        self.get_instance(userid).set_syncable(bool(state))

    def sync(self, userid):
        """Trigger a Soledad sync for `userid`."""
        self.get_instance(userid).sync()
def _get_provider_from_full_userid(userid):
    """Return a bonafide Provider for the domain part of user@provider."""
    provider_id = config.get_username_and_provider(userid)[1]
    return config.Provider(provider_id)
def is_service_ready(service, provider):
    """
    Tell whether `service` can be started for `provider`.

    True only when all of the following hold:
    - the provider offers that service,
    - we have the config files for the service,
    - the service is enabled.
    """
    # All three checks are evaluated eagerly (no short-circuit), matching
    # the provider API calls being side-effect free queries.
    offered = provider.offers_service(service)
    configured = provider.has_config_for_service(service)
    enabled = provider.is_service_enabled(service)
    return offered and configured and enabled
class SoledadService(HookableService):
    """Twisted service that creates/updates Soledad instances via hooks."""

    def __init__(self, basedir):
        service.Service.__init__(self)
        self._basedir = basedir

    def startService(self):
        """Create the per-user Soledad container, then start the service."""
        log.msg('Starting Soledad Service')
        self._container = SoledadContainer(service=self)
        super(SoledadService, self).startService()

    # hooks

    def hook_on_passphrase_entry(self, **kw):
        # Defer the real work until the provider configuration is ready.
        userid = kw.get('username')
        provider = _get_provider_from_full_userid(userid)
        provider.callWhenReady(self._hook_on_passphrase_entry, provider, **kw)

    def _hook_on_passphrase_entry(self, provider, **kw):
        if is_service_ready('mx', provider):
            userid = kw.get('username')
            password = kw.get('password')
            uuid = kw.get('uuid')
            container = self._container
            log.msg("on_passphrase_entry: New Soledad Instance: %s" % userid)
            if not container.get_instance(userid):
                # No auth token yet at passphrase-entry time; it arrives
                # later via hook_on_bonafide_auth.
                container.add_instance(userid, password, uuid=uuid, token=None)
        else:
            log.msg('Service MX is not ready...')

    def hook_on_bonafide_auth(self, **kw):
        userid = kw['username']
        provider = _get_provider_from_full_userid(userid)
        provider.callWhenReady(self._hook_on_bonafide_auth, provider, **kw)

    def _hook_on_bonafide_auth(self, provider, **kw):
        if provider.offers_service('mx'):
            userid = kw['username']
            password = kw['password']
            token = kw['token']
            uuid = kw['uuid']

            container = self._container
            if container.get_instance(userid):
                # Existing instance: refresh the token and enable sync.
                log.msg("Passing a new SRP Token to Soledad: %s" % userid)
                container.set_remote_auth_token(userid, token)
                container.set_syncable(userid, True)
            else:
                log.msg("Adding a new Soledad Instance: %s" % userid)
                container.add_instance(
                    userid, password, uuid=uuid, token=token)
class KeymanagerContainer(Container):
    """Container that builds and tracks one KeyManager instance per userid."""

    def __init__(self, service=None, basedir=DEFAULT_BASEDIR):
        self._basedir = os.path.expanduser(basedir)
        super(KeymanagerContainer, self).__init__(service=service)

    def add_instance(self, userid, token, uuid, soledad):
        """Create a KeyManager for `userid`, ensure it has a keypair, and
        register it once ready.

        NOTE(review): the signature intentionally differs from
        Container.add_instance(key, data); registration happens in
        _on_keymanager_ready_cb after key setup. Returns a Deferred
        firing with the ready KeyManager.
        """
        keymanager = self._create_keymanager_instance(
            userid, token, uuid, soledad)
        d = self._get_or_generate_keys(keymanager, userid)
        d.addCallback(self._on_keymanager_ready_cb, userid, soledad)
        return d

    def set_remote_auth_token(self, userid, token):
        # NOTE(review): pokes the private _token attribute of KeyManager
        # directly -- confirm there is no public setter.
        self.get_instance(userid)._token = token

    def _on_keymanager_ready_cb(self, keymanager, userid, soledad):
        # TODO use onready-deferreds instead
        super(KeymanagerContainer, self).add_instance(userid, keymanager)

        log.msg("Adding Keymanager instance for: %s" % userid)
        data = {'userid': userid, 'soledad': soledad, 'keymanager': keymanager}
        self.service.trigger_hook('on_new_keymanager_instance', **data)

    def _get_or_generate_keys(self, keymanager, userid):
        """Return a Deferred that ensures a private key exists for `userid`,
        generating and uploading one if missing, and fires with `keymanager`.
        """

        def if_not_found_generate(failure):
            # TODO -------------- should ONLY generate if INITIAL_SYNC_DONE.
            # ie: put callback on_soledad_first_sync_ready -----------------
            # --------------------------------------------------------------
            failure.trap(KeyNotFound)
            log.msg("Core: Key not found. Generating key for %s" % (userid,))
            d = keymanager.gen_key()
            d.addCallbacks(send_key, log_key_error("generating"))
            return d

        def send_key(ignored):
            # ----------------------------------------------------------------
            # It might be the case that we have generated a key-pair
            # but this hasn't been successfully uploaded. How do we know that?
            # XXX Should this be a method of bonafide instead?
            # -----------------------------------------------------------------
            d = keymanager.send_key()
            d.addCallbacks(
                lambda _: log.msg(
                    "Key generated successfully for %s" % userid),
                log_key_error("sending"))
            return d

        def log_key_error(step):
            # Factory for an errback that logs and re-raises the failure.
            def log_error(failure):
                log.err("Error while %s key!" % step)
                log.err(failure)
                return failure
            return log_error

        # Look up the local private key first; only generate on KeyNotFound.
        d = keymanager.get_key(userid, private=True, fetch_remote=False)
        d.addErrback(if_not_found_generate)
        d.addCallback(lambda _: keymanager)
        return d

    def _create_keymanager_instance(self, userid, token, uuid, soledad):
        user, provider = userid.split('@')
        nickserver_uri = self._get_nicknym_uri(provider)

        cert_path = _get_ca_cert_path(self._basedir, provider)
        api_uri = self._get_api_uri(provider)

        # Fall back to a token stashed on the parent service (stored there
        # when auth happened before this container was asked for a KM).
        if not token:
            token = self.service.tokens.get(userid)

        km_args = (userid, nickserver_uri, soledad)
        # TODO use the method in
        # services.soledadbootstrapper._get_gpg_bin_path.
        # That should probably live in keymanager package.
        km_kwargs = {
            "token": token, "uid": uuid,
            "api_uri": api_uri, "api_version": "1",
            "ca_cert_path": cert_path,
            "gpgbinary": "/usr/bin/gpg"
        }
        keymanager = KeyManager(*km_args, **km_kwargs)
        return keymanager

    def _get_api_uri(self, provider):
        # TODO get this from service.json (use bonafide service)
        api_uri = "https://api.{provider}:4430".format(
            provider=provider)
        return api_uri

    def _get_nicknym_uri(self, provider):
        return 'https://nicknym.{provider}:6425'.format(
            provider=provider)
class KeymanagerService(HookableService):
    """Twisted service exposing key management commands and hooks."""

    def __init__(self, basedir=DEFAULT_BASEDIR):
        service.Service.__init__(self)
        self._basedir = basedir

    def startService(self):
        """Create the per-user KeyManager container and start the service."""
        log.msg('Starting Keymanager Service')
        self._container = KeymanagerContainer(self._basedir)
        self._container.service = self
        # Tokens received before a KeyManager exists are parked here.
        self.tokens = {}
        super(KeymanagerService, self).startService()

    # hooks

    def hook_on_new_soledad_instance(self, **kw):
        container = self._container
        user = kw['user']
        token = kw['token']
        uuid = kw['uuid']
        soledad = kw['soledad']
        if not container.get_instance(user):
            log.msg('Adding a new Keymanager instance for %s' % user)
            if not token:
                # Reuse a token stashed by an earlier bonafide auth.
                token = self.tokens.get(user)
            container.add_instance(user, token, uuid, soledad)

    def hook_on_bonafide_auth(self, **kw):
        # Defer the real work until the provider configuration is ready.
        userid = kw['username']
        provider = _get_provider_from_full_userid(userid)
        provider.callWhenReady(self._hook_on_bonafide_auth, provider, **kw)

    def _hook_on_bonafide_auth(self, provider, **kw):
        if provider.offers_service('mx'):
            userid = kw['username']
            token = kw['token']

            container = self._container
            if container.get_instance(userid):
                log.msg('Passing a new SRP Token to Keymanager: %s' % userid)
                container.set_remote_auth_token(userid, token)
            else:
                # No KeyManager yet: park the token for when one is built.
                log.msg('storing the keymanager token... %s ' % token)
                self.tokens[userid] = token

    # commands

    def do_list_keys(self, userid, private=False):
        """Return a Deferred with all keys (as dicts) for `userid`."""
        km = self._container.get_instance(userid)
        d = km.get_all_keys(private=private)
        d.addCallback(lambda keys: [dict(key) for key in keys])
        return d

    def do_export(self, userid, address, private=False):
        """Return a Deferred with the locally stored key for `address`."""
        km = self._container.get_instance(userid)
        d = km.get_key(address, private=private, fetch_remote=False)
        d.addCallback(lambda key: dict(key))
        return d

    def do_insert(self, userid, address, rawkey, validation='Fingerprint'):
        """Import `rawkey` for `address` and return the stored key dict."""
        km = self._container.get_instance(userid)
        validation = ValidationLevels.get(validation)
        d = km.put_raw_key(rawkey, address, validation=validation)
        d.addCallback(lambda _: km.get_key(address, fetch_remote=False))
        d.addCallback(lambda key: dict(key))
        return d

    @defer.inlineCallbacks
    def do_delete(self, userid, address, private=False):
        """Delete the key for `address`; returns its fingerprint."""
        km = self._container.get_instance(userid)
        key = yield km.get_key(address, private=private, fetch_remote=False)
        km.delete_key(key)
        defer.returnValue(key.fingerprint)
class StandardMailService(service.MultiService, HookableService):
    """
    A collection of Services.

    This is the parent service, that launches 3 different services that expose
    Encrypted Mail Capabilities on specific ports:

        - SMTP service, on port 2013
        - IMAP service, on port 1984
        - The IncomingMail Service, which doesn't listen on any port, but
          watches and processes the Incoming Queue and saves the processed
          mail into the matching INBOX.
    """

    name = 'mail'

    # TODO factor out Mail Service to inside mail package.

    subscribed_to_hooks = ('on_new_keymanager_instance',)

    def __init__(self, basedir):
        # Per-user session registries; handed by reference to the child
        # IMAP/SMTP services, so entries added here become visible there.
        self._basedir = basedir
        self._soledad_sessions = {}
        self._keymanager_sessions = {}
        self._sendmail_opts = {}
        self._service_tokens = {}
        self._active_user = None
        super(StandardMailService, self).__init__()
        self.initializeChildrenServices()

    def initializeChildrenServices(self):
        """Create and attach the IMAP, SMTP and IncomingMail children."""
        self.addService(IMAPService(self._soledad_sessions))
        self.addService(SMTPService(
            self._soledad_sessions, self._keymanager_sessions,
            self._sendmail_opts))
        # TODO adapt the service to receive soledad/keymanager sessions object.
        # See also the TODO before IncomingMailService.startInstance
        self.addService(IncomingMailService(self))

    def startService(self):
        log.msg('Starting Mail Service...')
        super(StandardMailService, self).startService()

    def stopService(self):
        super(StandardMailService, self).stopService()

    def startInstance(self, userid, soledad, keymanager):
        """Register userid's sessions and start its incoming-mail loop.

        :returns: the deferred from get_or_create_service_token, firing
            once a 'mail_auth' token has been stored for userid.
        """
        username, provider = userid.split('@')

        self._soledad_sessions[userid] = soledad
        self._keymanager_sessions[userid] = keymanager

        sendmail_opts = _get_sendmail_opts(self._basedir, provider, username)
        self._sendmail_opts[userid] = sendmail_opts

        incoming = self.getServiceNamed('incoming_mail')
        incoming.startInstance(userid)

        def registerToken(token):
            # The last user to complete startInstance becomes "active".
            self._service_tokens[userid] = token
            self._active_user = userid

        d = soledad.get_or_create_service_token('mail_auth')
        d.addCallback(registerToken)
        return d

    def stopInstance(self):
        pass

    # hooks

    def hook_on_new_keymanager_instance(self, **kw):
        # XXX we can specify this as a waterfall, or just AND the two
        # conditions.
        userid = kw['userid']
        soledad = kw['soledad']
        keymanager = kw['keymanager']

        # TODO --- only start instance if "autostart" is True.
        self.startInstance(userid, soledad, keymanager)

    # commands

    def do_status(self):
        """Return {'mail': 'running'|'disabled'} based on service state."""
        status = 'running' if self.running else 'disabled'
        return {'mail': status}

    def get_token(self):
        """Return a fired deferred with the active user's service token.

        Fires with {'user': None} when no instance has been started.
        """
        active_user = self._active_user
        if not active_user:
            return defer.succeed({'user': None})
        token = self._service_tokens.get(active_user)
        return defer.succeed({'user': active_user, 'token': token})

    def do_get_smtp_cert_path(self, userid):
        """Return the path of userid's SMTP client certificate."""
        username, provider = userid.split('@')
        return _get_smtp_client_cert_path(self._basedir, provider, username)

    # access to containers

    def get_soledad_session(self, userid):
        return self._soledad_sessions.get(userid)

    def get_keymanager_session(self, userid):
        return self._keymanager_sessions.get(userid)
class IMAPService(service.Service):
    """Wraps the IMAP listener created by ``imap.run_service``."""

    name = 'imap'

    def __init__(self, soledad_sessions):
        listening_port, server_factory = imap.run_service(soledad_sessions)
        self._port = listening_port
        self._factory = server_factory
        self._soledad_sessions = soledad_sessions
        super(IMAPService, self).__init__()

    def startService(self):
        log.msg('Starting IMAP Service')
        super(IMAPService, self).startService()

    def stopService(self):
        """Stop listening and shut the protocol factory down."""
        self._port.stopListening()
        self._factory.doStop()
        super(IMAPService, self).stopService()
class SMTPService(service.Service):
    """Wraps the SMTP listener created by ``smtp.run_service``."""

    name = 'smtp'

    def __init__(self, soledad_sessions, keymanager_sessions, sendmail_opts,
                 basedir=DEFAULT_BASEDIR):
        self._basedir = os.path.expanduser(basedir)
        listening_port, server_factory = smtp.run_service(
            soledad_sessions, keymanager_sessions, sendmail_opts)
        self._port = listening_port
        self._factory = server_factory
        self._soledad_sessions = soledad_sessions
        self._keymanager_sessions = keymanager_sessions
        self._sendmail_opts = sendmail_opts
        super(SMTPService, self).__init__()

    def startService(self):
        log.msg('Starting SMTP Service')
        super(SMTPService, self).startService()

    def stopService(self):
        # TODO cleanup all instances
        # NOTE(review): unlike IMAPService, the listening port is not
        # stopped here -- confirm whether that is intentional.
        super(SMTPService, self).stopService()
class IncomingMailService(service.Service):
    """Parent service for per-user IncomingMail instances.

    Listens on no port: each started instance polls the incoming queue
    and delivers processed mail into the user's INBOX.
    """

    name = 'incoming_mail'

    def __init__(self, mail_service):
        super(IncomingMailService, self).__init__()
        # Parent StandardMailService, used to look up per-user sessions.
        self._mail = mail_service
        # userid -> IncomingMail instance.
        self._instances = {}

    def startService(self):
        log.msg('Starting IncomingMail Service')
        super(IncomingMailService, self).startService()

    def stopService(self):
        super(IncomingMailService, self).stopService()

    # Individual accounts

    # TODO IncomingMail *IS* already a service.
    # I think we should better model the current Service
    # as a startInstance inside a container, and get this
    # multi-tenant service inside the leap.mail.incoming.service.
    # ... or just simply make it a multiService and set per-user
    # instances as Child of this parent.

    def startInstance(self, userid):
        """Start polling incoming mail for userid.

        The user's soledad/keymanager sessions must already be
        registered with the parent mail service.
        """
        soledad = self._mail.get_soledad_session(userid)
        keymanager = self._mail.get_keymanager_session(userid)

        log.msg('Starting Incoming Mail instance for %s' % userid)
        self._start_incoming_mail_instance(
            keymanager, soledad, userid)

    def stopInstance(self, userid):
        # TODO toggle offline!
        pass

    def _start_incoming_mail_instance(self, keymanager, soledad,
                                      userid, start_sync=True):
        """Build and register an IncomingMail once userid's INBOX is ready.

        :returns: a deferred firing after registration; errors are
            logged via log.err rather than propagated.
        """
        def setUpIncomingMail(inbox):
            # Poll the inbox every INCOMING_CHECK_PERIOD seconds.
            incoming_mail = IncomingMail(
                keymanager, soledad,
                inbox, userid,
                check_period=INCOMING_CHECK_PERIOD)
            return incoming_mail

        def registerInstance(incoming_instance):
            self._instances[userid] = incoming_instance
            if start_sync:
                incoming_instance.startService()

        acc = Account(soledad, userid)
        d = acc.callWhenReady(
            lambda _: acc.get_collection_by_mailbox(INBOX_NAME))
        d.addCallback(setUpIncomingMail)
        d.addCallback(registerInstance)
        d.addErrback(log.err)
        return d
# Service names a provider may offer; used to validate config lookups.
SERVICES = ('soledad', 'smtp', 'eip')

# One server entry picked from a provider's service config.
Provider = namedtuple(
    'Provider', ['hostname', 'ip_address', 'location', 'port'])

# TLS client credentials plus the SMTP host/port to relay through.
SendmailOpts = namedtuple(
    'SendmailOpts', ['cert', 'key', 'hostname', 'port'])
def _get_ca_cert_path(basedir, provider):
path = os.path.join(
basedir, 'providers', provider, 'keys', 'ca', 'cacert.pem')
return path
def _get_sendmail_opts(basedir, provider, username):
    """Build SendmailOpts for relaying through the provider's SMTP.

    The client certificate file doubles as the TLS key file.
    """
    cert_path = _get_smtp_client_cert_path(basedir, provider, username)
    smtp_provider = _get_provider_for_service('smtp', basedir, provider)
    return SendmailOpts(cert_path, cert_path,
                        smtp_provider.hostname, smtp_provider.port)
def _get_smtp_client_cert_path(basedir, provider, username):
path = os.path.join(
basedir, 'providers', provider, 'keys', 'client', 'stmp_%s.pem' %
username)
return path
def _get_config_for_service(service, basedir, provider):
    """Load and return the parsed JSON config for a provider service.

    :raises ImproperlyConfigured: for an unknown service name, or when
        the config file cannot be read (possibly because the provider
        does not offer this service).
    """
    if service not in SERVICES:
        raise ImproperlyConfigured('Tried to use an unknown service')

    config_path = os.path.join(
        basedir, 'providers', provider, '%s-service.json' % service)
    try:
        with open(config_path) as config_file:
            return json.loads(config_file.read())
    except IOError:
        # FIXME might be that the provider DOES NOT offer this service!
        raise ImproperlyConfigured(
            'could not open config file %s' % config_path)
def first(xs):
return xs[0]
def _pick_server(config, strategy=first):
"""
Picks a server from a list of possible choices.
The service files have a <describe>.
This implementation just picks the FIRST available server.
"""
servers = config['hosts'].keys()
choice = config['hosts'][strategy(servers)]
return choice
def _get_subdict(d, keys):
return {key: d.get(key) for key in keys}
def _get_provider_for_service(service, basedir, provider):
    """Return a Provider namedtuple for one server of a service.

    :raises ImproperlyConfigured: for an unknown service name.
    """
    if service not in SERVICES:
        raise ImproperlyConfigured('Tried to use an unknown service')

    service_config = _get_config_for_service(service, basedir, provider)
    server_entry = _pick_server(service_config)
    fields = ('hostname', 'ip_address', 'location', 'port')
    return Provider(**_get_subdict(server_entry, fields))
def _get_smtp_uri(basedir, provider):
    """Return the https URI of the provider's SMTP service."""
    smtp_provider = _get_provider_for_service('smtp', basedir, provider)
    return 'https://{hostname}:{port}'.format(
        hostname=smtp_provider.hostname, port=smtp_provider.port)
def _get_soledad_uri(basedir, provider):
    """Return the https URI of the provider's Soledad service."""
    soledad_provider = _get_provider_for_service('soledad', basedir, provider)
    return 'https://{hostname}:{port}'.format(
        hostname=soledad_provider.hostname, port=soledad_provider.port)
|
from django.contrib import admin
from django.conf.urls import patterns, include, url

# Populate the admin site with ModelAdmin classes from installed apps.
admin.autodiscover()

# URL routing table.
# NOTE(review): ``patterns('', ...)`` is the old-style Django API that
# was removed in later releases; this file targets an older Django.
urlpatterns = patterns('',
    url(r'^grappelli/', include('grappelli.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^customers/', include('customers.urls')),
    # Catch-all prefixes: readings.urls is consulted before home.urls.
    url(r'^', include('readings.urls')),
    url(r'^', include('home.urls')),
)
|
from gnuradio import gr, gr_unittest
import analog_swig as analog
import blocks_swig as blocks
import math
def sincos(x):
    """Return cos(x) + j*sin(x), i.e. the unit-circle point exp(j*x)."""
    return complex(math.cos(x), math.sin(x))
class test_phase_modulator(gr_unittest.TestCase):
    """QA for analog.phase_modulator_fc."""

    def setUp(self):
        # Fresh flowgraph for each test.
        self.tb = gr.top_block()

    def tearDown(self):
        self.tb = None

    def test_fm_001(self):
        # The phase modulator should emit exp(j * sensitivity * x).
        sensitivity = math.pi / 4
        src_data = (1.0/4, 1.0/2, 1.0/4, -1.0/4, -1.0/2, -1/4.0)
        expected_result = tuple(sincos(sensitivity * s) for s in src_data)

        source = blocks.vector_source_f(src_data)
        modulator = analog.phase_modulator_fc(sensitivity)
        sink = blocks.vector_sink_c()
        self.tb.connect(source, modulator)
        self.tb.connect(modulator, sink)
        self.tb.run()

        self.assertComplexTuplesAlmostEqual(
            expected_result, sink.data(), 5)
# Run this QA suite (writing XML results) when executed directly.
if __name__ == '__main__':
    gr_unittest.run(test_phase_modulator, "test_phase_modulator.xml")
|
import os
def data_file(fname):
    """Return the path to a data file of ours."""
    here = os.path.dirname(__file__)
    return os.path.join(here, fname)
|
"""General test code for pyfusion.
Test code which doesn't have any other obvious home
(e.g.: data, acquisition, ...) goes here.
"""
import unittest, random, string, ConfigParser, os
import inspect, pkgutil, sys
import StringIO
import pyfusion
# Flags naming optional test groups (cf. ``SQLConfigCheck.dev`` below);
# presumably consumed by the test generation machinery -- confirm.
TEST_FLAGS = ['dev']

# Config files shipped next to this module.
TEST_DATA_PATH = os.path.abspath(os.path.dirname(__file__))
TEST_CONFIG_FILE = os.path.join(TEST_DATA_PATH, "test.cfg")
TEST_NOSQL_CONFIG_FILE = os.path.join(TEST_DATA_PATH, "test_nosql.cfg")

# Fixture device/section names used throughout the test cases below.
CONFIG_TEST_DEVICE_NAME = "TestDevice"
NONCONFIG_TEST_DEVICE_NAME = "UnlistedTestDevice"
CONFIG_EMPTY_TEST_DEVICE_NAME = "TestEmptyDevice"
TEST_SHOT_NUMBER = 12345
UNLISTED_CONFIG_SECTION_TYPE = "UnlistedType"
class BasePyfusionTestCase(unittest.TestCase):
    """Simple customisation of TestCase."""

    def __init__(self, *args):
        # Expose the module-level fixture names as instance attributes
        # so test methods can use them without touching the constants.
        self.listed_device = CONFIG_TEST_DEVICE_NAME
        self.listed_empty_device = CONFIG_EMPTY_TEST_DEVICE_NAME
        self.unlisted_device = NONCONFIG_TEST_DEVICE_NAME
        self.shot_number = TEST_SHOT_NUMBER
        self.unlisted_config_section_type = UNLISTED_CONFIG_SECTION_TYPE
        super(BasePyfusionTestCase, self).__init__(*args)
class PfTestBase(object):
    """Base class for generated sql and non-sql test cases."""
class SQLTestCase(BasePyfusionTestCase):
    """Test case running against the SQL-backed test configuration."""

    def setUp(self):
        # Tear down any ORM state and wipe config before loading the
        # SQL test configuration; order matters here.
        pyfusion.orm_manager.shutdown_orm()
        pyfusion.conf.utils.clear_config()
        pyfusion.conf.utils.read_config(TEST_CONFIG_FILE)
class NoSQLTestCase(BasePyfusionTestCase):
    """Test case running against the no-SQL test configuration."""

    def setUp(self):
        # Same clean-slate sequence as SQLTestCase, but loading the
        # configuration without a database backend.
        pyfusion.orm_manager.shutdown_orm()
        pyfusion.conf.utils.clear_config()
        pyfusion.conf.utils.read_config(TEST_NOSQL_CONFIG_FILE)
class ConfigCheck(PfTestBase):
    """Check test config file is as we expect"""

    def testListedDevices(self):
        has_section = pyfusion.config.pf_has_section
        self.assertTrue(has_section('Device', self.listed_device))
        self.assertTrue(has_section('Device', self.listed_empty_device))

    def testListedDeviceDatabase(self):
        self.assertTrue(
            pyfusion.config.pf_has_option(
                'Device', self.listed_device, 'database'))

    def testEmptyDevice(self):
        options = pyfusion.config.pf_options(
            'Device', self.listed_empty_device)
        self.assertEqual(len(options), 0)

    def testUnlistedDevice(self):
        self.assertFalse(
            pyfusion.config.pf_has_section('Device', self.unlisted_device))
class InitImports(PfTestBase):
    """Make sure that imports from __init__ files are present"""

    def testImportgetDevice(self):
        # Fails with ImportError if pyfusion.__init__ lacks the name.
        from pyfusion import getDevice

    def testImportgetAcquisition(self):
        from pyfusion import getAcquisition
class ConfigLoaders(PfTestBase):
    """Check pyfusion.read_config and pyfusion.refresh_config"""

    def testReadConfig(self):
        """Check that new config is added but old retained"""
        def has_device(name):
            return pyfusion.config.pf_has_section('Device', name)

        # The unlisted device starts absent; the listed one is present.
        self.assertFalse(has_device(self.unlisted_device))
        self.assertTrue(has_device(self.listed_device))

        # Feed a minimal in-memory config defining the unlisted device.
        tmp_config = StringIO.StringIO(
            "[Device:%s]\n" % (self.unlisted_device))
        pyfusion.read_config(tmp_config)

        # The new section is added and the old one is retained.
        self.assertTrue(has_device(self.unlisted_device))
        self.assertTrue(has_device(self.listed_device))

    def testClearConfig(self):
        """Check that pyfusion.clear_config works."""
        self.assertTrue(
            pyfusion.config.pf_has_section('Device', self.listed_device))
        pyfusion.conf.utils.clear_config()
        self.assertFalse(
            pyfusion.config.pf_has_section('Device', self.listed_device))
        self.assertEqual(pyfusion.config.sections(), [])
class SQLConfigCheck(PfTestBase):
    """Test module-wide SQLAlchemy config."""

    def testSQLConfig(self):
        database = pyfusion.config.get('global', 'database')
        orm = pyfusion.orm_manager
        if database == 'None':
            # No database configured: the ORM must be fully torn down.
            self.assertFalse(orm.IS_ACTIVE)
            for attr in ('Session', 'metadata', 'engine'):
                self.assertFalse(hasattr(orm, attr))
        else:
            self.assertTrue(orm.IS_ACTIVE)
            for attr in ('Session', 'metadata', 'engine'):
                self.assertTrue(hasattr(orm, attr))
            self.assertEqual(orm.engine.url.__str__(), database)

    def test_reload_config(self):
        # Toggle between SQL and no-SQL configs twice; the ORM state
        # must follow each reload.
        for _ in range(2):
            pyfusion.conf.utils.read_config(TEST_CONFIG_FILE)
            self.assertTrue(pyfusion.orm_manager.IS_ACTIVE)
            pyfusion.conf.utils.read_config(TEST_NOSQL_CONFIG_FILE)
            self.assertFalse(pyfusion.orm_manager.IS_ACTIVE)
# Excludes SQLConfigCheck from the 'dev' flag group (cf. TEST_FLAGS);
# presumably read by the sql/non-sql test case generator -- confirm.
SQLConfigCheck.dev=False
def find_subclasses(module, input_class):
    """Return subclasses of input_class found in submodules of module.

    Every submodule of ``module`` is imported as a side effect. The
    input class itself is excluded; a class re-exported by several
    submodules may appear more than once in the result.
    """
    mod_list = list(pkgutil.walk_packages(module.__path__,
                                          module.__name__ + '.'))
    subclasses = []
    for _finder, mod_name, _is_pkg in mod_list:
        __import__(mod_name)
        members = inspect.getmembers(sys.modules[mod_name])
        for _name, candidate in members:
            if (inspect.isclass(candidate) and
                    issubclass(candidate, input_class) and
                    candidate != input_class):
                subclasses.append(candidate)
    return subclasses
|
"""server.py: Process requests to RNNSearch"""
from __future__ import absolute_import, division, print_function, unicode_literals
__author__ = "Frederic Bergeron"
__license__ = "undecided"
__version__ = "1.0"
__email__ = "bergeron@pa.jst.jp"
__status__ = "Development"
import datetime
import json
import numpy as np
from chainer import cuda
import logging
import logging.config
import sys
import tempfile
import os
from os import listdir
from os.path import isfile, join, dirname, basename
import six
from nmt_chainer.dataprocessing.processors import build_dataset_one_side_pp
import nmt_chainer.translation.eval
import traceback
import time
import timeit
import socket
import threading
import xml.etree.ElementTree as ET
import re
import subprocess
import bokeh.embed
import pipes
import hashlib
# Number of log-file lines returned per page by the get_log_file request.
PAGE_SIZE = 5000

# Module-level logger; None until configured elsewhere (not in this chunk).
log = None
class TranslatorThread(threading.Thread):
    """Thread class with a stop() method useful to interrupt the translation before it ends."""

    def __init__(self,
                 dest_filename, gpu, encdec, eos_idx, src_data,
                 beam_search_params,
                 #beam_width, beam_pruning_margin,
                 #beam_score_coverage_penalty=None,
                 #beam_score_coverage_penalty_strength=None,
                 nb_steps=None,
                 nb_steps_ratio=None,
                 #beam_score_length_normalization=None,
                 #beam_score_length_normalization_strength=None,
                 post_score_length_normalization=None,
                 post_score_length_normalization_strength=None,
                 post_score_coverage_penalty=None,
                 post_score_coverage_penalty_strength=None,
                 groundhog=None,
                 tgt_unk_id=None,
                 tgt_indexer=None,
                 #force_finish=None,
                 prob_space_combination=None, reverse_encdec=None,
                 generate_attention_html=None,
                 attn_graph_with_sum=None,
                 attn_graph_attribs=None, src_indexer=None,
                 rich_output_filename=None,
                 #use_unfinished_translation_if_none_found=None,
                 replace_unk=None, src=None, dic=None,
                 remove_unk=None, normalize_unicode_unk=None, attempt_to_relocate_unk_source=None,
                 constraints_fn_list=None,
                 use_chainerx = False):
        """Store every decoding option verbatim; run() forwards them all
        to translate_to_file_with_beam_search."""
        threading.Thread.__init__(self)
        # Event checked via stopped() to allow aborting the decode early.
        self._stop_event = threading.Event()
        self.dest_filename = dest_filename
        self.gpu = gpu
        self.encdec = encdec
        self.eos_idx = eos_idx
        self.src_data = src_data
        self.beam_search_params = beam_search_params
        # self.beam_width = beam_width
        # self.beam_pruning_margin = beam_pruning_margin
        # self.beam_score_coverage_penalty=beam_score_coverage_penalty
        # self.beam_score_coverage_penalty_strength=beam_score_coverage_penalty_strength
        self.nb_steps=nb_steps
        self.nb_steps_ratio=nb_steps_ratio
        # self.beam_score_length_normalization=beam_score_length_normalization
        # self.beam_score_length_normalization_strength=beam_score_length_normalization_strength
        self.post_score_length_normalization=post_score_length_normalization
        self.post_score_length_normalization_strength=post_score_length_normalization_strength
        self.post_score_coverage_penalty=post_score_coverage_penalty
        self.post_score_coverage_penalty_strength=post_score_coverage_penalty_strength
        self.groundhog=groundhog
        self.tgt_unk_id=tgt_unk_id
        self.tgt_indexer=tgt_indexer
        # self.force_finish=force_finish
        self.prob_space_combination=prob_space_combination
        self.reverse_encdec=reverse_encdec
        self.generate_attention_html=generate_attention_html
        self.attn_graph_with_sum=attn_graph_with_sum
        self.attn_graph_attribs=attn_graph_attribs
        self.src_indexer=src_indexer
        self.rich_output_filename=rich_output_filename
        # self.use_unfinished_translation_if_none_found=use_unfinished_translation_if_none_found
        self.replace_unk=replace_unk
        self.src=src
        self.dic=dic
        self.remove_unk=remove_unk
        self.normalize_unicode_unk=normalize_unicode_unk
        self.attempt_to_relocate_unk_source=attempt_to_relocate_unk_source
        self.constraints_fn_list = constraints_fn_list
        self.use_chainerx = use_chainerx

    def stop(self):
        """Request that the running translation abort."""
        self._stop_event.set()

    def stopped(self):
        """Return True once stop() has been called."""
        return self._stop_event.is_set()

    def run(self):
        """Run the beam-search translation, writing to dest_filename.

        ``thread=self`` hands this object to the decoder so it can poll
        stopped() for cancellation.
        """
        from nmt_chainer.translation.eval import translate_to_file_with_beam_search
        translate_to_file_with_beam_search(self.dest_filename, self.gpu, self.encdec, self.eos_idx, self.src_data,
                                           beam_search_params=self.beam_search_params,
                                           #self.beam_width, self.beam_pruning_margin,
                                           #beam_score_coverage_penalty=self.beam_score_coverage_penalty,
                                           #beam_score_coverage_penalty_strength=self.beam_score_coverage_penalty_strength,
                                           nb_steps=self.nb_steps,
                                           nb_steps_ratio=self.nb_steps_ratio,
                                           #beam_score_length_normalization=self.beam_score_length_normalization,
                                           #beam_score_length_normalization_strength=self.beam_score_length_normalization_strength,
                                           post_score_length_normalization=self.post_score_length_normalization,
                                           post_score_length_normalization_strength=self.post_score_length_normalization_strength,
                                           post_score_coverage_penalty=self.post_score_coverage_penalty,
                                           post_score_coverage_penalty_strength=self.post_score_coverage_penalty_strength,
                                           groundhog=self.groundhog,
                                           tgt_unk_id=self.tgt_unk_id,
                                           tgt_indexer=self.tgt_indexer,
                                           #force_finish=self.force_finish,
                                           prob_space_combination=self.prob_space_combination, reverse_encdec=self.reverse_encdec,
                                           generate_attention_html=self.generate_attention_html,
                                           attn_graph_with_sum=self.attn_graph_with_sum,
                                           attn_graph_attribs=self.attn_graph_attribs, src_indexer=self.src_indexer,
                                           rich_output_filename=self.rich_output_filename,
                                           #use_unfinished_translation_if_none_found=self.use_unfinished_translation_if_none_found,
                                           replace_unk=self.replace_unk, src=self.src, dic=self.dic,
                                           remove_unk=self.remove_unk, normalize_unicode_unk=self.normalize_unicode_unk, attempt_to_relocate_unk_source=self.attempt_to_relocate_unk_source,
                                           constraints_fn_list=self.constraints_fn_list,
                                           use_chainerx = self.use_chainerx,
                                           show_progress_bar=False,
                                           thread=self)
from .eval import placeholder_constraints_builder
class Translator(object):
    """Wraps an encoder-decoder model and exposes a translate() call.

    The model is loaded once at construction; each translate() call
    spawns a TranslatorThread so the decoding can be cancelled via
    stop().
    """

    def __init__(self, config_server):
        self.config_server = config_server
        from nmt_chainer.translation.eval import create_encdec
        self.encdec, self.eos_idx, self.src_indexer, self.tgt_indexer, self.reverse_encdec, model_infos_list = create_encdec(
            config_server)
        self.encdec_list = [self.encdec]
        # Set while a translation is in flight; used by stop().
        self.translator_thread = None
        if config_server.process.force_placeholders:
            self.make_constraints = {"ph_constraint": placeholder_constraints_builder(self.src_indexer, self.tgt_indexer,
                                                                                      units_placeholders=config_server.process.units_placeholders)}
        else:
            self.make_constraints = None
        self.produce_attention_graph = not config_server.output.no_attention_map_in_server_mode
        self.always_consider_eos_and_placeholders = config_server.method.always_consider_eos_and_placeholders
        self.use_chainerx = config_server.process.use_chainerx

    def translate(self, sentence, beam_width, beam_pruning_margin, beam_score_coverage_penalty, beam_score_coverage_penalty_strength, nb_steps, nb_steps_ratio,
                  remove_unk, normalize_unicode_unk, attempt_to_relocate_unk_source, beam_score_length_normalization, beam_score_length_normalization_strength, post_score_length_normalization, post_score_length_normalization_strength,
                  post_score_coverage_penalty, post_score_coverage_penalty_strength,
                  groundhog, force_finish, prob_space_combination, attn_graph_width, attn_graph_height):
        """Translate one (pre-segmented) sentence.

        :returns: (output_text, attn_graph_script, attn_graph_div,
            unk_mapping). The graph parts stay '' and '<div/>' when
            attention maps are disabled.
        """
        # NOTE(review): this visualisation import appears unused in this
        # method -- confirm before removing.
        from nmt_chainer.utilities import visualisation
        log.info("processing source string %s" % sentence)
        # The decoding pipeline works on files, so the input sentence
        # and all outputs go through temporary files.
        src_file = tempfile.NamedTemporaryFile()
        src_file.write(sentence.encode('utf-8'))
        src_file.seek(0)
        dest_file = tempfile.NamedTemporaryFile()
        rich_output_file = tempfile.NamedTemporaryFile()
        if self.produce_attention_graph:
            attn_graph_script_file = tempfile.NamedTemporaryFile()
            attn_graph_div_file = tempfile.NamedTemporaryFile()
            generate_attention_html = (attn_graph_script_file.name, attn_graph_div_file.name)
        else:
            generate_attention_html = None
        try:
            out = ''
            script = ''
            div = '<div/>'
            unk_mapping = []
            #src_data, stats_src_pp = build_dataset_one_side_pp(src_file.name, self.src_indexer, max_nb_ex=self.config_server.process.max_nb_ex)
            # Preprocessing returns an extra constraints list only when
            # placeholder constraints are enabled.
            preprocessed_input = build_dataset_one_side_pp(src_file.name, self.src_indexer, max_nb_ex=self.config_server.process.max_nb_ex,
                                                           make_constraints_dict=self.make_constraints)
            if self.make_constraints is not None:
                src_data, stats_src_pp, constraints_list = preprocessed_input
            else:
                src_data, stats_src_pp = preprocessed_input
                constraints_list = None
            from nmt_chainer.translation.eval import translate_to_file_with_beam_search
            from nmt_chainer.translation.beam_search import BeamSearchParams
            beam_search_params = BeamSearchParams(
                beam_width=beam_width,
                beam_pruning_margin=beam_pruning_margin,
                beam_score_coverage_penalty=beam_score_coverage_penalty,
                beam_score_coverage_penalty_strength=beam_score_coverage_penalty_strength,
                beam_score_length_normalization=beam_score_length_normalization,
                beam_score_length_normalization_strength=beam_score_length_normalization_strength,
                force_finish=force_finish,
                use_unfinished_translation_if_none_found=True,
                always_consider_eos_and_placeholders=self.always_consider_eos_and_placeholders)

            # translate_to_file_with_beam_search(dest_file.name, self.config_server.process.gpu, self.encdec, self.eos_idx, src_data,
            #                                    beam_search_params,
            #                                    nb_steps=nb_steps,
            #                                    nb_steps_ratio=nb_steps_ratio,
            #                                    post_score_length_normalization=post_score_length_normalization,
            #                                    post_score_length_normalization_strength=post_score_length_normalization_strength,
            #                                    post_score_coverage_penalty=post_score_coverage_penalty,
            #                                    post_score_coverage_penalty_strength=post_score_coverage_penalty_strength,
            #                                    groundhog=groundhog,
            #                                    tgt_unk_id=self.config_server.output.tgt_unk_id,
            #                                    tgt_indexer=self.tgt_indexer,
            #                                    prob_space_combination=prob_space_combination, reverse_encdec=self.reverse_encdec,
            #                                    generate_attention_html=generate_attention_html,
            #                                    attn_graph_with_sum=False,
            #                                    attn_graph_attribs={'title': '', 'toolbar_location': 'below', 'plot_width': attn_graph_width, 'plot_height': attn_graph_height}, src_indexer=self.src_indexer,
            #                                    rich_output_filename=rich_output_file.name,
            #                                    #use_unfinished_translation_if_none_found=True,
            #                                    replace_unk=True, src=sentence, dic=self.config_server.output.dic,
            #                                    remove_unk=remove_unk, normalize_unicode_unk=normalize_unicode_unk, attempt_to_relocate_unk_source=attempt_to_relocate_unk_source,
            #                                    constraints_fn_list=constraints_list,
            #                                    use_chainerx = self.use_chainerx,
            #                                    show_progress_bar=False)

            # Run the decode in a thread so a concurrent request can
            # cancel it through self.translator_thread.stop().
            self.translator_thread = TranslatorThread(dest_file.name, self.config_server.process.gpu, self.encdec, self.eos_idx, src_data,
                                                      beam_search_params,
                                                      #beam_width, beam_pruning_margin,
                                                      #beam_score_coverage_penalty=beam_score_coverage_penalty,
                                                      #beam_score_coverage_penalty_strength=beam_score_coverage_penalty_strength,
                                                      nb_steps=nb_steps,
                                                      nb_steps_ratio=nb_steps_ratio,
                                                      #beam_score_length_normalization=beam_score_length_normalization,
                                                      #beam_score_length_normalization_strength=beam_score_length_normalization_strength,
                                                      post_score_length_normalization=post_score_length_normalization,
                                                      post_score_length_normalization_strength=post_score_length_normalization_strength,
                                                      post_score_coverage_penalty=post_score_coverage_penalty,
                                                      post_score_coverage_penalty_strength=post_score_coverage_penalty_strength,
                                                      groundhog=groundhog,
                                                      tgt_unk_id=self.config_server.output.tgt_unk_id,
                                                      tgt_indexer=self.tgt_indexer,
                                                      #force_finish=force_finish,
                                                      prob_space_combination=prob_space_combination, reverse_encdec=self.reverse_encdec,
                                                      generate_attention_html=generate_attention_html,
                                                      attn_graph_with_sum=False,
                                                      attn_graph_attribs={'title': '', 'toolbar_location': 'below', 'plot_width': attn_graph_width, 'plot_height': attn_graph_height}, src_indexer=self.src_indexer,
                                                      rich_output_filename=rich_output_file.name,
                                                      #use_unfinished_translation_if_none_found=True,
                                                      replace_unk=True, src=sentence, dic=self.config_server.output.dic,
                                                      remove_unk=remove_unk, normalize_unicode_unk=normalize_unicode_unk, attempt_to_relocate_unk_source=attempt_to_relocate_unk_source,
                                                      constraints_fn_list=constraints_list,
                                                      use_chainerx = self.use_chainerx)
            self.translator_thread.start()
            self.translator_thread.join()
            dest_file.seek(0)
            out = dest_file.read().decode('utf-8')
            #print(rich_output_file.name)
            rich_output_file.seek(0)
            rich_output_data = json.loads(rich_output_file.read().decode('utf-8'))
            #unk_mapping = rich_output_data[0]['unk_mapping']
            if self.produce_attention_graph:
                attn_graph_script_file.seek(0)
                script = attn_graph_script_file.read()
                script = script.decode('utf-8')
                attn_graph_div_file.seek(0)
                div = attn_graph_div_file.read()
                div = div.decode('utf-8')
            if len(rich_output_data) > 0 and 'unk_mapping' in rich_output_data[0]:
                unk_mapping = rich_output_data[0]['unk_mapping']
        finally:
            # Temp files are deleted on close, whether or not the
            # decode succeeded.
            src_file.close()
            dest_file.close()
            rich_output_file.close()
            if self.produce_attention_graph:
                attn_graph_script_file.close()
                attn_graph_div_file.close()
        return out, script, div, unk_mapping

    def stop(self):
        """Cancel the in-flight translation, if any."""
        if self.translator_thread:
            self.translator_thread.stop()
class RequestHandler(six.moves.socketserver.BaseRequestHandler):
def handle(self):
start_request = timeit.default_timer()
log.info("Handling request...")
data = self.request.recv(4096).decode('utf-8')
text_uid = hashlib.sha1("{0}_{1}".format(start_request, data).encode('utf-8')).hexdigest()
kw_filename = '/tmp/{0}.kw'.format(text_uid)
response = {}
if (data):
try:
cur_thread = threading.current_thread()
log.info("request={0}".format(data))
if "get_log_files" in data:
all_log_files = []
for handler in log.root.handlers:
if hasattr(handler, 'baseFilename'):
log_dir = os.path.dirname(handler.baseFilename)
log_base_fn = os.path.basename(handler.baseFilename)
log_files = [f for f in listdir(log_dir) if isfile(join(log_dir, f)) and f.startswith(log_base_fn)]
all_log_files += log_files
response['log_files'] = all_log_files
elif "get_log_file" in data:
root = ET.fromstring(data)
filename = root.get('filename')
try:
page = int(root.get('page'))
except BaseException:
page = 1
for handler in log.root.handlers:
if hasattr(handler, 'baseFilename'):
log_dir = os.path.dirname(handler.baseFilename)
log_base_fn = os.path.basename(handler.baseFilename)
log_file = "{0}/{1}".format(log_dir, filename)
if log_base_fn in filename and os.path.isfile(log_file):
page_count = 1
log_file_content = ''
line_in_page = 0
start = (page - 1) * PAGE_SIZE
stop = start + PAGE_SIZE
with open(log_file, 'r') as f:
for line, str_line in enumerate(f):
if line >= start and line < stop:
log_file_content += str_line
line_in_page += 1
if line_in_page == PAGE_SIZE:
page_count += 1
line_in_page = 0
response['content'] = log_file_content
response['page'] = page
response['pageCount'] = page_count
response['status'] = 'OK'
break
else:
response['status'] = 'NOT FOUND'
elif "cancel_translation" in data:
self.server.translator.stop()
else:
root = ET.fromstring(data)
article_id = root.get('id')
try:
attn_graph_width = int(root.get('attn_graph_width', 0))
except BaseException:
attn_graph_width = 0
try:
attn_graph_height = int(root.get('attn_graph_height', 0))
except BaseException:
attn_graph_height = 0
beam_width = int(root.get('beam_width', 30))
nb_steps = int(root.get('nb_steps', 50))
beam_pruning_margin = None
try:
beam_pruning_margin = float(root.get('beam_pruning_margin'))
except BaseException:
pass
beam_score_coverage_penalty = root.get(
'beam_score_coverage_penalty', 'none')
beam_score_coverage_penalty_strength = None
try:
beam_score_coverage_penalty_strength = float(root.get('beam_score_coverage_penalty_strength', 0.2))
except BaseException:
pass
nb_steps_ratio = None
try:
nb_steps_ratio = float(root.get('nb_steps_ratio', 1.2))
except BaseException:
pass
groundhog = ('true' == root.get('groundhog', 'false'))
force_finish = ('true' == root.get('force_finish', 'false'))
beam_score_length_normalization = root.get(
'beam_score_length_normalization', 'none')
beam_score_length_normalization_strength = None
try:
beam_score_length_normalization_strength = float(root.get('beam_score_length_normalization_strength', 0.2))
except BaseException:
pass
post_score_length_normalization = root.get(
'post_score_length_normalization', 'simple')
post_score_length_normalization_strength = None
try:
post_score_length_normalization_strength = float(root.get('post_score_length_normalization_strength', 0.2))
except BaseException:
pass
post_score_coverage_penalty = root.get(
'post_score_coverage_penalty', 'none')
post_score_coverage_penalty_strength = None
try:
post_score_coverage_penalty_strength = float(root.get('post_score_coverage_penalty_strength', 0.2))
except BaseException:
pass
prob_space_combination = (
'true' == root.get(
'prob_space_combination', 'false'))
remove_unk = ('true' == root.get('remove_unk', 'false'))
normalize_unicode_unk = (
'true' == root.get(
'normalize_unicode_unk', 'true'))
log.debug('normalize_unicode_unk=' + str(normalize_unicode_unk))
attempt_to_relocate_unk_source = ('true' == root.get(
'attempt_to_relocate_unk_source', 'false'))
log.debug("Article id: %s" % article_id)
in_ = ""
out = ""
graph_data = []
segmented_input = []
segmented_output = []
mapping = []
sentences = root.findall('sentence')
for idx, sentence in enumerate(sentences):
sentence_number = sentence.get('id')
text = sentence.findtext('i_sentence').strip()
log.info("text=%s" % text)
# cmd = self.server.segmenter_command % text.replace("'", "'\\''").encode('utf-8')
cmd = self.server.segmenter_command % text.replace("'", "'\\''") # p3
log.info("cmd=%s" % cmd)
start_cmd = timeit.default_timer()
#parser_output = subprocess.check_output(cmd, shell=True)
parser_output = subprocess.check_output(cmd, shell=True, universal_newlines=True) # p3
log.info(
"Segmenter request processed in {} s.".format(
timeit.default_timer() - start_cmd))
log.info("parser_output=%s" % parser_output)
words = []
if 'parse_server' == self.server.segmenter_format:
for line in parser_output.split("\n"):
if (line.startswith('#')):
continue
elif (not line.strip()):
break
else:
parts = line.split("\t")
word = parts[2]
words.append(word)
elif 'morph' == self.server.segmenter_format:
for pair in parser_output.split(' '):
if pair != '':
word, pos = pair.split('_')
words.append(word)
elif 'plain' == self.server.segmenter_format:
words = parser_output.split(' ')
else:
pass
splitted_sentence = ' '.join(words)
# log.info("splitted_sentence=" + splitted_sentence)
#decoded_sentence = splitted_sentence.decode('utf-8')
# log.info("decoded_sentence={0}".format(decoded_sentence))
# translation, unk_mapping = self.server.translator.translate(decoded_sentence,
translation, script, div, unk_mapping = self.server.translator.translate(splitted_sentence,
beam_width, beam_pruning_margin, beam_score_coverage_penalty, beam_score_coverage_penalty_strength, nb_steps, nb_steps_ratio, remove_unk, normalize_unicode_unk, attempt_to_relocate_unk_source,
beam_score_length_normalization, beam_score_length_normalization_strength, post_score_length_normalization, post_score_length_normalization_strength, post_score_coverage_penalty, post_score_coverage_penalty_strength,
groundhog, force_finish, prob_space_combination, attn_graph_width, attn_graph_height)
# in_ += decoded_sentence
in_ += text
out += translation
if self.server.pp_command is not None:
def apply_pp(str):
pp_cmd = self.server.pp_command % out.replace("'", "''")
log.info("pp_cmd=%s" % pp_cmd)
start_pp_cmd = timeit.default_timer()
pp_output = subprocess.check_output(pp_cmd, shell=True, universal_newlines=True)
log.info("Postprocessor request processed in {0} s.".format(timeit.default_timer() - start_pp_cmd))
log.info("pp_output={0}".format(pp_output))
return pp_output
out = apply_pp(out)
segmented_input.append(splitted_sentence)
segmented_output.append(translation)
mapping.append(unk_mapping)
graph_data.append((script, div))
# There should always be only one sentence for now. - FB
break
response['article_id'] = article_id
response['sentence_number'] = sentence_number
response['in_'] = in_
response['out'] = out
# log.info("in_={0}".format(in_))
log.info("out={0}".format(out))
response['segmented_input'] = segmented_input
response['segmented_output'] = segmented_output
response['mapping'] = list(map(lambda x: ' '.join(x), mapping))
graphes = []
for gd in graph_data:
script, div = gd
graphes.append({'script': script, 'div': div})
response['attn_graphes'] = graphes
except BaseException:
traceback.print_exc()
error_lines = traceback.format_exc().splitlines()
response['error'] = error_lines[-1]
response['stacktrace'] = error_lines
log.info("Request processed in {0} s. by {1}".format(timeit.default_timer() - start_request, cur_thread.name))
response = json.dumps(response)
# self.request.sendall(response)
self.request.sendall(response.encode('utf-8')) # p3
class Server(six.moves.socketserver.ThreadingMixIn, six.moves.socketserver.TCPServer):
    """Threaded TCP translation server.

    Each request is handled on its own daemon thread; the segmenter and
    postprocessor shell commands plus the shared Translator instance are
    kept on the server object so request handlers can reach them through
    ``self.server``.
    """

    daemon_threads = True
    allow_reuse_address = True

    def __init__(self, server_address, handler_class, segmenter_command,
                 segmenter_format, translator, pp_command):
        # Call the TCPServer base explicitly (safe on Python 2, where the
        # socketserver classes are old-style and super() is unavailable).
        six.moves.socketserver.TCPServer.__init__(self, server_address, handler_class)
        # Attach the translation-pipeline configuration to the server.
        pipeline_config = (('segmenter_command', segmenter_command),
                           ('segmenter_format', segmenter_format),
                           ('translator', translator),
                           ('pp_command', pp_command))
        for attr_name, attr_value in pipeline_config:
            setattr(self, attr_name, attr_value)
def do_start_server(config_server):
    """Configure logging, build the translator, and serve requests forever.

    Blocks in serve_forever(); a KeyboardInterrupt shuts the server down
    cleanly and exits the process with status 0.
    """
    if config_server.output.log_config:
        logging.config.fileConfig(config_server.output.log_config)
    # Request handlers log through this module-level logger.
    global log
    log = logging.getLogger("default")
    log.setLevel(logging.INFO)
    engine = Translator(config_server)
    host_str, port_str = config_server.process.server.split(":")
    srv = Server(
        (host_str, int(port_str)),
        RequestHandler,
        config_server.process.segmenter_command,
        config_server.process.segmenter_format,
        engine,
        config_server.process.pp_command)
    ip, port = srv.server_address
    log.info("Start listening for requests on {0}:{1}...".format(socket.gethostname(), port))
    try:
        srv.serve_forever()
    except KeyboardInterrupt:
        srv.shutdown()
        srv.server_close()
        sys.exit(0)
# Entry point: delegate argument parsing and dispatch to command_line().
if __name__ == '__main__':
    command_line()
|
"""
Given: Six nonnegative integers, each of which does not exceed 20,000.
The integers correspond to the number of couples in a population possessing each genotype pairing for a given factor.
In order, the six given integers represent the number of couples having the following genotypes:
AA-AA
AA-Aa
AA-aa
Aa-Aa
Aa-aa
aa-aa
Return: The expected number of offspring displaying the dominant phenotype in the next generation,
under the assumption that every couple has exactly two offspring.
"""
inp = '18004 19145 18815 16436 18694 18233'
ints = list(map(int, inp.split()))
probs =[2,2,2,1.5,1,0]
print(sum([p*i for p, i in zip(probs, ints)]))
|
# Run gnatprove on the test project in check-only mode, forcing Ada 2012.
from test_support import gnatprove
gnatprove(opt=["-P", "test.gpr", "--mode=check", "-cargs", "-gnat2012"])
|
"""
cfbrank -- A college football ranking algorithm
conference.py: Defines the Conference class for generating rankings
and other statistical information on an athletic conference as a
whole.
Written by Michael V. DePalatis <depalatis@gmail.com>
cfbrank is distributed under the terms of the GNU GPL.
"""
class Conference:
    """Utility class for doing statistical analysis for conferences."""

    def __init__(self, members=None):
        """Create a conference from a list of member teams.

        members: list of team objects exposing ``wins`` and ``losses``
        counts; defaults to an empty list. A fresh list is created per
        instance to avoid the shared-mutable-default pitfall of the old
        ``members=[]`` signature.
        """
        self.members = [] if members is None else members

    def getWinningPercentage(self, count_FCS=True, FCS_penalty=False):
        """Calculate the conference's winning percentage. If count_FCS
        is True, the result will fully count wins over FCS teams,
        otherwise it will only look at the FBS games played. If
        FCS_penalty is True, count FCS games, but as a win reduced by
        the B factor."""
        # TODO: FCS stuff
        wins, games = 0, 0
        for team in self.members:
            wins += team.wins
            games += team.wins + team.losses
        if games == 0:
            # No games played yet (e.g. empty conference or preseason):
            # define the percentage as 0.0 instead of dividing by zero.
            return 0.0
        return wins / float(games)
|
from cherrypy.test import test
# Make sure the in-tree cherrypy package is imported over any installed one.
test.prefer_parent_path()
import cherrypy
def setup_server():
    """Mount a test application whose WSGI pipeline rewrites the response
    body; the test checks that the middleware runs in the right order
    (letter replacement first, then upper-casing)."""
    class WSGIResponse(object):
        # Wraps a WSGI app's result iterable so subclasses can transform
        # each chunk. Python 2 iterator protocol: next(), not __next__.
        def __init__(self, appresults):
            self.appresults = appresults
            self.iter = iter(appresults)
        def __iter__(self):
            return self
        def next(self):
            return self.iter.next()
        def close(self):
            # Propagate close() to the wrapped result, per the WSGI spec.
            if hasattr(self.appresults, "close"):
                self.appresults.close()
    class ChangeCase(object):
        # WSGI middleware: applies the str method named by `to`
        # (e.g. 'upper') to every chunk of the wrapped app's output.
        def __init__(self, app, to=None):
            self.app = app
            self.to = to
        def __call__(self, environ, start_response):
            res = self.app(environ, start_response)
            class CaseResults(WSGIResponse):
                # `this` is the response instance; `self` is the
                # closed-over ChangeCase middleware.
                def next(this):
                    return getattr(this.iter.next(), self.to)()
            return CaseResults(res)
    class Replacer(object):
        # WSGI middleware: applies str.replace for every (old, new) pair
        # in `map` to each chunk of output.
        # NOTE(review): mutable default `map={}` is shared across
        # instances — harmless here since it is never mutated.
        def __init__(self, app, map={}):
            self.app = app
            self.map = map
        def __call__(self, environ, start_response):
            res = self.app(environ, start_response)
            class ReplaceResults(WSGIResponse):
                def next(this):
                    line = this.iter.next()
                    for k, v in self.map.iteritems():
                        line = line.replace(k, v)
                    return line
            return ReplaceResults(res)
    class Root(object):
        def index(self):
            return "HellO WoRlD!"
        index.exposed = True
    root_conf = {'wsgi.pipeline': [('replace', Replacer)],
                 'wsgi.replace.map': {'L': 'X', 'l': 'r'},
                 }
    app = cherrypy.Application(Root())
    # ChangeCase is appended after Replacer; the test below expects the
    # replacement to be applied before the case change.
    app.wsgiapp.pipeline.append(('changecase', ChangeCase))
    app.wsgiapp.config['changecase'] = {'to': 'upper'}
    cherrypy.tree.mount(app, config={'/': root_conf})
from cherrypy.test import helper
class WSGI_Namespace_Test(helper.CPWebCase):
    """Functional test of the wsgi.pipeline config namespace."""
    def test_pipeline(self):
        # Requires a real HTTP server; skip otherwise.
        if not cherrypy.server.httpserver:
            return self.skip()
        self.getPage("/")
        # If body is "HEXXO WORXD!", the middleware was applied out of order.
        self.assertBody("HERRO WORRD!")
# Run the CherryPy test harness when executed directly.
if __name__ == '__main__':
    helper.testmain()
|
from __future__ import division
import math
import random
import sys
import time
from twisted.internet import defer, protocol, reactor
from twisted.python import failure, log
import p2pool
from p2pool import data as p2pool_data
from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import deferral, p2protocol, pack, variable
class PeerMisbehavingError(Exception):
    """Raised when a peer violates the p2p protocol; the caller responds
    by dropping the connection and banning the peer."""
def fragment(f, **kwargs):
    """Call f(**kwargs); if the packet is too long, halve every keyword
    value and recurse on each half until the pieces fit.

    Assumes every kwarg value is a sliceable sequence, and that all the
    sequences should be split at the same midpoint index.
    """
    try:
        f(**kwargs)
    except p2protocol.TooLong:
        fragment(f, **dict((k, v[:len(v)//2]) for k, v in kwargs.iteritems()))
        fragment(f, **dict((k, v[len(v)//2:]) for k, v in kwargs.iteritems()))
class Protocol(p2protocol.Protocol):
    """A single p2pool peer connection (Python 2 / Twisted).

    Handles the version handshake, address gossip, share relay, and the
    remember/forget transaction protocol that mirrors each side's view
    of the other's known and mining transactions.
    """
    VERSION = 3301
    # Cap on the total packed size of transactions either side may be
    # asked to remember for the other.
    max_remembered_txs_size = 25000000
    def __init__(self, node, incoming):
        p2protocol.Protocol.__init__(self, node.net.PREFIX, 8000000, node.traffic_happened)
        self.node = node
        self.incoming = incoming  # True when the peer connected to us
        self.other_version = None
        self.connected2 = False  # True once the version handshake completed
    def connectionMade(self):
        self.factory.proto_made_connection(self)
        self.connection_lost_event = variable.Event()
        self.addr = self.transport.getPeer().host, self.transport.getPeer().port
        # Start the handshake immediately.
        self.send_version(
            version=self.VERSION,
            services=0,
            addr_to=dict(
                services=0,
                address=self.transport.getPeer().host,
                port=self.transport.getPeer().port,
            ),
            addr_from=dict(
                services=0,
                address=self.transport.getHost().host,
                port=self.transport.getHost().port,
            ),
            nonce=self.node.nonce,
            sub_version=p2pool.__version__,
            mode=1,
            best_share_hash=self.node.best_share_hash_func(),
        )
        # Drop the connection if the handshake doesn't finish within 10 s.
        self.timeout_delayed = reactor.callLater(10, self._connect_timeout)
        self.get_shares = deferral.GenericDeferrer(
            max_id=2**256,
            func=lambda id, hashes, parents, stops: self.send_sharereq(id=id, hashes=hashes, parents=parents, stops=stops),
            timeout=15,
            on_timeout=self.disconnect,
        )
        self.remote_tx_hashes = set() # view of peer's known_txs # not actually initially empty, but sending txs instead of tx hashes won't hurt
        self.remote_remembered_txs_size = 0
        self.remembered_txs = {} # view of peer's mining_txs
        self.remembered_txs_size = 0
        self.known_txs_cache = {}
    def _connect_timeout(self):
        # Handshake never completed in time.
        self.timeout_delayed = None
        print 'Handshake timed out, disconnecting from %s:%i' % self.addr
        self.disconnect()
    def packetReceived(self, command, payload2):
        try:
            # Peers must handshake before sending anything else.
            if command != 'version' and not self.connected2:
                raise PeerMisbehavingError('first message was not version message')
            p2protocol.Protocol.packetReceived(self, command, payload2)
        except PeerMisbehavingError, e:
            print 'Peer %s:%i misbehaving, will drop and ban. Reason:' % self.addr, e.message
            self.badPeerHappened()
    def badPeerHappened(self):
        # Disconnect and ban the peer's address for one hour.
        print "Bad peer banned:", self.addr
        self.disconnect()
        if self.transport.getPeer().host != '127.0.0.1': # never ban localhost
            self.node.bans[self.transport.getPeer().host] = time.time() + 60*60
    def _timeout(self):
        # Idle timeout after the handshake (no data for 100 s).
        self.timeout_delayed = None
        print 'Connection timed out, disconnecting from %s:%i' % self.addr
        self.disconnect()
    def sendAdvertisement(self):
        if self.node.serverfactory.listen_port is not None:
            host=self.node.external_ip
            port=self.node.serverfactory.listen_port.getHost().port
            if host is not None:
                # external_ip may carry an explicit "host:port" override.
                if ':' in host:
                    host, port_str = host.split(':')
                    port = int(port_str)
                if p2pool.DEBUG:
                    print 'Advertising for incoming connections: %s:%i' % (host, port)
                # Advertise given external IP address, just as if there were another peer behind us, with that address, who asked us to advertise it for them
                self.send_addrs(addrs=[
                    dict(
                        address=dict(
                            services=self.other_services,
                            address=host,
                            port=port,
                        ),
                        timestamp=int(time.time()),
                    ),
                ])
            else:
                if p2pool.DEBUG:
                    print 'Advertising for incoming connections'
                # Ask peer to advertise what it believes our IP address to be
                self.send_addrme(port=port)
    message_version = pack.ComposedType([
        ('version', pack.IntType(32)),
        ('services', pack.IntType(64)),
        ('addr_to', bitcoin_data.address_type),
        ('addr_from', bitcoin_data.address_type),
        ('nonce', pack.IntType(64)),
        ('sub_version', pack.VarStrType()),
        ('mode', pack.IntType(32)), # always 1 for legacy compatibility
        ('best_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
    ])
    def handle_version(self, version, services, addr_to, addr_from, nonce, sub_version, mode, best_share_hash):
        print "Peer %s:%s says protocol version is %s, client version %s" % (addr_from['address'], addr_from['port'], version, sub_version)
        if self.other_version is not None:
            raise PeerMisbehavingError('more than one version message')
        if version < getattr(self.node.net, 'MINIMUM_PROTOCOL_VERSION', 1400):
            raise PeerMisbehavingError('peer too old')
        self.other_version = version
        self.other_sub_version = sub_version[:512]
        self.other_services = services
        if nonce == self.node.nonce:
            raise PeerMisbehavingError('was connected to self')
        # Duplicate connections (same nonce) are dropped, not banned.
        if nonce in self.node.peers:
            if p2pool.DEBUG:
                print 'Detected duplicate connection, disconnecting from %s:%i' % self.addr
            self.disconnect()
            return
        self.nonce = nonce
        self.connected2 = True
        # Handshake done: replace the 10 s connect timeout with a 100 s
        # idle timeout, reset whenever any data arrives.
        self.timeout_delayed.cancel()
        self.timeout_delayed = reactor.callLater(100, self._timeout)
        old_dataReceived = self.dataReceived
        def new_dataReceived(data):
            if self.timeout_delayed is not None:
                self.timeout_delayed.reset(100)
            old_dataReceived(data)
        self.dataReceived = new_dataReceived
        self.factory.proto_connected(self)
        # Periodic keepalive pings (exponential, mean interval 100 s).
        self._stop_thread = deferral.run_repeatedly(lambda: [
            self.send_ping(),
            random.expovariate(1/100)][-1])
        if self.node.advertise_ip:
            self._stop_thread2 = deferral.run_repeatedly(lambda: [
                self.sendAdvertisement(),
                random.expovariate(1/(100*len(self.node.peers) + 1))][-1])
        if best_share_hash is not None:
            self.node.handle_share_hashes([best_share_hash], self)
        # Keep the peer's picture of our known_txs in sync via
        # have_tx/losing_tx notifications.
        def add_to_remote_view_of_my_known_txs(added):
            if added:
                self.send_have_tx(tx_hashes=list(added.keys()))
        watch_id0 = self.node.known_txs_var.added.watch(add_to_remote_view_of_my_known_txs)
        self.connection_lost_event.watch(lambda: self.node.known_txs_var.added.unwatch(watch_id0))
        def remove_from_remote_view_of_my_known_txs(removed):
            if removed:
                self.send_losing_tx(tx_hashes=list(removed.keys()))
                # cache forgotten txs here for a little while so latency of "losing_tx" packets doesn't cause problems
                key = max(self.known_txs_cache) + 1 if self.known_txs_cache else 0
                self.known_txs_cache[key] = removed #dict((h, before[h]) for h in removed)
                reactor.callLater(20, self.known_txs_cache.pop, key)
        watch_id1 = self.node.known_txs_var.removed.watch(remove_from_remote_view_of_my_known_txs)
        self.connection_lost_event.watch(lambda: self.node.known_txs_var.removed.unwatch(watch_id1))
        def update_remote_view_of_my_known_txs(before, after):
            t0 = time.time()
            added = set(after) - set(before)
            removed = set(before) - set(after)
            if added:
                self.send_have_tx(tx_hashes=list(added))
            if removed:
                self.send_losing_tx(tx_hashes=list(removed))
                # cache forgotten txs here for a little while so latency of "losing_tx" packets doesn't cause problems
                key = max(self.known_txs_cache) + 1 if self.known_txs_cache else 0
                self.known_txs_cache[key] = dict((h, before[h]) for h in removed)
                reactor.callLater(20, self.known_txs_cache.pop, key)
            t1 = time.time()
            if p2pool.BENCH and (t1-t0) > .01: print "%8.3f ms for update_remote_view_of_my_known_txs" % ((t1-t0)*1000.)
        watch_id2 = self.node.known_txs_var.transitioned.watch(update_remote_view_of_my_known_txs)
        self.connection_lost_event.watch(lambda: self.node.known_txs_var.transitioned.unwatch(watch_id2))
        self.send_have_tx(tx_hashes=self.node.known_txs_var.value.keys())
        # Mirror our mining_txs to the peer with remember/forget packets,
        # tracking the size budget on the remote side.
        def update_remote_view_of_my_mining_txs(before, after):
            t0 = time.time()
            added = set(after) - set(before)
            removed = set(before) - set(after)
            if removed:
                self.send_forget_tx(tx_hashes=list(removed))
                self.remote_remembered_txs_size -= sum(100 + bitcoin_data.tx_type.packed_size(before[x]) for x in removed)
            if added:
                self.remote_remembered_txs_size += sum(100 + bitcoin_data.tx_type.packed_size(after[x]) for x in added)
                assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
                fragment(self.send_remember_tx, tx_hashes=[x for x in added if x in self.remote_tx_hashes], txs=[after[x] for x in added if x not in self.remote_tx_hashes])
            t1 = time.time()
            if p2pool.BENCH and (t1-t0) > .01: print "%8.3f ms for update_remote_view_of_my_mining_txs" % ((t1-t0)*1000.)
        watch_id2 = self.node.mining_txs_var.transitioned.watch(update_remote_view_of_my_mining_txs)
        self.connection_lost_event.watch(lambda: self.node.mining_txs_var.transitioned.unwatch(watch_id2))
        self.remote_remembered_txs_size += sum(100 + bitcoin_data.tx_type.packed_size(x) for x in self.node.mining_txs_var.value.values())
        assert self.remote_remembered_txs_size <= self.max_remembered_txs_size
        fragment(self.send_remember_tx, tx_hashes=[], txs=self.node.mining_txs_var.value.values())
    message_ping = pack.ComposedType([])
    def handle_ping(self):
        pass
    message_addrme = pack.ComposedType([
        ('port', pack.IntType(16)),
    ])
    def handle_addrme(self, port):
        host = self.transport.getPeer().host
        #print 'addrme from', host, port
        if host == '127.0.0.1':
            # Request came from localhost: just forward it to a random peer.
            if random.random() < .8 and self.node.peers:
                random.choice(self.node.peers.values()).send_addrme(port=port) # services...
        else:
            self.node.got_addr((self.transport.getPeer().host, port), self.other_services, int(time.time()))
            # Gossip the address onward with probability 0.8.
            if random.random() < .8 and self.node.peers:
                random.choice(self.node.peers.values()).send_addrs(addrs=[
                    dict(
                        address=dict(
                            services=self.other_services,
                            address=host,
                            port=port,
                        ),
                        timestamp=int(time.time()),
                    ),
                ])
    message_addrs = pack.ComposedType([
        ('addrs', pack.ListType(pack.ComposedType([
            ('timestamp', pack.IntType(64)),
            ('address', bitcoin_data.address_type),
        ]))),
    ])
    def handle_addrs(self, addrs):
        for addr_record in addrs:
            # Clamp the advertised timestamp to now to reject future times.
            self.node.got_addr((addr_record['address']['address'], addr_record['address']['port']), addr_record['address']['services'], min(int(time.time()), addr_record['timestamp']))
            if random.random() < .8 and self.node.peers:
                random.choice(self.node.peers.values()).send_addrs(addrs=[addr_record])
    message_getaddrs = pack.ComposedType([
        ('count', pack.IntType(32)),
    ])
    def handle_getaddrs(self, count):
        # Serve at most 100 addresses per request.
        if count > 100:
            count = 100
        self.send_addrs(addrs=[
            dict(
                timestamp=int(self.node.addr_store[host, port][2]),
                address=dict(
                    services=self.node.addr_store[host, port][0],
                    address=host,
                    port=port,
                ),
            ) for host, port in
            self.node.get_good_peers(count)
        ])
    message_shares = pack.ComposedType([
        ('shares', pack.ListType(p2pool_data.share_type)),
    ])
    def handle_shares(self, shares):
        t0 = time.time()
        result = []
        for wrappedshare in shares:
            if wrappedshare['type'] < p2pool_data.Share.VERSION: continue
            share = p2pool_data.load_share(wrappedshare, self.node.net, self.addr)
            if wrappedshare['type'] >= 13:
                # Resolve every referenced tx hash, falling back to the
                # short-lived latency cache of recently forgotten txs.
                txs = []
                for tx_hash in share.share_info['new_transaction_hashes']:
                    if tx_hash in self.node.known_txs_var.value:
                        tx = self.node.known_txs_var.value[tx_hash]
                    else:
                        for cache in self.known_txs_cache.itervalues():
                            if tx_hash in cache:
                                tx = cache[tx_hash]
                                print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,)
                                break
                        else:
                            print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,)
                            self.disconnect()
                            return
                    txs.append(tx)
            else:
                txs = None
            result.append((share, txs))
        self.node.handle_shares(result, self)
        t1 = time.time()
        if p2pool.BENCH: print "%8.3f ms for %i shares in handle_shares (%3.3f ms/share)" % ((t1-t0)*1000., len(shares), (t1-t0)*1000./ max(1, len(shares)))
    def sendShares(self, shares, tracker, known_txs, include_txs_with=[]):
        # NOTE(review): mutable default include_txs_with=[] is never
        # mutated here, so the shared-default pitfall is not triggered.
        t0 = time.time()
        tx_hashes = set()
        for share in shares:
            if share.VERSION >= 13:
                # send full transaction for every new_transaction_hash that peer does not know
                for tx_hash in share.share_info['new_transaction_hashes']:
                    if not tx_hash in known_txs:
                        newset = set(share.share_info['new_transaction_hashes'])
                        ktxset = set(known_txs)
                        missing = newset - ktxset
                        print "Missing %i of %i transactions for broadcast" % (len(missing), len(newset))
                    assert tx_hash in known_txs, 'tried to broadcast share without knowing all its new transactions'
                    if tx_hash not in self.remote_tx_hashes:
                        tx_hashes.add(tx_hash)
                continue
            if share.hash in include_txs_with:
                x = share.get_other_tx_hashes(tracker)
                if x is not None:
                    tx_hashes.update(x)
        # NOTE(review): `share` below is the loop variable leaking out of
        # the for loop, i.e. stats are computed for the last share only.
        hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value and x in known_txs]
        all_hashes = share.share_info['new_transaction_hashes']
        new_tx_size = sum(100 + bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
        all_tx_size = sum(100 + bitcoin_data.tx_type.packed_size(known_txs[x]) for x in all_hashes)
        print "Sending a share with %i txs (%i new) totaling %i msg bytes (%i new)" % (len(all_hashes), len(hashes_to_send), all_tx_size, new_tx_size)
        hashes_to_send = [x for x in tx_hashes if x not in self.node.mining_txs_var.value and x in known_txs]
        new_tx_size = sum(100 + bitcoin_data.tx_type.packed_size(known_txs[x]) for x in hashes_to_send)
        new_remote_remembered_txs_size = self.remote_remembered_txs_size + new_tx_size
        if new_remote_remembered_txs_size > self.max_remembered_txs_size:
            raise ValueError('shares have too many txs')
        self.remote_remembered_txs_size = new_remote_remembered_txs_size
        fragment(self.send_remember_tx, tx_hashes=[x for x in hashes_to_send if x in self.remote_tx_hashes], txs=[known_txs[x] for x in hashes_to_send if x not in self.remote_tx_hashes])
        fragment(self.send_shares, shares=[share.as_share() for share in shares])
        # Once the shares are sent, release the remembered-tx budget again.
        self.send_forget_tx(tx_hashes=hashes_to_send)
        self.remote_remembered_txs_size -= new_tx_size
        t1 = time.time()
        if p2pool.BENCH: print "%8.3f ms for %i shares in sendShares (%3.3f ms/share)" % ((t1-t0)*1000., len(shares), (t1-t0)*1000./ max(1, len(shares)))
    message_sharereq = pack.ComposedType([
        ('id', pack.IntType(256)),
        ('hashes', pack.ListType(pack.IntType(256))),
        ('parents', pack.VarIntType()),
        ('stops', pack.ListType(pack.IntType(256))),
    ])
    def handle_sharereq(self, id, hashes, parents, stops):
        shares = self.node.handle_get_shares(hashes, parents, stops, self)
        try:
            self.send_sharereply(id=id, result='good', shares=[share.as_share() for share in shares])
        except p2protocol.TooLong:
            self.send_sharereply(id=id, result='too long', shares=[])
    message_sharereply = pack.ComposedType([
        ('id', pack.IntType(256)),
        ('result', pack.EnumType(pack.VarIntType(), {0: 'good', 1: 'too long', 2: 'unk2', 3: 'unk3', 4: 'unk4', 5: 'unk5', 6: 'unk6'})),
        ('shares', pack.ListType(p2pool_data.share_type)),
    ])
    class ShareReplyError(Exception): pass
    def handle_sharereply(self, id, result, shares):
        if result == 'good':
            res = [p2pool_data.load_share(share, self.node.net, self.addr) for share in shares if share['type'] >= p2pool_data.Share.VERSION]
        else:
            res = failure.Failure(self.ShareReplyError(result))
        self.get_shares.got_response(id, res)
    message_bestblock = pack.ComposedType([
        ('header', bitcoin_data.block_header_type),
    ])
    def handle_bestblock(self, header):
        self.node.handle_bestblock(header, self)
    message_have_tx = pack.ComposedType([
        ('tx_hashes', pack.ListType(pack.IntType(256))),
    ])
    def handle_have_tx(self, tx_hashes):
        #assert self.remote_tx_hashes.isdisjoint(tx_hashes)
        self.remote_tx_hashes.update(tx_hashes)
        # Bound memory: keep at most 10000 of the peer's tx hashes.
        while len(self.remote_tx_hashes) > 10000:
            self.remote_tx_hashes.pop()
    message_losing_tx = pack.ComposedType([
        ('tx_hashes', pack.ListType(pack.IntType(256))),
    ])
    def handle_losing_tx(self, tx_hashes):
        t0 = time.time()
        #assert self.remote_tx_hashes.issuperset(tx_hashes)
        self.remote_tx_hashes.difference_update(tx_hashes)
        t1 = time.time()
        if p2pool.BENCH and (t1-t0) > .01: print "%8.3f ms for %i txs in handle_losing_tx (%3.3f ms/tx)" % ((t1-t0)*1000., len(tx_hashes), (t1-t0)*1000./ max(1, len(tx_hashes)))
    message_remember_tx = pack.ComposedType([
        ('tx_hashes', pack.ListType(pack.IntType(256))),
        ('txs', pack.ListType(bitcoin_data.tx_type)),
    ])
    def handle_remember_tx(self, tx_hashes, txs):
        t0 = time.time()
        # First the hashes of txs we should already know...
        for tx_hash in tx_hashes:
            if tx_hash in self.remembered_txs:
                print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
                self.disconnect()
                return
            if tx_hash in self.node.known_txs_var.value:
                tx = self.node.known_txs_var.value[tx_hash]
            else:
                for cache in self.known_txs_cache.itervalues():
                    if tx_hash in cache:
                        tx = cache[tx_hash]
                        print 'Transaction %064x rescued from peer latency cache!' % (tx_hash,)
                        break
                else:
                    print >>sys.stderr, 'Peer referenced unknown transaction %064x, disconnecting' % (tx_hash,)
                    self.disconnect()
                    return
            self.remembered_txs[tx_hash] = tx
            self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
        # ...then full transactions the peer thinks we don't know yet.
        added_known_txs = {}
        warned = False
        for tx in txs:
            tx_hash = bitcoin_data.hash256(bitcoin_data.tx_type.pack(tx))
            if tx_hash in self.remembered_txs:
                print >>sys.stderr, 'Peer referenced transaction twice, disconnecting'
                self.disconnect()
                return
            if tx_hash in self.node.known_txs_var.value and not warned:
                print 'Peer sent entire transaction %064x that was already received' % (tx_hash,)
                warned = True
            self.remembered_txs[tx_hash] = tx
            self.remembered_txs_size += 100 + bitcoin_data.tx_type.packed_size(tx)
            added_known_txs[tx_hash] = tx
        self.node.known_txs_var.add(added_known_txs)
        if self.remembered_txs_size >= self.max_remembered_txs_size:
            raise PeerMisbehavingError('too much transaction data stored')
        t1 = time.time()
        if p2pool.BENCH and (t1-t0) > .01: print "%8.3f ms for %i txs in p2p.py:handle_remember_tx (%3.3f ms/tx)" % ((t1-t0)*1000., len(tx_hashes), ((t1-t0)*1000. / max(1,len(tx_hashes)) ))
    message_forget_tx = pack.ComposedType([
        ('tx_hashes', pack.ListType(pack.IntType(256))),
    ])
    def handle_forget_tx(self, tx_hashes):
        for tx_hash in tx_hashes:
            self.remembered_txs_size -= 100 + bitcoin_data.tx_type.packed_size(self.remembered_txs[tx_hash])
            assert self.remembered_txs_size >= 0
            del self.remembered_txs[tx_hash]
    def connectionLost(self, reason):
        self.connection_lost_event.happened()
        if self.timeout_delayed is not None:
            self.timeout_delayed.cancel()
        if self.connected2:
            # Stop the keepalive/advertisement loops started on handshake.
            self.factory.proto_disconnected(self, reason)
            self._stop_thread()
            if self.node.advertise_ip:
                self._stop_thread2()
            self.connected2 = False
        self.factory.proto_lost_connection(self, reason)
        if p2pool.DEBUG:
            print "Peer connection lost:", self.addr, reason
        self.get_shares.respond_all(reason)
    @defer.inlineCallbacks
    def do_ping(self):
        # Round-trip time measured via a trivial sharereq.
        start = reactor.seconds()
        yield self.get_shares(hashes=[0], parents=0, stops=[])
        end = reactor.seconds()
        defer.returnValue(end - start)
class ServerFactory(protocol.ServerFactory):
    """Listens for incoming peer connections, enforcing a global cap,
    a per-/16-network cap of 3 connections, and the node's ban list."""
    def __init__(self, node, max_conns):
        self.node = node
        self.max_conns = max_conns
        self.conns = {}  # ident (/16 prefix) -> open connection count
        self.running = False
        self.listen_port = None
    def buildProtocol(self, addr):
        # Refuse when at the global cap or >= 3 connections from one /16.
        if sum(self.conns.itervalues()) >= self.max_conns or self.conns.get(self._host_to_ident(addr.host), 0) >= 3:
            return None
        if addr.host in self.node.bans and self.node.bans[addr.host] > time.time():
            return None
        p = Protocol(self.node, True)
        p.factory = self
        if p2pool.DEBUG:
            print "Got peer connection from:", addr
        return p
    def _host_to_ident(self, host):
        # Identify peers by the first two IPv4 octets (a /16 network).
        a, b, c, d = host.split('.')
        return a, b
    def proto_made_connection(self, proto):
        ident = self._host_to_ident(proto.transport.getPeer().host)
        self.conns[ident] = self.conns.get(ident, 0) + 1
    def proto_lost_connection(self, proto, reason):
        ident = self._host_to_ident(proto.transport.getPeer().host)
        self.conns[ident] -= 1
        if not self.conns[ident]:
            del self.conns[ident]
    def proto_connected(self, proto):
        self.node.got_conn(proto)
    def proto_disconnected(self, proto, reason):
        self.node.lost_conn(proto, reason)
    def start(self):
        assert not self.running
        self.running = True
        def attempt_listen():
            if self.running:
                self.listen_port = reactor.listenTCP(self.node.port, self)
        # Retry binding the P2P port until it succeeds (or stop() runs).
        deferral.retry('Error binding to P2P port:', traceback=False)(attempt_listen)()
    def stop(self):
        assert self.running
        self.running = False
        return self.listen_port.stopListening()
class ClientFactory(protocol.ClientFactory):
    """Maintains outgoing peer connections: periodically dials good
    peers until desired_conns is reached, tracking in-flight attempts
    and respecting the node's ban list."""
    def __init__(self, node, desired_conns, max_attempts):
        self.node = node
        self.desired_conns = desired_conns
        self.max_attempts = max_attempts
        self.attempts = set()  # idents (/16 prefixes) currently being dialed
        self.conns = set()
        self.running = False
    def _host_to_ident(self, host):
        # Identify peers by the first two IPv4 octets (a /16 network).
        a, b, c, d = host.split('.')
        return a, b
    def buildProtocol(self, addr):
        p = Protocol(self.node, False)
        p.factory = self
        return p
    def startedConnecting(self, connector):
        ident = self._host_to_ident(connector.getDestination().host)
        if ident in self.attempts:
            raise AssertionError('already have attempt')
        self.attempts.add(ident)
    def clientConnectionFailed(self, connector, reason):
        self.attempts.remove(self._host_to_ident(connector.getDestination().host))
    def clientConnectionLost(self, connector, reason):
        self.attempts.remove(self._host_to_ident(connector.getDestination().host))
    def proto_made_connection(self, proto):
        pass
    def proto_lost_connection(self, proto, reason):
        pass
    def proto_connected(self, proto):
        self.conns.add(proto)
        self.node.got_conn(proto)
    def proto_disconnected(self, proto, reason):
        self.conns.remove(proto)
        self.node.lost_conn(proto, reason)
    def start(self):
        assert not self.running
        self.running = True
        self._stop_thinking = deferral.run_repeatedly(self._think)
    def stop(self):
        assert self.running
        self.running = False
        self._stop_thinking()
    def _think(self):
        # Try at most one new outgoing connection per tick; reschedule
        # with a mean interval of 1 s.
        try:
            if len(self.conns) < self.desired_conns and len(self.attempts) < self.max_attempts and self.node.addr_store:
                (host, port), = self.node.get_good_peers(1)
                if self._host_to_ident(host) in self.attempts:
                    pass
                elif host in self.node.bans and self.node.bans[host] > time.time():
                    pass
                else:
                    #print 'Trying to connect to', host, port
                    reactor.connectTCP(host, port, self, timeout=5)
        except:
            log.err()
        return random.expovariate(1/1)
class SingleClientFactory(protocol.ReconnectingClientFactory):
    """Factory for one persistent outgoing connection to a fixed address,
    reconnecting with exponential backoff whenever the link drops."""
    def __init__(self, node):
        self.node = node
    def buildProtocol(self, addr):
        new_proto = Protocol(self.node, incoming=False)
        new_proto.factory = self
        return new_proto
    def proto_made_connection(self, proto):
        """No-op: the TCP link is up but the handshake hasn't finished."""
    def proto_lost_connection(self, proto, reason):
        """No-op: reconnection is handled by ReconnectingClientFactory."""
    def proto_connected(self, proto):
        # Handshake done: reset the backoff delay and register the peer.
        self.resetDelay()
        self.node.got_conn(proto)
    def proto_disconnected(self, proto, reason):
        self.node.lost_conn(proto, reason)
class Node(object):
def __init__(self, best_share_hash_func, port, net, addr_store={}, connect_addrs=set(), desired_outgoing_conns=10, max_outgoing_attempts=30, max_incoming_conns=50, preferred_storage=1000, known_txs_var=variable.VariableDict({}), mining_txs_var=variable.VariableDict({}), mining2_txs_var=variable.VariableDict({}), advertise_ip=True, external_ip=None):
self.best_share_hash_func = best_share_hash_func
self.port = port
self.net = net
self.addr_store = dict(addr_store)
self.connect_addrs = connect_addrs
self.preferred_storage = preferred_storage
self.known_txs_var = known_txs_var
self.mining_txs_var = mining_txs_var
self.mining2_txs_var = mining2_txs_var
self.advertise_ip = advertise_ip
self.external_ip = external_ip
self.traffic_happened = variable.Event()
self.nonce = random.randrange(2**64)
self.peers = {}
self.bans = {} # address -> end_time
self.clientfactory = ClientFactory(self, desired_outgoing_conns, max_outgoing_attempts)
self.serverfactory = ServerFactory(self, max_incoming_conns)
self.running = False
def start(self):
if self.running:
raise ValueError('already running')
self.clientfactory.start()
self.serverfactory.start()
self.singleclientconnectors = [reactor.connectTCP(addr, port, SingleClientFactory(self)) for addr, port in self.connect_addrs]
self.running = True
self._stop_thinking = deferral.run_repeatedly(self._think)
def _think(self):
try:
if len(self.addr_store) < self.preferred_storage and self.peers:
random.choice(self.peers.values()).send_getaddrs(count=8)
except:
log.err()
return random.expovariate(1/20)
@defer.inlineCallbacks
def stop(self):
if not self.running:
raise ValueError('already stopped')
self.running = False
self._stop_thinking()
yield self.clientfactory.stop()
yield self.serverfactory.stop()
for singleclientconnector in self.singleclientconnectors:
yield singleclientconnector.factory.stopTrying()
yield singleclientconnector.disconnect()
del self.singleclientconnectors
def got_conn(self, conn):
if conn.nonce in self.peers:
raise ValueError('already have peer')
self.peers[conn.nonce] = conn
print '%s peer %s:%i established. p2pool version: %i %r' % ('Incoming connection from' if conn.incoming else 'Outgoing connection to', conn.addr[0], conn.addr[1], conn.other_version, conn.other_sub_version)
def lost_conn(self, conn, reason):
if conn.nonce not in self.peers:
raise ValueError('''don't have peer''')
if conn is not self.peers[conn.nonce]:
raise ValueError('wrong conn')
del self.peers[conn.nonce]
print 'Lost peer %s:%i - %s' % (conn.addr[0], conn.addr[1], reason.getErrorMessage())
def got_addr(self, (host, port), services, timestamp):
if (host, port) in self.addr_store:
old_services, old_first_seen, old_last_seen = self.addr_store[host, port]
self.addr_store[host, port] = services, old_first_seen, max(old_last_seen, timestamp)
else:
if len(self.addr_store) < 10000:
self.addr_store[host, port] = services, timestamp, timestamp
def handle_shares(self, shares, peer):
print 'handle_shares', (shares, peer)
def handle_share_hashes(self, hashes, peer):
print 'handle_share_hashes', (hashes, peer)
def handle_get_shares(self, hashes, parents, stops, peer):
print 'handle_get_shares', (hashes, parents, stops, peer)
    def handle_bestblock(self, header, peer):
        # Debug stub: real handling is supplied elsewhere (e.g. by overriding).
        print 'handle_bestblock', header
    def get_good_peers(self, max_count):
        """Return up to max_count (host, port) pairs, biased toward addresses
        that have been known a long time and were seen recently.

        The sort key combines log(known duration) / log(staleness) with a
        random exponential factor, so selection is randomized but
        quality-weighted.  (Python 2-only syntax: dict.iteritems and a
        tuple-unpacking lambda.)
        """
        t = time.time()
        return [x[0] for x in sorted(self.addr_store.iteritems(), key=lambda (k, (services, first_seen, last_seen)):
            -math.log(max(3600, last_seen - first_seen))/math.log(max(3600, t - last_seen))*random.expovariate(1)
        )][:max_count]
|
import shesha.config as conf
# COMPASS/SHESHA parameter file: closed-loop SCAO bench with a 40x40
# Shack-Hartmann WFS, 10-pixel subapertures, on a laser guide star.
simul_name = "bench_scao_sh_40x40_10pix_lgs"
# ---- AO loop ----
p_loop = conf.Param_loop()
p_loop.set_niter(5000)
p_loop.set_ittime(0.002)  # =1/500
# ---- geometry ----
p_geom = conf.Param_geom()
p_geom.set_zenithangle(0.)
# ---- telescope ----
p_tel = conf.Param_tel()
p_tel.set_diam(8.0)
p_tel.set_cobs(0.12)
# ---- atmosphere: a single ground-layer phase screen ----
p_atmos = conf.Param_atmos()
p_atmos.set_r0(0.16)
p_atmos.set_nscreens(1)
p_atmos.set_frac([1.0])
p_atmos.set_alt([0.0])
p_atmos.set_windspeed([20.0])
p_atmos.set_winddir([45])
p_atmos.set_L0([1.e5])
# ---- science target ----
p_target = conf.Param_target()
p_targets = [p_target]
p_target.set_xpos(0.)
p_target.set_ypos(0.)
p_target.set_Lambda(1.65)
p_target.set_mag(10.)
# ---- wavefront sensors ----
p_wfs0 = conf.Param_wfs()
p_wfs1 = conf.Param_wfs()
# NOTE(review): p_wfs1 is instantiated but not added to p_wfss -- confirm it
# is intentionally disabled.
p_wfss = [p_wfs0]
p_wfs0.set_type("sh")
p_wfs0.set_nxsub(40)
p_wfs0.set_npix(10)
p_wfs0.set_pixsize(0.3)
p_wfs0.set_fracsub(0.8)
p_wfs0.set_xpos(0.)
p_wfs0.set_ypos(0.)
p_wfs0.set_Lambda(0.5)
p_wfs0.set_gsmag(3.)
p_wfs0.set_optthroughput(0.5)
p_wfs0.set_zerop(1.e11)
p_wfs0.set_noise(-1)
p_wfs0.set_atmos_seen(1)
# LGS-specific settings (guide-star altitude, launch, laser parameters)
p_wfs0.set_gsalt(90 * 1.e3)
p_wfs0.set_lltx(0)
p_wfs0.set_llty(0)
p_wfs0.set_laserpower(10)
p_wfs0.set_lgsreturnperwatt(1.e3)
p_wfs0.set_proftype("Exp")
p_wfs0.set_beamsize(0.8)
# ---- deformable mirrors: piezo-stack DM + tip-tilt mirror ----
p_dm0 = conf.Param_dm()
p_dm1 = conf.Param_dm()
p_dms = [p_dm0, p_dm1]
p_dm0.set_type("pzt")
# one actuator row more than WFS subapertures (Fried geometry)
nact = p_wfs0.nxsub + 1
p_dm0.set_nact(nact)
p_dm0.set_alt(0.)
p_dm0.set_thresh(0.3)
p_dm0.set_coupling(0.2)
p_dm0.set_unitpervolt(0.01)
p_dm0.set_push4imat(100.)
p_dm1.set_type("tt")
p_dm1.set_alt(0.)
p_dm1.set_unitpervolt(0.0005)
p_dm1.set_push4imat(10.)
# ---- centroider: correlation with a Gaussian reference function ----
p_centroider0 = conf.Param_centroider()
p_centroiders = [p_centroider0]
p_centroider0.set_nwfs(0)
p_centroider0.set_type("corr")
p_centroider0.set_type_fct("gauss")
p_centroider0.set_width(2.0)
# ---- controller: least-squares, driving both DMs from WFS 0 ----
p_controller0 = conf.Param_controller()
p_controllers = [p_controller0]
p_controller0.set_type("ls")
p_controller0.set_nwfs([0])
p_controller0.set_ndm([0, 1])
p_controller0.set_maxcond(1500)
p_controller0.set_delay(1)
p_controller0.set_gain(0.4)
# modal-optimization settings (disabled here via set_modopti(0))
p_controller0.set_modopti(0)
p_controller0.set_nrec(2048)
p_controller0.set_nmodes(1286)
p_controller0.set_gmin(0.001)
p_controller0.set_gmax(0.5)
p_controller0.set_ngain(500)
|
"""Unit tests for collections.py."""
import unittest, doctest, operator
from test.support import TESTFN, forget, unlink
import inspect
from test import support
from collections import namedtuple, Counter, OrderedDict, _count_elements
from test import mapping_tests
import pickle, copy
from random import randrange, shuffle
import keyword
import re
import sys
from collections import UserDict
from collections import ChainMap
from collections.abc import Hashable, Iterable, Iterator
from collections.abc import Sized, Container, Callable
from collections.abc import Set, MutableSet
from collections.abc import Mapping, MutableMapping, KeysView, ItemsView
from collections.abc import Sequence, MutableSequence
from collections.abc import ByteString
class TestChainMap(unittest.TestCase):
    """Tests for collections.ChainMap: layered lookup, new_child/parents,
    shallow/deep copying, pickling, repr round-trips and __missing__."""
    def test_basics(self):
        c = ChainMap()
        c['a'] = 1
        c['b'] = 2
        d = c.new_child()
        d['b'] = 20
        d['c'] = 30
        self.assertEqual(d.maps, [{'b':20, 'c':30}, {'a':1, 'b':2}])  # check internal state
        self.assertEqual(d.items(), dict(a=1, b=20, c=30).items())    # check items/iter/getitem
        self.assertEqual(len(d), 3)                                   # check len
        for key in 'abc':                                             # check contains
            self.assertIn(key, d)
        for k, v in dict(a=1, b=20, c=30, z=100).items():             # check get
            self.assertEqual(d.get(k, 100), v)
        del d['b']                                                    # unmask a value
        self.assertEqual(d.maps, [{'c':30}, {'a':1, 'b':2}])          # check internal state
        self.assertEqual(d.items(), dict(a=1, b=2, c=30).items())     # check items/iter/getitem
        self.assertEqual(len(d), 3)                                   # check len
        for key in 'abc':                                             # check contains
            self.assertIn(key, d)
        for k, v in dict(a=1, b=2, c=30, z=100).items():              # check get
            self.assertEqual(d.get(k, 100), v)
        self.assertIn(repr(d), [                                      # check repr
            type(d).__name__ + "({'c': 30}, {'a': 1, 'b': 2})",
            type(d).__name__ + "({'c': 30}, {'b': 2, 'a': 1})"
        ])
        for e in d.copy(), copy.copy(d):                              # check shallow copies
            self.assertEqual(d, e)
            self.assertEqual(d.maps, e.maps)
            self.assertIsNot(d, e)
            # copies share all maps except a fresh copy of maps[0]
            self.assertIsNot(d.maps[0], e.maps[0])
            for m1, m2 in zip(d.maps[1:], e.maps[1:]):
                self.assertIs(m1, m2)
        for e in [pickle.loads(pickle.dumps(d)),
                  copy.deepcopy(d),
                  eval(repr(d))
                ]:                                                    # check deep copies
            self.assertEqual(d, e)
            self.assertEqual(d.maps, e.maps)
            self.assertIsNot(d, e)
            for m1, m2 in zip(d.maps, e.maps):
                self.assertIsNot(m1, m2, e)
        f = d.new_child()
        f['b'] = 5
        self.assertEqual(f.maps, [{'b': 5}, {'c':30}, {'a':1, 'b':2}])
        self.assertEqual(f.parents.maps, [{'c':30}, {'a':1, 'b':2}])  # check parents
        self.assertEqual(f['b'], 5)                                   # find first in chain
        self.assertEqual(f.parents['b'], 2)                           # look beyond maps[0]
    def test_contructor(self):
        # (sic: the method name typo is kept to preserve the test identifier)
        self.assertEqual(ChainMap().maps, [{}])                       # no-args --> one new dict
        self.assertEqual(ChainMap({1:2}).maps, [{1:2}])               # 1 arg --> list
    def test_bool(self):
        self.assertFalse(ChainMap())
        self.assertFalse(ChainMap({}, {}))
        self.assertTrue(ChainMap({1:2}, {}))
        self.assertTrue(ChainMap({}, {1:2}))
    def test_missing(self):
        class DefaultChainMap(ChainMap):
            def __missing__(self, key):
                return 999
        d = DefaultChainMap(dict(a=1, b=2), dict(b=20, c=30))
        for k, v in dict(a=1, b=2, c=30, d=999).items():
            self.assertEqual(d[k], v)                                 # check __getitem__ w/missing
        for k, v in dict(a=1, b=2, c=30, d=77).items():
            self.assertEqual(d.get(k, 77), v)                         # check get() w/ missing
        for k, v in dict(a=True, b=True, c=True, d=False).items():
            self.assertEqual(k in d, v)                               # check __contains__ w/missing
        self.assertEqual(d.pop('a', 1001), 1, d)
        self.assertEqual(d.pop('a', 1002), 1002)                      # check pop() w/missing
        self.assertEqual(d.popitem(), ('b', 2))                       # check popitem() w/missing
        with self.assertRaises(KeyError):
            d.popitem()
    def test_dict_coercion(self):
        d = ChainMap(dict(a=1, b=2), dict(b=20, c=30))
        self.assertEqual(dict(d), dict(a=1, b=2, c=30))
        self.assertEqual(dict(d.items()), dict(a=1, b=2, c=30))
# Named-tuple type shared by the pickle/copy tests (must be module-level
# so pickle can find it by qualified name).
TestNT = namedtuple('TestNT', ['x', 'y', 'z'])
class TestNamedTuple(unittest.TestCase):
def test_factory(self):
Point = namedtuple('Point', 'x y')
self.assertEqual(Point.__name__, 'Point')
self.assertEqual(Point.__slots__, ())
self.assertEqual(Point.__module__, __name__)
self.assertEqual(Point.__getitem__, tuple.__getitem__)
self.assertEqual(Point._fields, ('x', 'y'))
self.assertIn('class Point(tuple)', Point._source)
self.assertRaises(ValueError, namedtuple, 'abc%', 'efg ghi') # type has non-alpha char
self.assertRaises(ValueError, namedtuple, 'class', 'efg ghi') # type has keyword
self.assertRaises(ValueError, namedtuple, '9abc', 'efg ghi') # type starts with digit
self.assertRaises(ValueError, namedtuple, 'abc', 'efg g%hi') # field with non-alpha char
self.assertRaises(ValueError, namedtuple, 'abc', 'abc class') # field has keyword
self.assertRaises(ValueError, namedtuple, 'abc', '8efg 9ghi') # field starts with digit
self.assertRaises(ValueError, namedtuple, 'abc', '_efg ghi') # field with leading underscore
self.assertRaises(ValueError, namedtuple, 'abc', 'efg efg ghi') # duplicate field
namedtuple('Point0', 'x1 y2') # Verify that numbers are allowed in names
namedtuple('_', 'a b c') # Test leading underscores in a typename
nt = namedtuple('nt', 'the quick brown fox') # check unicode input
self.assertNotIn("u'", repr(nt._fields))
nt = namedtuple('nt', ('the', 'quick')) # check unicode input
self.assertNotIn("u'", repr(nt._fields))
self.assertRaises(TypeError, Point._make, [11]) # catch too few args
self.assertRaises(TypeError, Point._make, [11, 22, 33]) # catch too many args
@unittest.skipIf(sys.flags.optimize >= 2,
"Docstrings are omitted with -O2 and above")
def test_factory_doc_attr(self):
Point = namedtuple('Point', 'x y')
self.assertEqual(Point.__doc__, 'Point(x, y)')
def test_name_fixer(self):
for spec, renamed in [
[('efg', 'g%hi'), ('efg', '_1')], # field with non-alpha char
[('abc', 'class'), ('abc', '_1')], # field has keyword
[('8efg', '9ghi'), ('_0', '_1')], # field starts with digit
[('abc', '_efg'), ('abc', '_1')], # field with leading underscore
[('abc', 'efg', 'efg', 'ghi'), ('abc', 'efg', '_2', 'ghi')], # duplicate field
[('abc', '', 'x'), ('abc', '_1', 'x')], # fieldname is a space
]:
self.assertEqual(namedtuple('NT', spec, rename=True)._fields, renamed)
def test_instance(self):
Point = namedtuple('Point', 'x y')
p = Point(11, 22)
self.assertEqual(p, Point(x=11, y=22))
self.assertEqual(p, Point(11, y=22))
self.assertEqual(p, Point(y=22, x=11))
self.assertEqual(p, Point(*(11, 22)))
self.assertEqual(p, Point(**dict(x=11, y=22)))
self.assertRaises(TypeError, Point, 1) # too few args
self.assertRaises(TypeError, Point, 1, 2, 3) # too many args
self.assertRaises(TypeError, eval, 'Point(XXX=1, y=2)', locals()) # wrong keyword argument
self.assertRaises(TypeError, eval, 'Point(x=1)', locals()) # missing keyword argument
self.assertEqual(repr(p), 'Point(x=11, y=22)')
self.assertNotIn('__weakref__', dir(p))
self.assertEqual(p, Point._make([11, 22])) # test _make classmethod
self.assertEqual(p._fields, ('x', 'y')) # test _fields attribute
self.assertEqual(p._replace(x=1), (1, 22)) # test _replace method
self.assertEqual(p._asdict(), dict(x=11, y=22)) # test _asdict method
self.assertEqual(vars(p), p._asdict()) # verify that vars() works
try:
p._replace(x=1, error=2)
except ValueError:
pass
else:
self._fail('Did not detect an incorrect fieldname')
# verify that field string can have commas
Point = namedtuple('Point', 'x, y')
p = Point(x=11, y=22)
self.assertEqual(repr(p), 'Point(x=11, y=22)')
# verify that fieldspec can be a non-string sequence
Point = namedtuple('Point', ('x', 'y'))
p = Point(x=11, y=22)
self.assertEqual(repr(p), 'Point(x=11, y=22)')
def test_tupleness(self):
Point = namedtuple('Point', 'x y')
p = Point(11, 22)
self.assertIsInstance(p, tuple)
self.assertEqual(p, (11, 22)) # matches a real tuple
self.assertEqual(tuple(p), (11, 22)) # coercable to a real tuple
self.assertEqual(list(p), [11, 22]) # coercable to a list
self.assertEqual(max(p), 22) # iterable
self.assertEqual(max(*p), 22) # star-able
x, y = p
self.assertEqual(p, (x, y)) # unpacks like a tuple
self.assertEqual((p[0], p[1]), (11, 22)) # indexable like a tuple
self.assertRaises(IndexError, p.__getitem__, 3)
self.assertEqual(p.x, x)
self.assertEqual(p.y, y)
self.assertRaises(AttributeError, eval, 'p.z', locals())
def test_odd_sizes(self):
Zero = namedtuple('Zero', '')
self.assertEqual(Zero(), ())
self.assertEqual(Zero._make([]), ())
self.assertEqual(repr(Zero()), 'Zero()')
self.assertEqual(Zero()._asdict(), {})
self.assertEqual(Zero()._fields, ())
Dot = namedtuple('Dot', 'd')
self.assertEqual(Dot(1), (1,))
self.assertEqual(Dot._make([1]), (1,))
self.assertEqual(Dot(1).d, 1)
self.assertEqual(repr(Dot(1)), 'Dot(d=1)')
self.assertEqual(Dot(1)._asdict(), {'d':1})
self.assertEqual(Dot(1)._replace(d=999), (999,))
self.assertEqual(Dot(1)._fields, ('d',))
# n = 5000
n = 254 # SyntaxError: more than 255 arguments:
import string, random
names = list(set(''.join([random.choice(string.ascii_letters)
for j in range(10)]) for i in range(n)))
n = len(names)
Big = namedtuple('Big', names)
b = Big(*range(n))
self.assertEqual(b, tuple(range(n)))
self.assertEqual(Big._make(range(n)), tuple(range(n)))
for pos, name in enumerate(names):
self.assertEqual(getattr(b, name), pos)
repr(b) # make sure repr() doesn't blow-up
d = b._asdict()
d_expected = dict(zip(names, range(n)))
self.assertEqual(d, d_expected)
b2 = b._replace(**dict([(names[1], 999),(names[-5], 42)]))
b2_expected = list(range(n))
b2_expected[1] = 999
b2_expected[-5] = 42
self.assertEqual(b2, tuple(b2_expected))
self.assertEqual(b._fields, tuple(names))
def test_pickle(self):
p = TestNT(x=10, y=20, z=30)
for module in (pickle,):
loads = getattr(module, 'loads')
dumps = getattr(module, 'dumps')
for protocol in -1, 0, 1, 2:
q = loads(dumps(p, protocol))
self.assertEqual(p, q)
self.assertEqual(p._fields, q._fields)
def test_copy(self):
p = TestNT(x=10, y=20, z=30)
for copier in copy.copy, copy.deepcopy:
q = copier(p)
self.assertEqual(p, q)
self.assertEqual(p._fields, q._fields)
def test_name_conflicts(self):
# Some names like "self", "cls", "tuple", "itemgetter", and "property"
# failed when used as field names. Test to make sure these now work.
T = namedtuple('T', 'itemgetter property self cls tuple')
t = T(1, 2, 3, 4, 5)
self.assertEqual(t, (1,2,3,4,5))
newt = t._replace(itemgetter=10, property=20, self=30, cls=40, tuple=50)
self.assertEqual(newt, (10,20,30,40,50))
# Broader test of all interesting names in a template
with support.captured_stdout() as template:
T = namedtuple('T', 'x', verbose=True)
words = set(re.findall('[A-Za-z]+', template.getvalue()))
words -= set(keyword.kwlist)
T = namedtuple('T', words)
# test __new__
values = tuple(range(len(words)))
t = T(*values)
self.assertEqual(t, values)
t = T(**dict(zip(T._fields, values)))
self.assertEqual(t, values)
# test _make
t = T._make(values)
self.assertEqual(t, values)
# exercise __repr__
repr(t)
# test _asdict
self.assertEqual(t._asdict(), dict(zip(T._fields, values)))
# test _replace
t = T._make(values)
newvalues = tuple(v*10 for v in values)
newt = t._replace(**dict(zip(T._fields, newvalues)))
self.assertEqual(newt, newvalues)
# test _fields
self.assertEqual(T._fields, tuple(words))
# test __getnewargs__
self.assertEqual(t.__getnewargs__(), values)
def test_repr(self):
with support.captured_stdout() as template:
A = namedtuple('A', 'x', verbose=True)
self.assertEqual(repr(A(1)), 'A(x=1)')
# repr should show the name of the subclass
class B(A):
pass
self.assertEqual(repr(B(1)), 'B(x=1)')
def test_source(self):
# verify that _source can be run through exec()
tmp = namedtuple('NTColor', 'red green blue')
globals().pop('NTColor', None) # remove artifacts from other tests
exec(tmp._source, globals())
self.assertIn('NTColor', globals())
c = NTColor(10, 20, 30)
self.assertEqual((c.red, c.green, c.blue), (10, 20, 30))
self.assertEqual(NTColor._fields, ('red', 'green', 'blue'))
globals().pop('NTColor', None) # clean-up after this test
class ABCTestCase(unittest.TestCase):
    """Shared helpers for ABC tests: abstract-method enforcement,
    isinstance/issubclass structural hooks, and reflected comparisons."""
    def validate_abstract_methods(self, abc, *names):
        """Check that *abc* can be instantiated iff all of *names* are defined."""
        methodstubs = dict.fromkeys(names, lambda s, *args: 0)
        # everything should work when all required methods are present
        C = type('C', (abc,), methodstubs)
        C()
        # instantiation should fail if a required method is missing
        for name in names:
            stubs = methodstubs.copy()
            del stubs[name]
            C = type('C', (abc,), stubs)
            self.assertRaises(TypeError, C, name)
    def validate_isinstance(self, abc, name):
        """Check that *abc*'s __subclasshook__ keys off attribute *name*."""
        stub = lambda s, *args: 0
        C = type('C', (object,), {'__hash__': None})
        setattr(C, name, stub)
        self.assertIsInstance(C(), abc)
        self.assertTrue(issubclass(C, abc))
        C = type('C', (object,), {'__hash__': None})
        self.assertNotIsInstance(C(), abc)
        self.assertFalse(issubclass(C, abc))
    def validate_comparison(self, instance):
        """Check that comparisons on *instance* dispatch to the right operand's
        reflected methods (Other.__eq__ records that it was called)."""
        ops = ['lt', 'gt', 'le', 'ge', 'ne', 'or', 'and', 'xor', 'sub']
        operators = {}
        for op in ops:
            name = '__' + op + '__'
            operators[name] = getattr(operator, name)
        class Other:
            def __init__(self):
                self.right_side = False
            def __eq__(self, other):
                self.right_side = True
                return True
            __lt__ = __eq__
            __gt__ = __eq__
            __le__ = __eq__
            __ge__ = __eq__
            __ne__ = __eq__
            __ror__ = __eq__
            __rand__ = __eq__
            __rxor__ = __eq__
            __rsub__ = __eq__
        for name, op in operators.items():
            if not hasattr(instance, name):
                continue
            other = Other()
            op(instance, other)
            self.assertTrue(other.right_side,'Right side not called for %s.%s'
                            % (type(instance), name))
class TestOneTrickPonyABCs(ABCTestCase):
    """Tests for the single-method ABCs: Hashable, Iterable, Iterator,
    Sized, Container and Callable."""
    def test_Hashable(self):
        # Check some non-hashables
        non_samples = [bytearray(), list(), set(), dict()]
        for x in non_samples:
            self.assertNotIsInstance(x, Hashable)
            self.assertFalse(issubclass(type(x), Hashable), repr(type(x)))
        # Check some hashables
        samples = [None,
                   int(), float(), complex(),
                   str(),
                   tuple(), frozenset(),
                   int, list, object, type, bytes()
                   ]
        for x in samples:
            self.assertIsInstance(x, Hashable)
            self.assertTrue(issubclass(type(x), Hashable), repr(type(x)))
        self.assertRaises(TypeError, Hashable)
        # Check direct subclassing
        class H(Hashable):
            def __hash__(self):
                return super().__hash__()
        # Hashable.__hash__'s abstract default implementation returns 0
        self.assertEqual(hash(H()), 0)
        self.assertFalse(issubclass(int, H))
        self.validate_abstract_methods(Hashable, '__hash__')
        self.validate_isinstance(Hashable, '__hash__')
    def test_Iterable(self):
        # Check some non-iterables
        non_samples = [None, 42, 3.14, 1j]
        for x in non_samples:
            self.assertNotIsInstance(x, Iterable)
            self.assertFalse(issubclass(type(x), Iterable), repr(type(x)))
        # Check some iterables
        samples = [bytes(), str(),
                   tuple(), list(), set(), frozenset(), dict(),
                   dict().keys(), dict().items(), dict().values(),
                   (lambda: (yield))(),
                   (x for x in []),
                   ]
        for x in samples:
            self.assertIsInstance(x, Iterable)
            self.assertTrue(issubclass(type(x), Iterable), repr(type(x)))
        # Check direct subclassing
        class I(Iterable):
            def __iter__(self):
                return super().__iter__()
        self.assertEqual(list(I()), [])
        self.assertFalse(issubclass(str, I))
        self.validate_abstract_methods(Iterable, '__iter__')
        self.validate_isinstance(Iterable, '__iter__')
    def test_Iterator(self):
        non_samples = [None, 42, 3.14, 1j, b"", "", (), [], {}, set()]
        for x in non_samples:
            self.assertNotIsInstance(x, Iterator)
            self.assertFalse(issubclass(type(x), Iterator), repr(type(x)))
        samples = [iter(bytes()), iter(str()),
                   iter(tuple()), iter(list()), iter(dict()),
                   iter(set()), iter(frozenset()),
                   iter(dict().keys()), iter(dict().items()),
                   iter(dict().values()),
                   (lambda: (yield))(),
                   (x for x in []),
                   ]
        for x in samples:
            self.assertIsInstance(x, Iterator)
            self.assertTrue(issubclass(type(x), Iterator), repr(type(x)))
        self.validate_abstract_methods(Iterator, '__next__', '__iter__')
        # Issue 10565: __next__ alone (without __iter__) must not satisfy
        # the Iterator structural check.
        class NextOnly:
            def __next__(self):
                yield 1
                raise StopIteration
        self.assertNotIsInstance(NextOnly(), Iterator)
    def test_Sized(self):
        non_samples = [None, 42, 3.14, 1j,
                       (lambda: (yield))(),
                       (x for x in []),
                       ]
        for x in non_samples:
            self.assertNotIsInstance(x, Sized)
            self.assertFalse(issubclass(type(x), Sized), repr(type(x)))
        samples = [bytes(), str(),
                   tuple(), list(), set(), frozenset(), dict(),
                   dict().keys(), dict().items(), dict().values(),
                   ]
        for x in samples:
            self.assertIsInstance(x, Sized)
            self.assertTrue(issubclass(type(x), Sized), repr(type(x)))
        self.validate_abstract_methods(Sized, '__len__')
        self.validate_isinstance(Sized, '__len__')
    def test_Container(self):
        non_samples = [None, 42, 3.14, 1j,
                       (lambda: (yield))(),
                       (x for x in []),
                       ]
        for x in non_samples:
            self.assertNotIsInstance(x, Container)
            self.assertFalse(issubclass(type(x), Container), repr(type(x)))
        samples = [bytes(), str(),
                   tuple(), list(), set(), frozenset(), dict(),
                   dict().keys(), dict().items(),
                   ]
        for x in samples:
            self.assertIsInstance(x, Container)
            self.assertTrue(issubclass(type(x), Container), repr(type(x)))
        self.validate_abstract_methods(Container, '__contains__')
        self.validate_isinstance(Container, '__contains__')
    def test_Callable(self):
        non_samples = [None, 42, 3.14, 1j,
                       "", b"", (), [], {}, set(),
                       (lambda: (yield))(),
                       (x for x in []),
                       ]
        for x in non_samples:
            self.assertNotIsInstance(x, Callable)
            self.assertFalse(issubclass(type(x), Callable), repr(type(x)))
        samples = [lambda: None,
                   type, int, object,
                   len,
                   list.append, [].append,
                   ]
        for x in samples:
            self.assertIsInstance(x, Callable)
            self.assertTrue(issubclass(type(x), Callable), repr(type(x)))
        self.validate_abstract_methods(Callable, '__call__')
        self.validate_isinstance(Callable, '__call__')
    def test_direct_subclassing(self):
        for B in Hashable, Iterable, Iterator, Sized, Container, Callable:
            class C(B):
                pass
            self.assertTrue(issubclass(C, B))
            self.assertFalse(issubclass(int, C))
    def test_registration(self):
        for B in Hashable, Iterable, Iterator, Sized, Container, Callable:
            class C:
                __hash__ = None  # Make sure it isn't hashable by default
            self.assertFalse(issubclass(C, B), B.__name__)
            B.register(C)
            self.assertTrue(issubclass(C, B))
class WithSet(MutableSet):
    """Minimal concrete MutableSet backed by a plain ``set``.

    Only the five abstract methods are implemented, so the MutableSet mixin
    methods (|=, &=, ^=, pop, ...) are exercised by the tests below.
    """
    def __init__(self, it=()):
        self.data = set(it)
    def __len__(self):
        return self.data.__len__()
    def __iter__(self):
        return self.data.__iter__()
    def __contains__(self, item):
        return self.data.__contains__(item)
    def add(self, item):
        self.data.add(item)
    def discard(self, item):
        self.data.discard(item)
class TestCollectionABCs(ABCTestCase):
    """Tests for the collection ABCs: Set/MutableSet, Mapping/MutableMapping,
    Sequence/MutableSequence and ByteString."""
    # XXX For now, we only test some virtual inheritance properties.
    # We should also test the proper behavior of the collection ABCs
    # as real base classes or mix-in classes.
    def test_Set(self):
        for sample in [set, frozenset]:
            self.assertIsInstance(sample(), Set)
            self.assertTrue(issubclass(sample, Set))
        self.validate_abstract_methods(Set, '__contains__', '__iter__', '__len__')
        class MySet(Set):
            def __contains__(self, x):
                return False
            def __len__(self):
                return 0
            def __iter__(self):
                return iter([])
        self.validate_comparison(MySet())
    def test_hash_Set(self):
        # Set._hash() must give equal hashes for equal sets.
        class OneTwoThreeSet(Set):
            def __init__(self):
                self.contents = [1, 2, 3]
            def __contains__(self, x):
                return x in self.contents
            def __len__(self):
                return len(self.contents)
            def __iter__(self):
                return iter(self.contents)
            def __hash__(self):
                return self._hash()
        a, b = OneTwoThreeSet(), OneTwoThreeSet()
        self.assertTrue(hash(a) == hash(b))
    def test_MutableSet(self):
        self.assertIsInstance(set(), MutableSet)
        self.assertTrue(issubclass(set, MutableSet))
        self.assertNotIsInstance(frozenset(), MutableSet)
        self.assertFalse(issubclass(frozenset, MutableSet))
        self.validate_abstract_methods(MutableSet, '__contains__', '__iter__', '__len__',
            'add', 'discard')
    def test_issue_5647(self):
        # MutableSet.__iand__ mutated the set during iteration
        s = WithSet('abcd')
        s &= WithSet('cdef')            # This used to fail
        self.assertEqual(set(s), set('cd'))
    def test_issue_4920(self):
        # MutableSet.pop() method did not work
        class MySet(MutableSet):
            __slots__=['__s']
            def __init__(self,items=None):
                if items is None:
                    items=[]
                self.__s=set(items)
            def __contains__(self,v):
                return v in self.__s
            def __iter__(self):
                return iter(self.__s)
            def __len__(self):
                return len(self.__s)
            def add(self,v):
                result=v not in self.__s
                self.__s.add(v)
                return result
            def discard(self,v):
                result=v in self.__s
                self.__s.discard(v)
                return result
            def __repr__(self):
                return "MySet(%s)" % repr(list(self))
        s = MySet([5,43,2,1])
        # NOTE(review): depends on CPython's deterministic iteration order
        # for this small-int set -- confirm if running elsewhere.
        self.assertEqual(s.pop(), 1)
    def test_issue8750(self):
        # in-place set operators with the set itself as the operand
        empty = WithSet()
        full = WithSet(range(10))
        s = WithSet(full)
        s -= s
        self.assertEqual(s, empty)
        s = WithSet(full)
        s ^= s
        self.assertEqual(s, empty)
        s = WithSet(full)
        s &= s
        self.assertEqual(s, full)
        s |= s
        self.assertEqual(s, full)
    def test_issue16373(self):
        # Recursion error comparing comparable and noncomparable
        # Set instances
        class MyComparableSet(Set):
            def __contains__(self, x):
                return False
            def __len__(self):
                return 0
            def __iter__(self):
                return iter([])
        class MyNonComparableSet(Set):
            def __contains__(self, x):
                return False
            def __len__(self):
                return 0
            def __iter__(self):
                return iter([])
            def __le__(self, x):
                return NotImplemented
            def __lt__(self, x):
                return NotImplemented
        cs = MyComparableSet()
        ncs = MyNonComparableSet()
        with self.assertRaises(TypeError):
            ncs < cs
        with self.assertRaises(TypeError):
            ncs <= cs
        with self.assertRaises(TypeError):
            cs > ncs
        with self.assertRaises(TypeError):
            cs >= ncs
    def test_Mapping(self):
        for sample in [dict]:
            self.assertIsInstance(sample(), Mapping)
            self.assertTrue(issubclass(sample, Mapping))
        self.validate_abstract_methods(Mapping, '__contains__', '__iter__', '__len__',
            '__getitem__')
        class MyMapping(Mapping):
            def __len__(self):
                return 0
            def __getitem__(self, i):
                raise IndexError
            def __iter__(self):
                return iter(())
        self.validate_comparison(MyMapping())
    def test_MutableMapping(self):
        for sample in [dict]:
            self.assertIsInstance(sample(), MutableMapping)
            self.assertTrue(issubclass(sample, MutableMapping))
        self.validate_abstract_methods(MutableMapping, '__contains__', '__iter__', '__len__',
            '__getitem__', '__setitem__', '__delitem__')
    def test_MutableMapping_subclass(self):
        # Test issue 9214: views of a MutableMapping subclass must behave
        # as sets and stay detached from later mutations.
        mymap = UserDict()
        mymap['red'] = 5
        self.assertIsInstance(mymap.keys(), Set)
        self.assertIsInstance(mymap.keys(), KeysView)
        self.assertIsInstance(mymap.items(), Set)
        self.assertIsInstance(mymap.items(), ItemsView)
        mymap = UserDict()
        mymap['red'] = 5
        z = mymap.keys() | {'orange'}
        self.assertIsInstance(z, set)
        list(z)
        mymap['blue'] = 7               # Shouldn't affect 'z'
        self.assertEqual(sorted(z), ['orange', 'red'])
        mymap = UserDict()
        mymap['red'] = 5
        z = mymap.items() | {('orange', 3)}
        self.assertIsInstance(z, set)
        list(z)
        mymap['blue'] = 7               # Shouldn't affect 'z'
        self.assertEqual(sorted(z), [('orange', 3), ('red', 5)])
    def test_Sequence(self):
        for sample in [tuple, list, bytes, str]:
            self.assertIsInstance(sample(), Sequence)
            self.assertTrue(issubclass(sample, Sequence))
        self.assertIsInstance(range(10), Sequence)
        self.assertTrue(issubclass(range, Sequence))
        self.assertTrue(issubclass(str, Sequence))
        self.validate_abstract_methods(Sequence, '__contains__', '__iter__', '__len__',
            '__getitem__')
    def test_ByteString(self):
        for sample in [bytes, bytearray]:
            self.assertIsInstance(sample(), ByteString)
            self.assertTrue(issubclass(sample, ByteString))
        for sample in [str, list, tuple]:
            self.assertNotIsInstance(sample(), ByteString)
            self.assertFalse(issubclass(sample, ByteString))
        self.assertNotIsInstance(memoryview(b""), ByteString)
        self.assertFalse(issubclass(memoryview, ByteString))
    def test_MutableSequence(self):
        for sample in [tuple, str, bytes]:
            self.assertNotIsInstance(sample(), MutableSequence)
            self.assertFalse(issubclass(sample, MutableSequence))
        for sample in [list, bytearray]:
            self.assertIsInstance(sample(), MutableSequence)
            self.assertTrue(issubclass(sample, MutableSequence))
        self.assertFalse(issubclass(str, MutableSequence))
        self.validate_abstract_methods(MutableSequence, '__contains__', '__iter__',
            '__len__', '__getitem__', '__setitem__', '__delitem__', 'insert')
    def test_MutableSequence_mixins(self):
        # Test the mixins of MutableSequence by creating a minimal concrete
        # class inherited from it.
        class MutableSequenceSubclass(MutableSequence):
            def __init__(self):
                self.lst = []
            def __setitem__(self, index, value):
                self.lst[index] = value
            def __getitem__(self, index):
                return self.lst[index]
            def __len__(self):
                return len(self.lst)
            def __delitem__(self, index):
                del self.lst[index]
            def insert(self, index, value):
                self.lst.insert(index, value)
        mss = MutableSequenceSubclass()
        mss.append(0)
        mss.extend((1, 2, 3, 4))
        self.assertEqual(len(mss), 5)
        self.assertEqual(mss[3], 3)
        mss.reverse()
        self.assertEqual(mss[3], 1)
        mss.pop()
        self.assertEqual(len(mss), 4)
        mss.remove(3)
        self.assertEqual(len(mss), 3)
        mss += (10, 20, 30)
        self.assertEqual(len(mss), 6)
        self.assertEqual(mss[-1], 30)
        mss.clear()
        self.assertEqual(len(mss), 0)
class TestCounter(unittest.TestCase):
def test_basics(self):
c = Counter('abcaba')
self.assertEqual(c, Counter({'a':3 , 'b': 2, 'c': 1}))
self.assertEqual(c, Counter(a=3, b=2, c=1))
self.assertIsInstance(c, dict)
self.assertIsInstance(c, Mapping)
self.assertTrue(issubclass(Counter, dict))
self.assertTrue(issubclass(Counter, Mapping))
self.assertEqual(len(c), 3)
self.assertEqual(sum(c.values()), 6)
self.assertEqual(sorted(c.values()), [1, 2, 3])
self.assertEqual(sorted(c.keys()), ['a', 'b', 'c'])
self.assertEqual(sorted(c), ['a', 'b', 'c'])
self.assertEqual(sorted(c.items()),
[('a', 3), ('b', 2), ('c', 1)])
self.assertEqual(c['b'], 2)
self.assertEqual(c['z'], 0)
self.assertEqual(c.__contains__('c'), True)
self.assertEqual(c.__contains__('z'), False)
self.assertEqual(c.get('b', 10), 2)
self.assertEqual(c.get('z', 10), 10)
self.assertEqual(c, dict(a=3, b=2, c=1))
self.assertEqual(repr(c), "Counter({'a': 3, 'b': 2, 'c': 1})")
self.assertEqual(c.most_common(), [('a', 3), ('b', 2), ('c', 1)])
for i in range(5):
self.assertEqual(c.most_common(i),
[('a', 3), ('b', 2), ('c', 1)][:i])
self.assertEqual(''.join(sorted(c.elements())), 'aaabbc')
c['a'] += 1 # increment an existing value
c['b'] -= 2 # sub existing value to zero
del c['c'] # remove an entry
del c['c'] # make sure that del doesn't raise KeyError
c['d'] -= 2 # sub from a missing value
c['e'] = -5 # directly assign a missing value
c['f'] += 4 # add to a missing value
self.assertEqual(c, dict(a=4, b=0, d=-2, e=-5, f=4))
self.assertEqual(''.join(sorted(c.elements())), 'aaaaffff')
self.assertEqual(c.pop('f'), 4)
self.assertNotIn('f', c)
for i in range(3):
elem, cnt = c.popitem()
self.assertNotIn(elem, c)
c.clear()
self.assertEqual(c, {})
self.assertEqual(repr(c), 'Counter()')
self.assertRaises(NotImplementedError, Counter.fromkeys, 'abc')
self.assertRaises(TypeError, hash, c)
c.update(dict(a=5, b=3))
c.update(c=1)
c.update(Counter('a' * 50 + 'b' * 30))
c.update() # test case with no args
c.__init__('a' * 500 + 'b' * 300)
c.__init__('cdc')
c.__init__()
self.assertEqual(c, dict(a=555, b=333, c=3, d=1))
self.assertEqual(c.setdefault('d', 5), 1)
self.assertEqual(c['d'], 1)
self.assertEqual(c.setdefault('e', 5), 5)
self.assertEqual(c['e'], 5)
def test_copying(self):
# Check that counters are copyable, deepcopyable, picklable, and
#have a repr/eval round-trip
words = Counter('which witch had which witches wrist watch'.split())
update_test = Counter()
update_test.update(words)
for i, dup in enumerate([
words.copy(),
copy.copy(words),
copy.deepcopy(words),
pickle.loads(pickle.dumps(words, 0)),
pickle.loads(pickle.dumps(words, 1)),
pickle.loads(pickle.dumps(words, 2)),
pickle.loads(pickle.dumps(words, -1)),
eval(repr(words)),
update_test,
Counter(words),
]):
msg = (i, dup, words)
self.assertTrue(dup is not words)
self.assertEqual(dup, words)
self.assertEqual(len(dup), len(words))
self.assertEqual(type(dup), type(words))
def test_copy_subclass(self):
class MyCounter(Counter):
pass
c = MyCounter('slartibartfast')
d = c.copy()
self.assertEqual(d, c)
self.assertEqual(len(d), len(c))
self.assertEqual(type(d), type(c))
def test_conversions(self):
# Convert to: set, list, dict
s = 'she sells sea shells by the sea shore'
self.assertEqual(sorted(Counter(s).elements()), sorted(s))
self.assertEqual(sorted(Counter(s)), sorted(set(s)))
self.assertEqual(dict(Counter(s)), dict(Counter(s).items()))
self.assertEqual(set(Counter(s)), set(s))
def test_invariant_for_the_in_operator(self):
c = Counter(a=10, b=-2, c=0)
for elem in c:
self.assertTrue(elem in c)
self.assertIn(elem, c)
def test_multiset_operations(self):
# Verify that adding a zero counter will strip zeros and negatives
c = Counter(a=10, b=-2, c=0) + Counter()
self.assertEqual(dict(c), dict(a=10))
elements = 'abcd'
for i in range(1000):
# test random pairs of multisets
p = Counter(dict((elem, randrange(-2,4)) for elem in elements))
p.update(e=1, f=-1, g=0)
q = Counter(dict((elem, randrange(-2,4)) for elem in elements))
q.update(h=1, i=-1, j=0)
for counterop, numberop in [
(Counter.__add__, lambda x, y: max(0, x+y)),
(Counter.__sub__, lambda x, y: max(0, x-y)),
(Counter.__or__, lambda x, y: max(0,x,y)),
(Counter.__and__, lambda x, y: max(0, min(x,y))),
]:
result = counterop(p, q)
for x in elements:
self.assertEqual(numberop(p[x], q[x]), result[x],
(counterop, x, p, q))
# verify that results exclude non-positive counts
self.assertTrue(x>0 for x in result.values())
elements = 'abcdef'
for i in range(100):
# verify that random multisets with no repeats are exactly like sets
p = Counter(dict((elem, randrange(0, 2)) for elem in elements))
q = Counter(dict((elem, randrange(0, 2)) for elem in elements))
for counterop, setop in [
(Counter.__sub__, set.__sub__),
(Counter.__or__, set.__or__),
(Counter.__and__, set.__and__),
]:
counter_result = counterop(p, q)
set_result = setop(set(p.elements()), set(q.elements()))
self.assertEqual(counter_result, dict.fromkeys(set_result, 1))
def test_inplace_operations(self):
    """In-place multiset operators (+=, -=, |=, &=) must agree with their
    binary counterparts and mutate the left operand in place (same id)."""
    elements = 'abcd'
    for i in range(1000):
        # test random pairs of multisets
        p = Counter(dict((elem, randrange(-2,4)) for elem in elements))
        p.update(e=1, f=-1, g=0)
        q = Counter(dict((elem, randrange(-2,4)) for elem in elements))
        q.update(h=1, i=-1, j=0)
        for inplace_op, regular_op in [
            (Counter.__iadd__, Counter.__add__),
            (Counter.__isub__, Counter.__sub__),
            (Counter.__ior__, Counter.__or__),
            (Counter.__iand__, Counter.__and__),
        ]:
            c = p.copy()
            c_id = id(c)
            regular_result = regular_op(c, q)
            inplace_result = inplace_op(c, q)
            self.assertEqual(inplace_result, regular_result)
            # identical id proves the operation happened in place
            self.assertEqual(id(inplace_result), c_id)
def test_subtract(self):
c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40)
c.subtract(a=1, b=2, c=-3, d=10, e=20, f=30, h=-50)
self.assertEqual(c, Counter(a=-6, b=-2, c=8, d=0, e=-5, f=-30, g=40, h=50))
c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40)
c.subtract(Counter(a=1, b=2, c=-3, d=10, e=20, f=30, h=-50))
self.assertEqual(c, Counter(a=-6, b=-2, c=8, d=0, e=-5, f=-30, g=40, h=50))
c = Counter('aaabbcd')
c.subtract('aaaabbcce')
self.assertEqual(c, Counter(a=-1, b=0, c=-1, d=1, e=-1))
def test_unary(self):
    """Unary + keeps only positive counts; unary - negates, then does so."""
    c = Counter(a=-5, b=0, c=5, d=10, e=15,g=40)
    self.assertEqual(dict(+c), dict(c=5, d=10, e=15, g=40))
    self.assertEqual(dict(-c), dict(a=5))
def test_repr_nonsortable(self):
    """repr() must not crash when counts are mutually unorderable."""
    c = Counter(a=2, b=None)
    r = repr(c)
    self.assertIn("'a': 2", r)
    self.assertIn("'b': None", r)
def test_helper_function(self):
    """_count_elements has two code paths (fast path for real dicts, a
    generic path for other mappings); both must yield identical counts."""
    # two paths, one for real dicts and one for other mappings
    elems = list('abracadabra')

    d = dict()
    _count_elements(d, elems)
    self.assertEqual(d, {'a': 5, 'r': 2, 'b': 2, 'c': 1, 'd': 1})

    m = OrderedDict()
    _count_elements(m, elems)
    self.assertEqual(m,
        OrderedDict([('a', 5), ('b', 2), ('r', 2), ('c', 1), ('d', 1)]))
class TestOrderedDict(unittest.TestCase):
    """Tests for collections.OrderedDict: construction, mutation, ordering
    invariants, iteration, copying/pickling, and subclass behaviour."""

    def test_init(self):
        with self.assertRaises(TypeError):
            OrderedDict([('a', 1), ('b', 2)], None)                                 # too many args
        pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
        self.assertEqual(sorted(OrderedDict(dict(pairs)).items()), pairs)           # dict input
        self.assertEqual(sorted(OrderedDict(**dict(pairs)).items()), pairs)         # kwds input
        self.assertEqual(list(OrderedDict(pairs).items()), pairs)                   # pairs input
        self.assertEqual(list(OrderedDict([('a', 1), ('b', 2), ('c', 9), ('d', 4)],
                                          c=3, e=5).items()), pairs)                # mixed input

        # make sure no positional args conflict with possible kwdargs
        self.assertEqual(inspect.getargspec(OrderedDict.__dict__['__init__']).args,
                         ['self'])

        # Make sure that direct calls to __init__ do not clear previous contents
        d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)])
        d.__init__([('e', 5), ('f', 6)], g=7, d=4)
        self.assertEqual(list(d.items()),
            [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)])

    def test_update(self):
        with self.assertRaises(TypeError):
            OrderedDict().update([('a', 1), ('b', 2)], None)                        # too many args
        pairs = [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5)]
        od = OrderedDict()
        od.update(dict(pairs))
        self.assertEqual(sorted(od.items()), pairs)                                 # dict input
        od = OrderedDict()
        od.update(**dict(pairs))
        self.assertEqual(sorted(od.items()), pairs)                                 # kwds input
        od = OrderedDict()
        od.update(pairs)
        self.assertEqual(list(od.items()), pairs)                                   # pairs input
        od = OrderedDict()
        od.update([('a', 1), ('b', 2), ('c', 9), ('d', 4)], c=3, e=5)
        self.assertEqual(list(od.items()), pairs)                                   # mixed input

        # Issue 9137: Named argument called 'other' or 'self'
        # shouldn't be treated specially.
        od = OrderedDict()
        od.update(self=23)
        self.assertEqual(list(od.items()), [('self', 23)])
        od = OrderedDict()
        od.update(other={})
        self.assertEqual(list(od.items()), [('other', {})])
        od = OrderedDict()
        od.update(red=5, blue=6, other=7, self=8)
        self.assertEqual(sorted(list(od.items())),
                         [('blue', 6), ('other', 7), ('red', 5), ('self', 8)])

        # Make sure that direct calls to update do not clear previous contents
        # and that updated items are not moved to the end
        d = OrderedDict([('a', 1), ('b', 2), ('c', 3), ('d', 44), ('e', 55)])
        d.update([('e', 5), ('f', 6)], g=7, d=4)
        self.assertEqual(list(d.items()),
            [('a', 1), ('b', 2), ('c', 3), ('d', 4), ('e', 5), ('f', 6), ('g', 7)])

    def test_abc(self):
        # OrderedDict participates in the MutableMapping ABC
        self.assertIsInstance(OrderedDict(), MutableMapping)
        self.assertTrue(issubclass(OrderedDict, MutableMapping))

    def test_clear(self):
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        shuffle(pairs)
        od = OrderedDict(pairs)
        self.assertEqual(len(od), len(pairs))
        od.clear()
        self.assertEqual(len(od), 0)

    def test_delitem(self):
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        od = OrderedDict(pairs)
        del od['a']
        self.assertNotIn('a', od)
        with self.assertRaises(KeyError):
            del od['a']
        self.assertEqual(list(od.items()), pairs[:2] + pairs[3:])

    def test_setitem(self):
        od = OrderedDict([('d', 1), ('b', 2), ('c', 3), ('a', 4), ('e', 5)])
        od['c'] = 10           # existing element keeps its position
        od['f'] = 20           # new element goes to the end
        self.assertEqual(list(od.items()),
                         [('d', 1), ('b', 2), ('c', 10), ('a', 4), ('e', 5), ('f', 20)])

    def test_iterators(self):
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        shuffle(pairs)
        od = OrderedDict(pairs)
        self.assertEqual(list(od), [t[0] for t in pairs])
        self.assertEqual(list(od.keys()), [t[0] for t in pairs])
        self.assertEqual(list(od.values()), [t[1] for t in pairs])
        self.assertEqual(list(od.items()), pairs)
        self.assertEqual(list(reversed(od)),
                         [t[0] for t in reversed(pairs)])

    def test_popitem(self):
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        shuffle(pairs)
        od = OrderedDict(pairs)
        # popitem() removes from the end in LIFO order
        while pairs:
            self.assertEqual(od.popitem(), pairs.pop())
        with self.assertRaises(KeyError):
            od.popitem()
        self.assertEqual(len(od), 0)

    def test_pop(self):
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        shuffle(pairs)
        od = OrderedDict(pairs)
        shuffle(pairs)
        while pairs:
            k, v = pairs.pop()
            self.assertEqual(od.pop(k), v)
        with self.assertRaises(KeyError):
            od.pop('xyz')
        self.assertEqual(len(od), 0)
        self.assertEqual(od.pop(k, 12345), 12345)

        # make sure pop still works when __missing__ is defined
        class Missing(OrderedDict):
            def __missing__(self, key):
                return 0
        m = Missing(a=1)
        self.assertEqual(m.pop('b', 5), 5)
        self.assertEqual(m.pop('a', 6), 1)
        self.assertEqual(m.pop('a', 6), 6)
        with self.assertRaises(KeyError):
            m.pop('a')

    def test_equality(self):
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        shuffle(pairs)
        od1 = OrderedDict(pairs)
        od2 = OrderedDict(pairs)
        self.assertEqual(od1, od2)          # same order implies equality
        pairs = pairs[2:] + pairs[:2]
        od2 = OrderedDict(pairs)
        self.assertNotEqual(od1, od2)       # different order implies inequality
        # comparison to regular dict is not order sensitive
        self.assertEqual(od1, dict(od2))
        self.assertEqual(dict(od2), od1)
        # different length implies inequality
        self.assertNotEqual(od1, OrderedDict(pairs[:-1]))

    def test_copying(self):
        # Check that ordered dicts are copyable, deepcopyable, picklable,
        # and have a repr/eval round-trip
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        od = OrderedDict(pairs)
        update_test = OrderedDict()
        update_test.update(od)
        for i, dup in enumerate([
                    od.copy(),
                    copy.copy(od),
                    copy.deepcopy(od),
                    pickle.loads(pickle.dumps(od, 0)),
                    pickle.loads(pickle.dumps(od, 1)),
                    pickle.loads(pickle.dumps(od, 2)),
                    pickle.loads(pickle.dumps(od, 3)),
                    pickle.loads(pickle.dumps(od, -1)),
                    eval(repr(od)),
                    update_test,
                    OrderedDict(od),
                    ]):
            self.assertTrue(dup is not od)
            self.assertEqual(dup, od)
            self.assertEqual(list(dup.items()), list(od.items()))
            self.assertEqual(len(dup), len(od))
            self.assertEqual(type(dup), type(od))

    def test_yaml_linkage(self):
        # Verify that __reduce__ is setup in a way that supports PyYAML's dump() feature.
        # In yaml, lists are native but tuples are not.
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        od = OrderedDict(pairs)
        # yaml.dump(od) -->
        # '!!python/object/apply:__main__.OrderedDict\n- - [a, 1]\n  - [b, 2]\n'
        self.assertTrue(all(type(pair)==list for pair in od.__reduce__()[1]))

    def test_reduce_not_too_fat(self):
        # do not save instance dictionary if not needed
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        od = OrderedDict(pairs)
        self.assertEqual(len(od.__reduce__()), 2)
        od.x = 10
        self.assertEqual(len(od.__reduce__()), 3)

    def test_repr(self):
        od = OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])
        self.assertEqual(repr(od),
            "OrderedDict([('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)])")
        self.assertEqual(eval(repr(od)), od)
        self.assertEqual(repr(OrderedDict()), "OrderedDict()")

    def test_repr_recursive(self):
        # See issue #9826
        od = OrderedDict.fromkeys('abc')
        od['x'] = od
        self.assertEqual(repr(od),
            "OrderedDict([('a', None), ('b', None), ('c', None), ('x', ...)])")

    def test_setdefault(self):
        pairs = [('c', 1), ('b', 2), ('a', 3), ('d', 4), ('e', 5), ('f', 6)]
        shuffle(pairs)
        od = OrderedDict(pairs)
        pair_order = list(od.items())
        self.assertEqual(od.setdefault('a', 10), 3)
        # make sure order didn't change
        self.assertEqual(list(od.items()), pair_order)
        self.assertEqual(od.setdefault('x', 10), 10)
        # make sure 'x' is added to the end
        self.assertEqual(list(od.items())[-1], ('x', 10))

        # make sure setdefault still works when __missing__ is defined
        class Missing(OrderedDict):
            def __missing__(self, key):
                return 0
        self.assertEqual(Missing().setdefault(5, 9), 9)

    def test_reinsert(self):
        # Given insert a, insert b, delete a, re-insert a,
        # verify that a is now later than b.
        od = OrderedDict()
        od['a'] = 1
        od['b'] = 2
        del od['a']
        od['a'] = 1
        self.assertEqual(list(od.items()), [('b', 2), ('a', 1)])

    def test_move_to_end(self):
        od = OrderedDict.fromkeys('abcde')
        self.assertEqual(list(od), list('abcde'))
        od.move_to_end('c')
        self.assertEqual(list(od), list('abdec'))
        od.move_to_end('c', 0)
        self.assertEqual(list(od), list('cabde'))
        od.move_to_end('c', 0)          # already first: no-op
        self.assertEqual(list(od), list('cabde'))
        od.move_to_end('e')             # already last: no-op
        self.assertEqual(list(od), list('cabde'))
        with self.assertRaises(KeyError):
            od.move_to_end('x')

    def test_sizeof(self):
        # Wimpy test: Just verify the reported size is larger than a regular dict
        d = dict(a=1)
        od = OrderedDict(**d)
        self.assertGreater(sys.getsizeof(od), sys.getsizeof(d))

    def test_override_update(self):
        # Verify that subclasses can override update() without breaking __init__()
        class MyOD(OrderedDict):
            def update(self, *args, **kwds):
                raise Exception()
        items = [('a', 1), ('c', 3), ('b', 2)]
        self.assertEqual(list(MyOD(items).items()), items)
class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol):
    """Run the generic mapping-protocol battery against OrderedDict."""
    type2test = OrderedDict

    def test_popitem(self):
        """popitem() on an empty mapping raises KeyError."""
        d = self._empty_mapping()
        self.assertRaises(KeyError, d.popitem)
class MyOrderedDict(OrderedDict):
    """Trivial subclass used to verify subclasses keep the mapping protocol."""
    pass
class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol):
    """Same mapping-protocol battery, but against an OrderedDict subclass."""
    type2test = MyOrderedDict

    def test_popitem(self):
        """popitem() on an empty mapping raises KeyError."""
        d = self._empty_mapping()
        self.assertRaises(KeyError, d.popitem)
import doctest, collections
def test_main(verbose=None):
    """Run every test class in this module plus the collections doctests."""
    NamedTupleDocs = doctest.DocTestSuite(module=collections)
    test_classes = [TestNamedTuple, NamedTupleDocs, TestOneTrickPonyABCs,
                    TestCollectionABCs, TestCounter, TestChainMap,
                    TestOrderedDict, GeneralMappingTests, SubclassMappingTests]
    support.run_unittest(*test_classes)
    support.run_doctest(collections, verbose)
if __name__ == "__main__":
    # Verbose run when this test module is executed directly.
    test_main(verbose=True)
|
"""
:copyright: (c) 2013 by Carlos Abalde, see AUTHORS.txt for more details.
:license: GPL, see LICENSE.txt for more details.
"""
from __future__ import absolute_import
import sys
import codecs
from time import ctime
from optparse import OptionParser
try:
from xml.etree.ElementTree import parse
except ImportError:
try:
from cElementTree import parse
except ImportError:
from elementtree.ElementTree import parse
from wurfl_python.exceptions import DeferredDeviceException
class Device(object):
    """One <device> element from a WURFL XML database.

    Exposes the device's user agent, id, fall_back parent, the
    actual_device_root flag, and a flat name -> value capability mapping
    (optionally restricted to a set of capability group ids).
    """

    def __init__(self, device, groups):
        '''
        @param device: An elementtree.Element instance of a device element in
            a WURFL XML file.
        @type device: elementtree.Element
        @param groups: None or set of WURFL capability group names.
        @type groups: set
        '''
        attrs = device.attrib
        self.ua = attrs[u'user_agent']
        self.id = attrs[u'id']
        self.parent = attrs[u'fall_back']
        # The flag is optional in the XML; anything other than a
        # case-insensitive 'true' counts as False.
        root_flag = attrs.get(u'actual_device_root')
        self.actual_device_root = (root_flag is not None and
                                   root_flag.lower() == u'true')
        # Flatten the capability groups we are interested in.
        self.capabilities = {}
        for group in device:
            if groups is not None and group.attrib['id'] not in groups:
                continue
            for capability in group:
                self.capabilities[capability.attrib['name']] = capability.attrib['value']
class Processor(object):
    """Translate a WURFL XML database into a generated Python module.

    Devices are written in dependency order: a device is dumped only after
    its fall_back parent has been dumped, so out-of-order devices are
    parked in ``self.deferred`` until their parent appears.

    NOTE(review): this module targets Python 2 (``cStringIO`` import below,
    ``ur'...'`` prefixes in the generated output) -- confirm before running
    under Python 3.
    """

    def __init__(self, input, groups, output):
        '''
        @param input: WURFL XML file path. It can be a regular, zip, bzip2
            or gzipped file.
        @type input: string
        @param groups: None or list of WURFL capability group names.
        @type groups: list
        @param output: Python database file path.
        @type output: string
        '''
        # Capability groups.
        self.groups = set(groups) if groups is not None else None
        # XML input: transparently decompress based on the file extension.
        if input.endswith('.gz'):
            import gzip
            input = gzip.open(input, 'rb')
        elif input.endswith('.bz2'):
            from bz2 import BZ2File
            input = BZ2File(input)
        elif input.endswith('.zip'):
            from zipfile import ZipFile
            from cStringIO import StringIO
            # Only the first member of the archive is read.
            zfile = ZipFile(input)
            input = StringIO(zfile.read(zfile.namelist()[0]))
        else:
            input = open(input, 'rb')
        self.tree = parse(input)
        # Python output.
        self.output = codecs.open(output, 'wb', 'utf8')
        # Fetch normalized capability types.
        self._load_capability_types()

    def process(self):
        """Dump the whole device tree to the output module."""
        # Initialize bookkeeping for the dependency-ordered dump.
        self.deferred = {}
        self.done = set()
        # Dump Python header.
        self._dump_header()
        # Process devices.
        for item in self.tree.getroot().find('devices'):
            # Instantiate device.
            device = Device(item, self.groups)
            # Ready to dump? Only if its parent was already written (or it
            # hangs directly off the pseudo 'root' device).
            if device.parent != 'root' and (device.parent not in self.done):
                if device.parent not in self.deferred:
                    self.deferred[device.parent] = []
                self.deferred[device.parent].append(device)
            else:
                self.done.add(device.id)
                self._dump_device(device)
                self._process_deferred()
        # Process deferred devices until a fixed point: if a pass makes no
        # progress the remaining devices have no reachable parent.
        while self.deferred:
            deferred_len = len(self.deferred)
            self._process_deferred()
            if deferred_len == len(self.deferred):
                raise DeferredDeviceException('%s devices still deferred: %s' % (deferred_len, self.deferred.keys()))

    def _process_deferred(self):
        '''
        Called to process any deferred devices (devices that have been defined
        in the WURFL before their fall_back has been defined). It is called
        after any device has been handled and also called in a loop
        after all device definitions in the WURFL database have been exhausted.
        '''
        dumped = []
        for parent in self.deferred:
            if parent in self.done:
                for device in self.deferred[parent]:
                    self.done.add(device.id)
                    self._dump_device(device)
                dumped.append(parent)
        # Delete satisfied parents only after iterating self.deferred.
        for id in dumped:
            del self.deferred[id]

    def _dump_header(self):
        """Write the generated module preamble (encoding, timestamp, version)."""
        self.output.write(u"# -*- coding: utf-8 -*-\n")
        self.output.write(u"# Generated on: %s.\n" % ctime())
        self.output.write(u"# Version: %s.\n\n" % self.tree.findtext("*/ver").strip())
        self.output.write(u"from __future__ import absolute_import\n")
        self.output.write(u"from wurfl_python import Repository, match, find\n\n")

    def _dump_device(self, device):
        """Write one Repository.register(...) call for *device*, rendering
        each capability value with the type inferred in _load_capability_types."""
        capabilities = []
        for capability in sorted(device.capabilities):
            value = device.capabilities[capability]
            capability_type = self.capability_types.get(capability, None)
            if capability_type == int:
                capabilities.append(u"ur'''%s''':%d" % (capability, int(value.strip())))
            elif capability_type == float:
                capabilities.append(u"ur'''%s''':%f" % (capability, float(value.strip())))
            elif capability_type == bool:
                # NOTE(review): a bool-typed value that is neither 'true' nor
                # 'false' is silently dropped here -- confirm that is intended.
                if value.lower() == u'true':
                    capabilities.append(u"ur'''%s''':True" % capability)
                elif value.lower() == u'false':
                    capabilities.append(u"ur'''%s''':False" % capability)
            else:
                capabilities.append(u"ur'''%s''':ur'''%s'''" % (capability, value))
        self.output.write(u"Repository.register(ur'''%s''', ur'''%s''', %s, {%s}, %s)\n\n" % (
            device.id,
            # A trailing backslash would escape the closing triple quote of
            # the generated raw string, so it is doubled up.
            device.ua if not device.ua.endswith(u'\\') else u'%s\\' % device.ua,
            device.actual_device_root,
            u','.join(capabilities),
            u"ur'''%s'''" % device.parent if device.parent != u'root' else u'None'))

    def _load_capability_types(self):
        """Infer the narrowest type (int, float, bool or str) for each
        capability name by scanning every value in the database; a name is
        widened to str as soon as one value stops fitting its current type."""
        self.capability_types = {}
        for capability in self.tree.findall('devices/device/group/capability'):
            name = capability.attrib['name']
            value = capability.attrib['value']
            if name not in self.capability_types:
                # First sighting: try int, then float, then bool, else str.
                try:
                    int(value)
                    self.capability_types[name] = int
                    continue
                except (TypeError, ValueError):
                    pass
                try:
                    float(value)
                    self.capability_types[name] = float
                    continue
                except (TypeError, ValueError):
                    pass
                if value.strip().lower() in ('true', 'false'):
                    self.capability_types[name] = bool
                    continue
                else:
                    self.capability_types[name] = str
            else:
                # Already typed: demote to str if this value does not fit.
                if self.capability_types[name] == str:
                    continue
                elif self.capability_types[name] == bool:
                    if value.strip().lower() in ('true', 'false'):
                        continue
                    else:
                        self.capability_types[name] = str
                elif self.capability_types[name] == float:
                    try:
                        float(value)
                        continue
                    except (TypeError, ValueError):
                        self.capability_types[name] = str
                elif self.capability_types[name] == int:
                    try:
                        int(value)
                        continue
                    except (TypeError, ValueError):
                        self.capability_types[name] = str
def main():
    """Command-line entry point: parse options and run the Processor."""
    option_parser = OptionParser(usage='%prog <WURFL XML file>')
    option_parser.add_option(
        '-o',
        '--output',
        dest='output',
        default='wurfl.py',
        help='Name of the database Python module to produce. Defaults to wurfl.py.')
    option_parser.add_option(
        '-g',
        '--group',
        dest='groups',
        default=None,
        action='append',
        help='Name of a capability group to be included in the output database. If no groups are specified, all input database capabilities groups are included in the output.')
    options, args = option_parser.parse_args()
    if args:
        wurfl = Processor(args[0], options.groups, options.output)
        wurfl.process()
    else:
        # No input file given: print usage and exit with an error status.
        sys.stderr.write(option_parser.get_usage())
        sys.exit(1)
if __name__ == '__main__':
    # Allow running this module directly as a script.
    main()
|
class SkillNotFound(Exception):
    """Raised when a skill with the given identifier cannot be located."""

    def __init__(self, skill_id):
        # Keep the offending id around so callers can inspect it.
        self.skill_id = skill_id
        super(SkillNotFound, self).__init__("Skill not found: {}".format(skill_id))
|
from . import ast
from .setree import SEBlockItem, SEScope, SEIf, SESwitch, SETry, SEWhile
from ..ssa import ssa_types, ssa_ops, ssa_jumps, objtypes
from ..namegen import LabelGen
from ..verifier.descriptors import parseFieldDescriptor, parseMethodDescriptor
from .. import opnames
# Single-letter prefixes used to derive fresh Java variable names by type;
# anything not listed falls back to 'a' (see VarInfo._nameCallback).
_prefix_map = {objtypes.IntTT:'i', objtypes.LongTT:'j',
            objtypes.FloatTT:'f', objtypes.DoubleTT:'d',
            objtypes.BoolTT:'b', objtypes.StringTT:'s'}

# Java-level type for each primitive SSA type.
_ssaToTT = {ssa_types.SSA_INT:objtypes.IntTT, ssa_types.SSA_LONG:objtypes.LongTT,
            ssa_types.SSA_FLOAT:objtypes.FloatTT, ssa_types.SSA_DOUBLE:objtypes.DoubleTT}
class VarInfo(object):
    """Maps SSA variables to Java AST expressions (locals or literals).

    Each (node, var, isCast) triple is lazily materialised into an
    ast.Local (or ast.Literal for constants) and cached, so the same SSA
    variable always renders as the same Java variable.
    """

    def __init__(self, method, blocks, namegen):
        self.env = method.class_.env
        # NOTE(review): .next is the Python 2 iterator protocol; under
        # Python 3 this would be __next__ -- confirm the target version.
        self.labelgen = LabelGen().next

        returnTypes = parseMethodDescriptor(method.descriptor, unsynthesize=False)[-1]
        self.return_tt = objtypes.verifierToSynthetic(returnTypes[0]) if returnTypes else None
        self.clsname = method.class_.name
        self._namegen = namegen

        self._uninit_vars = {}
        self._vars = {}
        # Precompute the Java-level type for every SSA variable.
        self._tts = {}
        for block in blocks:
            for var, uc in block.unaryConstraints.items():
                if var.type == ssa_types.SSA_MONAD:
                    continue
                if var.type == ssa_types.SSA_OBJECT:
                    tt = uc.getSingleTType() #temp hack
                    if uc.types.isBoolOrByteArray():
                        tt = objtypes.TypeTT(objtypes.BExpr, objtypes.dim(tt)+1)
                        # assert((objtypes.BoolTT[0], tt[1]) in uc.types.exact)
                else:
                    tt = _ssaToTT[var.type]
                self._tts[var] = tt

    def _nameCallback(self, expr):
        # Fall back to 'a' when the dtype has no dedicated primitive prefix.
        prefix = _prefix_map.get(expr.dtype, 'a')
        return self._namegen.getPrefix(prefix)

    def _newVar(self, var, num):
        """Create the AST expression backing *var* (literal or fresh local)."""
        tt = self._tts[var]
        if var.const is not None:
            return ast.Literal(tt, var.const)

        if var.name:
            #important to not add num when it is 0, since we currently
            #use var names to force 'this'
            temp = '{}_{}'.format(var.name, num) if num else var.name
            namefunc = lambda expr:temp
        else:
            namefunc = self._nameCallback
        result = ast.Local(tt, namefunc)

        # merge all variables of uninitialized type to simplify fixObjectCreations in javamethod.py
        if var.uninit_orig_num is not None:
            result = self._uninit_vars.setdefault(var.uninit_orig_num, result)
        return result

    def var(self, node, var, isCast=False):
        """Return (creating and caching on first use) the expression for
        *var* as seen at *node*; isCast requests a distinct post-cast local."""
        assert(var.type != ssa_types.SSA_MONAD)
        key = node, var, isCast
        try:
            return self._vars[key]
        except KeyError:
            new = self._newVar(key[1], key[0].num)
            self._vars[key] = new
            return new

    def customVar(self, tt, prefix): #for use with ignored exceptions
        """Create a fresh, uncached local of type *tt* named from *prefix*."""
        namefunc = lambda expr: self._namegen.getPrefix(prefix)
        return ast.Local(tt, namefunc)
# SSA ops rendered as Java binary infix operators; the tuple order must
# match the operator strings zipped below.
_math_types = (ssa_ops.IAdd, ssa_ops.IDiv, ssa_ops.IMul, ssa_ops.IRem, ssa_ops.ISub)
_math_types += (ssa_ops.IAnd, ssa_ops.IOr, ssa_ops.IShl, ssa_ops.IShr, ssa_ops.IUshr, ssa_ops.IXor)
_math_types += (ssa_ops.FAdd, ssa_ops.FDiv, ssa_ops.FMul, ssa_ops.FRem, ssa_ops.FSub)
_math_symbols = dict(zip(_math_types, '+ / * % - & | << >> >>> ^ + / * % -'.split()))
def _convertJExpr(op, getExpr, clsname):
    """Convert a single SSA op into a Java AST statement (or None).

    getExpr maps an SSA variable to its AST expression; clsname is the
    current class name, used to abbreviate static accesses on it. Returns
    a statement node, None (nothing to print), or a //-comment placeholder
    for ops not handled yet.
    """
    params = [getExpr(var) for var in op.params if var.type != ssa_types.SSA_MONAD]
    assert(None not in params)
    expr = None

    #Have to do this one separately since it isn't an expression statement
    if isinstance(op, ssa_ops.Throw):
        return ast.ThrowStatement(params[0])

    if isinstance(op, _math_types):
        opdict = _math_symbols
        expr = ast.BinaryInfix(opdict[type(op)], params)
    elif isinstance(op, ssa_ops.ArrLength):
        expr = ast.FieldAccess(params[0], 'length', objtypes.IntTT)
    elif isinstance(op, ssa_ops.ArrLoad):
        expr = ast.ArrayAccess(*params)
    elif isinstance(op, ssa_ops.ArrStore):
        expr = ast.ArrayAccess(params[0], params[1])
        expr = ast.Assignment(expr, params[2])
    elif isinstance(op, ssa_ops.CheckCast):
        expr = ast.Cast(ast.TypeName(op.target_tt), params[0])
    elif isinstance(op, ssa_ops.Convert):
        expr = ast.makeCastExpr(_ssaToTT[op.target], params[0])
    elif isinstance(op, (ssa_ops.FCmp, ssa_ops.ICmp)):
        # Java has no cmp operator; expand it into nested ternaries whose
        # direction depends on the op's NaN ordering.
        boolt = objtypes.BoolTT
        cn1, c0, c1 = ast.Literal.N_ONE, ast.Literal.ZERO, ast.Literal.ONE

        ascend = isinstance(op, ssa_ops.ICmp) or op.NaN_val == 1
        if ascend:
            expr = ast.Ternary(ast.BinaryInfix('<',params,boolt), cn1, ast.Ternary(ast.BinaryInfix('==',params,boolt), c0, c1))
        else:
            assert(op.NaN_val == -1)
            expr = ast.Ternary(ast.BinaryInfix('>',params,boolt), c1, ast.Ternary(ast.BinaryInfix('==',params,boolt), c0, cn1))
    elif isinstance(op, ssa_ops.FieldAccess):
        dtype = objtypes.verifierToSynthetic(parseFieldDescriptor(op.desc, unsynthesize=False)[0])

        if op.instruction[0] in (opnames.GETSTATIC, opnames.PUTSTATIC):
            printLeft = (op.target != clsname) #Don't print classname if it is a static field in current class
            tt = objtypes.TypeTT(op.target, 0) #Doesn't handle arrays, but they don't have any fields anyway
            expr = ast.FieldAccess(ast.TypeName(tt), op.name, dtype, op, printLeft=printLeft)
        else:
            expr = ast.FieldAccess(params[0], op.name, dtype, op)

        if op.instruction[0] in (opnames.PUTFIELD, opnames.PUTSTATIC):
            expr = ast.Assignment(expr, params[-1])

    elif isinstance(op, ssa_ops.FNeg):
        expr = ast.UnaryPrefix('-', params[0])
    elif isinstance(op, ssa_ops.InstanceOf):
        args = params[0], ast.TypeName(op.target_tt)
        expr = ast.BinaryInfix('instanceof', args, dtype=objtypes.BoolTT)
    elif isinstance(op, ssa_ops.Invoke):
        vtypes, rettypes = parseMethodDescriptor(op.desc, unsynthesize=False)
        tt_types = objtypes.verifierToSynthetic_seq(vtypes)
        ret_type = objtypes.verifierToSynthetic(rettypes[0]) if rettypes else None

        target_tt = op.target_tt
        if objtypes.dim(target_tt) and op.name == "clone": #In Java, T[].clone returns T[] rather than Object
            ret_type = target_tt

        if op.instruction[0] == opnames.INVOKEINIT and op.isThisCtor:
            name = 'this' if (op.target == clsname) else 'super'
            expr = ast.MethodInvocation(None, name, tt_types, params[1:], op, ret_type)
        elif op.instruction[0] == opnames.INVOKESTATIC: #TODO - fix this for special super calls
            expr = ast.MethodInvocation(ast.TypeName(target_tt), op.name, [None]+tt_types, params, op, ret_type)
        else:
            expr = ast.MethodInvocation(params[0], op.name, [target_tt]+tt_types, params[1:], op, ret_type)
    elif isinstance(op, ssa_ops.Monitor):
        # Monitor enter/exit are rendered only as comments in the output.
        fmt = '//monexit({})' if op.exit else '//monenter({})'
        expr = ast.Dummy(fmt, params)
    elif isinstance(op, ssa_ops.MultiNewArray):
        expr = ast.ArrayCreation(op.tt, *params)
    elif isinstance(op, ssa_ops.New):
        expr = ast.Dummy('//<unmerged new> {}', [ast.TypeName(op.tt)], isNew=True)
    elif isinstance(op, ssa_ops.NewArray):
        expr = ast.ArrayCreation(op.tt, params[0])
    elif isinstance(op, ssa_ops.Truncate):
        tt = {(True,16): objtypes.ShortTT, (False,16): objtypes.CharTT, (True,8): objtypes.ByteTT}[op.signed, op.width]
        expr = ast.Cast(ast.TypeName(tt), params[0])

    if op.rval is not None and expr:
        expr = ast.Assignment(getExpr(op.rval), expr)

    if expr is None: #Temporary hack to show what's missing
        if isinstance(op, ssa_ops.TryReturn):
            return None #Don't print out anything
        else:
            return ast.StringStatement('//' + type(op).__name__)
    return ast.ExpressionStatement(expr)
def _createASTBlock(info, endk, node):
    """Convert one SSA graph node into an ast.StatementBlock.

    endk is the fallthrough key of the enclosing scope; it becomes this
    block's break key and, for multi-successor jumps, also its jump key
    (the actual if/switch is emitted by the parent scope).
    """
    getExpr = lambda var: info.var(node, var)
    op2expr = lambda op: _convertJExpr(op, getExpr, info.clsname)

    block = node.block
    lines = map(op2expr, block.lines) if block is not None else []
    lines = [x for x in lines if x is not None]

    # Kind of hackish: If the block ends in a cast and hence it is not known to always
    # succeed, assign the results of the cast rather than passing through the variable
    # unchanged
    outreplace = {}
    if lines and isinstance(block.lines[-1], ssa_ops.CheckCast):
        assert(isinstance(lines[-1].expr, ast.Cast))
        var = block.lines[-1].params[0]
        cexpr = lines[-1].expr
        lines[-1].expr = ast.Assignment(info.var(node, var, True), cexpr)
        nvar = outreplace[var] = lines[-1].expr.params[0]
        nvar.dtype = cexpr.dtype

    # Explicit assignments that transfer values to successor nodes:
    # eassigns for exceptional edges, nassigns for normal edges.
    eassigns = []
    nassigns = []
    for n2 in node.successors:
        assert((n2 in node.outvars) != (n2 in node.eassigns))
        if n2 in node.eassigns:
            for outv, inv in zip(node.eassigns[n2], n2.invars):
                if outv is None: #this is how we mark the thrown exception, which
                    #obviously doesn't get an explicit assignment statement
                    continue
                expr = ast.Assignment(info.var(n2, inv), info.var(node, outv))
                if expr.params[0] != expr.params[1]:
                    eassigns.append(ast.ExpressionStatement(expr))
        else:
            for outv, inv in zip(node.outvars[n2], n2.invars):
                right = outreplace.get(outv, info.var(node, outv))
                expr = ast.Assignment(info.var(n2, inv), right)
                if expr.params[0] != expr.params[1]:
                    nassigns.append(ast.ExpressionStatement(expr))

    #Need to put exception assignments before last statement, which might throw
    #While normal assignments must come last as they may depend on it
    statements = lines[:-1] + eassigns + lines[-1:] + nassigns

    norm_successors = node.normalSuccessors()
    jump = None if block is None else block.jump
    if isinstance(jump, (ssa_jumps.Rethrow, ssa_jumps.Return)):
        assert(not norm_successors)
        assert(not node.eassigns and not node.outvars)
        if isinstance(jump, ssa_jumps.Rethrow):
            param = info.var(node, jump.params[-1])
            statements.append(ast.ThrowStatement(param))
        else:
            if len(jump.params) > 1: #even void returns have a monad param
                param = info.var(node, jump.params[-1])
                statements.append(ast.ReturnStatement(param, info.return_tt))
            else:
                statements.append(ast.ReturnStatement())
        breakKey, jumpKey = endk, None
    elif len(norm_successors) == 0:
        assert(isinstance(jump, ssa_jumps.OnException))
        breakKey, jumpKey = endk, None
    elif len(norm_successors) == 1: #normal successors
        breakKey, jumpKey = endk, norm_successors[0]._key
    else: #case of if and switch jumps handled in parent scope
        assert(len(norm_successors) > 1)
        breakKey, jumpKey = endk, endk

    new = ast.StatementBlock(info.labelgen, node._key, breakKey, statements, jumpKey)
    assert(None not in statements)
    return new
# Map verifier comparison codes to their Java operator spellings.
_cmp_strs = dict(zip(('eq','ne','lt','ge','gt','le'), "== != < >= > <=".split()))
def _createASTSub(info, current, ftitem, forceUnlabled=False):
    """Recursively convert one structured-element (SE) tree item into AST.

    ftitem is the item control falls through to after *current* (None at
    the end); its entry key becomes the generated statement's break key.
    forceUnlabled suppresses labels on scopes nested in loops/branches.
    """
    begink = current.entryBlock._key
    endk = ftitem.entryBlock._key if ftitem is not None else None

    if isinstance(current, SEBlockItem):
        return _createASTBlock(info, endk, current.node)
    elif isinstance(current, SEScope):
        # Each item falls through to the next; the last falls through to ftitem.
        ftitems = current.items[1:] + [ftitem]
        parts = [_createASTSub(info, item, newft) for item, newft in zip(current.items, ftitems)]
        return ast.StatementBlock(info.labelgen, begink, endk, parts, endk, labelable=(not forceUnlabled))
    elif isinstance(current, SEWhile):
        parts = [_createASTSub(info, scope, current, True) for scope in current.getScopes()]
        return ast.WhileStatement(info.labelgen, begink, endk, tuple(parts))
    elif isinstance(current, SETry):
        assert(len(current.getScopes()) == 2)
        parts = [_createASTSub(info, scope, ftitem, True) for scope in current.getScopes()]
        catchnode = current.getScopes()[-1].entryBlock
        declt = ast.CatchTypeNames(info.env, current.toptts)

        if current.catchvar is None: #exception is ignored and hence not referred to by the graph, so we need to make our own
            catchvar = info.customVar(declt.dtype, 'ignoredException')
        else:
            catchvar = info.var(catchnode, current.catchvar)
        decl = ast.VariableDeclarator(declt, catchvar)
        pairs = [(decl, parts[1])]
        return ast.TryStatement(info.labelgen, begink, endk, parts[0], pairs)

    #Create a fake key to represent the beginning of the conditional statement itself
    #doesn't matter what it is as long as it's unique
    midk = begink + (-1,)
    node = current.head.node
    jump = node.block.jump

    if isinstance(current, SEIf):
        parts = [_createASTSub(info, scope, ftitem, True) for scope in current.getScopes()]
        cmp_str = _cmp_strs[jump.cmp]
        exprs = [info.var(node, var) for var in jump.params]
        ifexpr = ast.BinaryInfix(cmp_str, exprs, objtypes.BoolTT)
        new = ast.IfStatement(info.labelgen, midk, endk, ifexpr, tuple(parts))

    elif isinstance(current, SESwitch):
        ftitems = current.ordered[1:] + [ftitem]
        parts = [_createASTSub(info, item, newft, True) for item, newft in zip(current.ordered, ftitems)]
        for part in parts:
            part.breakKey = endk #createSub will assume break should be ft, which isn't the case with switch statements
        expr = info.var(node, jump.params[0])
        pairs = zip(current.ordered_keysets, parts)
        new = ast.SwitchStatement(info.labelgen, midk, endk, expr, pairs)

    #bundle head and if together so we can return as single statement
    headscope = _createASTBlock(info, midk, node)
    assert(headscope.jumpKey is midk)
    return ast.StatementBlock(info.labelgen, begink, endk, [headscope, new], endk)
def createAST(method, ssagraph, seroot, namegen):
    """Build the Java AST for *method* from its SSA graph and SE tree.

    Returns a (root statement, VarInfo) pair; the VarInfo carries the
    variable mapping used while generating the tree.
    """
    varinfo = VarInfo(method, ssagraph.blocks, namegen)
    root = _createASTSub(varinfo, seroot, None)
    return root, varinfo
|
'''
Compute the analysis (through direct inversion of the B+R innovation matrix)
and output the error reduction.

For both observation and forecast errors, statistics need to be provided:

    - correlation model
    - correlation length
    - bias (0 by default)
    - variance (constant on the domain)

By default (and as it is a common hypothesis in most contexts), the
observation errors are uncorrelated.
What would be the impact of having correlated observation errors? The impact
of biases?
'''
import numpy as np
from numpy import pi
import matplotlib.pyplot as plt

from DM93 import Covariance, Uncorrelated, Foar, Soar, Gaussian

# NOTE(review): execfile is Python 2 only; config.py must define at least
# `grid` and `km` -- confirm its contents before porting.
execfile('config.py')

# -- Observation error statistics: uncorrelated, unbiased, unit variance.
obsLc = None
obsCorr = Uncorrelated(grid, obsLc)
obsBias = 0.
obsVar = 1.

# -- Forecast (background) error statistics: SOAR correlation model.
fctLc = grid.L/20.
fctCorr = Soar(grid, fctLc)
fctBias = 0.
fctVar = 2.

# Gaussian-shaped truth state of amplitude `ampl`.
ampl = 10.
truth = ampl * np.exp(-grid.x**2/(grid.L/6.)**2)

# Build the covariance matrices and draw correlated error samples.
B = Covariance(grid, fctVar * fctCorr.matrix)
R = Covariance(grid, obsVar * obsCorr.matrix)
fctErr = B.random(bias=fctBias)
obsErr = R.random(bias=obsBias)

# Background state and observations.
xb = truth + fctErr
y = truth + obsErr

# Analysis by direct inversion: K = B (B+R)^-1, xa = xb + K (y - xb).
SInv = np.linalg.inv(B.matrix+R.matrix)
K = B.matrix.dot(SInv)
dxa = K.dot(y-xb)
xa = xb + dxa

# Domain-integrated errors before and after the analysis.
error_b = grid.dx * np.sqrt(sum(fctErr**2))
error_a = grid.dx * np.sqrt(sum((xa-truth)**2))
print('background error = %.1e'%error_b)
print('analysis error = %.1e'%error_a)
print('error reduction = %.1f%%'%((error_b-error_a)/error_b*100.))

# -- Plots: states on top, increments and errors below.
fig = plt.figure()
fig.subplots_adjust(wspace=0.05)
ax1 = plt.subplot(211)
ax2 = plt.subplot(212)
ax1.plot(grid.x, truth, color='k', linewidth=2, label='$x_t$')
ax1.plot(grid.x, xb, color='b', label='$x_b$')
ax1.plot(grid.x, y, color='g', marker='o', linestyle='none', label='$y$')
ax1.plot(grid.x, xa, color='r', linewidth=2, label='$x_a$')
ax2.plot( grid.x, y-xb, color='m', marker='o', markersize=4,
            linestyle='none', label='$y-x_b$')
ax2.plot( grid.x, dxa, color='r', label='$\Delta x_a$')
ax2.plot( grid.x, fctErr, color='b', linestyle=':', linewidth=3,
            label='$\epsilon_b$')
ax2.plot( grid.x, xa-truth, color='r', linestyle=':', linewidth=3,
            label='$\epsilon_a$')
ax2.axhline(y=0, color='k')
xticklabels, xticks = grid.ticks(units=km)[:2]
ax1.set_xticks(xticks)
ax1.set_xticklabels(())
ax2.set_xlabel('$x$ [km]')
ax2.set_xticks(xticks)
ax2.set_xticklabels(xticklabels)
ax1.legend(loc='best')
ax2.legend(loc='best')
plt.show()
|
from .daemon import app
def start(host='0.0.0.0', port=8001):
    """Run the daemon's web application (blocking).

    Parameters
    ----------
    host : str
        Interface to bind to. Default '0.0.0.0' binds all interfaces —
        note this exposes the service on every network interface.
    port : int
        TCP port to listen on (default 8001).
    """
    app.run(host=host, port=port)

if __name__ == '__main__':
    start()
|
"""
A PyQT4 dialog to show ID log and progress
"""
"""
Copyright 2012-2014 Anthony Beville
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
from PyQt4 import QtCore, QtGui, uic
import os
from settings import ComicTaggerSettings
from coverimagewidget import CoverImageWidget
import utils
class AutoTagProgressWindow(QtGui.QDialog):
    """Progress dialog for the auto-tag run.

    Shows a log text area plus two cover images side by side: the cover
    from the archive and the candidate ("test") cover being compared.
    """

    def __init__(self, parent):
        super(AutoTagProgressWindow, self).__init__(parent)
        uic.loadUi(ComicTaggerSettings.getUIFile('autotagprogresswindow.ui' ), self)

        # Embed one cover widget in each of the two container frames from
        # the .ui file.
        self.archiveCoverWidget = self._embedCoverWidget(self.archiveCoverContainer)
        self.testCoverWidget = self._embedCoverWidget(self.testCoverContainer)

        # Set by the owner of the dialog; reject() flips it to stop the run.
        self.isdone = False

        self.setWindowFlags(self.windowFlags() |
                            QtCore.Qt.WindowSystemMenuHint |
                            QtCore.Qt.WindowMaximizeButtonHint)

        utils.reduceWidgetFontSize(self.textEdit)

    def _embedCoverWidget(self, container):
        """Create a data-mode CoverImageWidget inside *container*, return it."""
        widget = CoverImageWidget(container, CoverImageWidget.DataMode, False)
        layout = QtGui.QGridLayout(container)
        layout.addWidget(widget)
        layout.setContentsMargins(0, 0, 0, 0)
        return widget

    def setArchiveImage(self, img_data):
        self.setCoverImage(img_data, self.archiveCoverWidget)

    def setTestImage(self, img_data):
        self.setCoverImage(img_data, self.testCoverWidget)

    def setCoverImage(self, img_data, widget):
        widget.setImageData(img_data)
        # Pump the event loop so the new image paints right away while the
        # long-running tagging loop keeps the GUI thread busy.
        QtCore.QCoreApplication.processEvents()
        QtCore.QCoreApplication.processEvents()

    def reject(self):
        # Closing/cancelling the dialog signals the caller to stop.
        QtGui.QDialog.reject(self)
        self.isdone = True
|
import unittest
from unittest import mock
from unittest.mock import MagicMock
import datetime
import sys
from lofar.sas.resourceassignment.resourceassigner.resource_availability_checker import ResourceAvailabilityChecker
from lofar.sas.resourceassignment.resourceassigner.resource_availability_checker import CouldNotFindClaimException
class ResourceAvailabilityCheckerTest(unittest.TestCase):
    """Tests for ResourceAvailabilityChecker with the RADB RPC layer mocked.

    The class attributes below are shared, read-only fixture values; the
    mutable per-test task record lives in ``self.task`` (see reset_task()).
    """

    # Identifiers of the task under test.
    specification_id = 2323
    task_mom_id = 351543
    task_otdb_id = 1290472
    task_id = 2299
    # Task runs for one hour.
    task_end_time = datetime.datetime(2016, 3, 25, 22, 47, 31)
    task_start_time = datetime.datetime(2016, 3, 25, 21, 47, 31)
    # Sentinel mom_id for which the mocked getTask returns None.
    non_existing_task_mom_id = -1
    # Canned RADB record for a predecessor task of the task under test.
    predecessor_task_mom_id = 1
    predecessor_task_otdb_id = 2
    predecessor_task_id = 3
    predecessor_task = {
        "mom_id": predecessor_task_mom_id,
        "otdb_id": predecessor_task_otdb_id,
        "id": predecessor_task_id,
        "endtime": datetime.datetime(2016, 3, 25, 22, 47, 31),
        "name": "IS HBA_DUAL",
        "predecessor_ids": [],
        "project_mom_id": 2,
        "project_name": "test-lofar",
        "specification_id": 2323,
        "starttime": datetime.datetime(2016, 3, 25, 21, 47, 31),
        "status": "prescheduled",
        "status_id": 350,
        "successor_ids": [],
        "type": "pipeline",
        "type_id": 0
    }
    # Canned RADB record for a successor task of the task under test.
    successor_task_mom_id = 4
    successor_task_otdb_id = 5
    successor_task_id = 6
    successor_task = {
        "mom_id": successor_task_mom_id,
        "otdb_id": successor_task_otdb_id,
        "id": successor_task_id,
        "endtime": datetime.datetime(2016, 3, 25, 22, 47, 31),
        "name": "IS HBA_DUAL",
        "predecessor_ids": [],
        "project_mom_id": 2,
        "project_name": "test-lofar",
        "specification_id": 2323,
        "starttime": datetime.datetime(2016, 3, 25, 21, 47, 31),
        "status": "prescheduled",
        "status_id": 350,
        "successor_ids": [],
        "type": "pipeline",
        "type_id": 0
    }
    # otdb_ids selecting specific canned resource-estimator responses.
    resources_with_rcus_otdb_id = 1290495
    resources_with_errors_otdb_id = 1290496
    resource_error1 = "error 1"
    resource_error2 = "error 2"
    # Resource type that is deliberately absent from the mocked RADB.
    unknown_resource_type_name = "fuel"
    unknown_resource_type_otdb_id = 123489
    # Resource ids of CEP4 bandwidth/storage in the mocked getResources data.
    cep4bandwidth_resource_id = 116
    cep4storage_resource_id = 117
    # Tentative storage claim; storage is held for a year past task end.
    storage_claim = {
        'resource_id': cep4storage_resource_id,
        'resource_type_id': 5,
        'starttime': task_start_time,
        'used_rcus': None,
        'endtime': task_end_time + datetime.timedelta(days=365),
        'status': 'tentative',
        'claim_size': 2,
        'properties': [
            {'io_type': 'output', 'type': 15, 'sap_nr': 0, 'value': 0},
            {'io_type': 'output', 'type': 2, 'sap_nr': 0, 'value': 1},
            {'io_type': 'output', 'type': 10, 'sap_nr': 0, 'value': 1073741824}
        ]
    }
    # Tentative bandwidth claim spanning exactly the task's run time.
    bandwidth_claim = {
        'resource_id': cep4bandwidth_resource_id,
        'resource_type_id': 3,
        'starttime': task_start_time,
        'used_rcus': None,
        'endtime': task_end_time,
        'status': 'tentative',
        'claim_size': 2,
        'properties': []
    }
def reset_task(self):
self.task = {
"mom_id": self.task_mom_id,
"otdb_id": self.task_otdb_id,
"id": self.task_id,
"endtime": self.task_end_time,
"name": "IS HBA_DUAL",
"predecessor_ids": [],
"project_mom_id": 2,
"project_name": "test-lofar",
"specification_id": self.specification_id,
"starttime": self.task_start_time,
"status": "prescheduled",
"status_id": 350,
"successor_ids": [],
"type": "pipeline",
"type_id": 0
}
def setUp(self):
self.reset_task()
def get_task_side_effect(*args, **kwargs):
if 'mom_id' in kwargs:
if kwargs['mom_id'] == self.successor_task_mom_id:
return self.successor_task
elif kwargs['mom_id'] == self.predecessor_task_mom_id:
return self.predecessor_task
elif kwargs['mom_id'] == self.non_existing_task_mom_id:
return None
else:
return self.task
else:
return self.task
self.successor_task_mom_ids = [self.successor_task_mom_id]
self.predecessor_task_mom_ids = [self.predecessor_task_mom_id]
rarpc_patcher = mock.patch('lofar.sas.resourceassignment.resourceassignmentservice.rpc.RADBRPC')
self.addCleanup(rarpc_patcher.stop)
self.rarpc_mock = rarpc_patcher.start()
self.rarpc_mock.getTask.side_effect = get_task_side_effect
self.rarpc_mock.insertOrUpdateSpecificationAndTask.return_value = {
'inserted': True,
'specification_id': self.specification_id,
'task_id': self.task_id
}
self.rarpc_mock.getResourceClaimPropertyTypes.return_value = [
{'id': 0, 'name': 'nr_of_is_files'},
{'id': 1, 'name': 'nr_of_cs_files'},
{'id': 2, 'name': 'nr_of_uv_files'},
{'id': 3, 'name': 'nr_of_im_files'},
{'id': 4, 'name': 'nr_of_img_files'},
{'id': 5, 'name': 'nr_of_pulp_files'},
{'id': 6, 'name': 'nr_of_cs_stokes'},
{'id': 7, 'name': 'nr_of_is_stokes'},
{'id': 8, 'name': 'is_file_size'},
{'id': 9, 'name': 'cs_file_size'},
{'id': 10, 'name': 'uv_file_size'},
{'id': 11, 'name': 'im_file_size'},
{'id': 12, 'name': 'img_file_size'},
{'id': 13, 'name': 'nr_of_pulp_files'},
{'id': 14, 'name': 'nr_of_cs_parts'},
{'id': 15, 'name': 'start_sb_nr'},
{'id': 16, 'name': 'uv_otdb_id'},
{'id': 17, 'name': 'cs_otdb_id'},
{'id': 18, 'name': 'is_otdb_id'},
{'id': 19, 'name': 'im_otdb_id'},
{'id': 20, 'name': 'img_otdb_id'},
{'id': 21, 'name': 'pulp_otdb_id'},
{'id': 22, 'name': 'is_tab_nr'},
{'id': 23, 'name': 'start_sbg_nr'},
{'id': 24, 'name': 'pulp_file_size'}
]
self.rarpc_mock.getResourceTypes.return_value = [
{'id': 0, 'name': 'rsp', 'unit_id': 0, 'units': 'rsp_channel_bit'},
{'id': 1, 'name': 'tbb', 'unit_id': 1, 'units': 'bytes'},
{'id': 2, 'name': 'rcu', 'unit_id': 2, 'units': 'rcu_board'},
{'id': 3, 'name': 'bandwidth', 'unit_id': 3, 'units': 'bits/second'},
{'id': 4, 'name': 'processor', 'unit_id': 4, 'units': 'cores'},
{'id': 5, 'name': 'storage', 'unit_id': 1, 'units': 'bytes'},
]
self.rarpc_mock.insertResourceClaims.return_value = {'ids': [1, 2]}
self.rarpc_mock.getResourceGroupNames.return_value = [{"name": "CEP4"}, {"name": "DRAGNET"}, {"name": "COBALT"}]
self.rarpc_mock.getResourceGroupMemberships.return_value = {'groups': [
{'resource_group_parent_id': None, 'resource_group_parent_name': None, 'resource_group_id': 0,
'resource_group_name': 'CORE', 'child_ids': [1, 2], 'parent_ids': [], 'resource_ids': [0, 1]},
{'resource_group_parent_id': None, 'resource_group_parent_name': None, 'resource_group_id': 3,
'resource_group_name': 'CS001', 'child_ids': [], 'parent_ids': [0], 'resource_ids': [212]},
{'resource_group_parent_id': None, 'resource_group_parent_name': None, 'resource_group_id': 1,
'resource_group_name': 'CEP4', 'child_ids': [], 'parent_ids': [0], 'resource_ids': [116, 117]},
# {'resource_group_parent_id': None, 'resource_group_parent_name': None, 'resource_group_id': 4, # TODO: WHY DOES ORDER MATTER IN HERE???
# 'resource_group_name': 'CS002', 'child_ids': [], 'parent_ids': [0], 'resource_ids': [214]}, # TODO: check what happens when this is moved after e.g. CS001; also comment in CS002 in RE response
],
'resources': [{'resource_group_parent_id': 0,
'resource_group_parent_name': 'CORE',
'resource_id': 0,
'resource_name': 'CS001',
'parent_group_ids': []},
{'resource_group_parent_id': 0,
'resource_group_parent_name': 'CORE',
'resource_id': 1,
'resource_name': 'CS002',
'parent_group_ids': []},
{'resource_group_parent_id': 1,
'resource_group_parent_name': 'CEP4',
'resource_id': 2,
'resource_name': 'CEP4_storage:/data',
'parent_group_ids': []}]}
# incomplete response but good enough for tests
self.rarpc_mock.getResources.return_value = [
{'id': 0, 'name': 'cpunode01_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 1, 'name': 'cpunode01_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 2, 'name': 'cpunode02_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 3, 'name': 'cpunode02_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 4, 'name': 'cpunode03_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 5, 'name': 'cpunode03_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 6, 'name': 'cpunode04_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 7, 'name': 'cpunode04_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 8, 'name': 'cpunode05_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 9, 'name': 'cpunode05_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 10, 'name': 'cpunode06_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 11, 'name': 'cpunode06_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 12, 'name': 'cpunode07_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 13, 'name': 'cpunode07_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 14, 'name': 'cpunode08_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 15, 'name': 'cpunode08_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 16, 'name': 'cpunode09_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 17, 'name': 'cpunode09_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 18, 'name': 'cpunode10_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 19, 'name': 'cpunode10_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 20, 'name': 'cpunode11_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 21, 'name': 'cpunode11_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 22, 'name': 'cpunode12_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 23, 'name': 'cpunode12_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 24, 'name': 'cpunode13_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 25, 'name': 'cpunode13_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 26, 'name': 'cpunode14_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 27, 'name': 'cpunode14_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 28, 'name': 'cpunode15_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 29, 'name': 'cpunode15_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 30, 'name': 'cpunode16_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 31, 'name': 'cpunode16_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 32, 'name': 'cpunode17_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 33, 'name': 'cpunode17_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 34, 'name': 'cpunode18_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 35, 'name': 'cpunode18_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 36, 'name': 'cpunode19_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 37, 'name': 'cpunode19_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 38, 'name': 'cpunode20_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 39, 'name': 'cpunode20_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 40, 'name': 'cpunode21_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 41, 'name': 'cpunode21_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 42, 'name': 'cpunode22_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 43, 'name': 'cpunode22_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 44, 'name': 'cpunode23_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 45, 'name': 'cpunode23_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 46, 'name': 'cpunode24_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 47, 'name': 'cpunode24_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 48, 'name': 'cpunode25_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 49, 'name': 'cpunode25_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 50, 'name': 'cpunode26_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 51, 'name': 'cpunode26_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 52, 'name': 'cpunode27_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 53, 'name': 'cpunode27_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 54, 'name': 'cpunode28_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 55, 'name': 'cpunode28_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 56, 'name': 'cpunode29_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 57, 'name': 'cpunode29_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 58, 'name': 'cpunode30_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 59, 'name': 'cpunode30_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 60, 'name': 'cpunode31_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 61, 'name': 'cpunode31_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 62, 'name': 'cpunode32_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 63, 'name': 'cpunode32_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 64, 'name': 'cpunode33_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 65, 'name': 'cpunode33_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 66, 'name': 'cpunode34_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 67, 'name': 'cpunode34_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 68, 'name': 'cpunode35_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 69, 'name': 'cpunode35_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 70, 'name': 'cpunode36_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 71, 'name': 'cpunode36_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 72, 'name': 'cpunode37_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 73, 'name': 'cpunode37_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 74, 'name': 'cpunode38_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 75, 'name': 'cpunode38_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 76, 'name': 'cpunode39_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 77, 'name': 'cpunode39_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 78, 'name': 'cpunode40_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 79, 'name': 'cpunode40_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 80, 'name': 'cpunode41_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 81, 'name': 'cpunode41_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 82, 'name': 'cpunode42_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 83, 'name': 'cpunode42_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 84, 'name': 'cpunode43_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 85, 'name': 'cpunode43_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 86, 'name': 'cpunode44_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 87, 'name': 'cpunode44_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 88, 'name': 'cpunode45_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 89, 'name': 'cpunode45_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 90, 'name': 'cpunode46_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 91, 'name': 'cpunode46_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 92, 'name': 'cpunode47_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 93, 'name': 'cpunode47_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 94, 'name': 'cpunode48_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 95, 'name': 'cpunode48_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 96, 'name': 'cpunode49_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 97, 'name': 'cpunode49_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 98, 'name': 'cpunode50_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 99, 'name': 'cpunode50_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 100, 'name': 'cbt001_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 101, 'name': 'cbt001_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 102, 'name': 'cbt002_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 103, 'name': 'cbt002_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 104, 'name': 'cbt003_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 105, 'name': 'cbt003_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 106, 'name': 'cbt004_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 107, 'name': 'cbt004_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 108, 'name': 'cbt005_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 109, 'name': 'cbt005_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 110, 'name': 'cbt006_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 111, 'name': 'cbt006_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 112, 'name': 'cbt007_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 113, 'name': 'cbt007_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 114, 'name': 'cbt008_bandwidth', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 115, 'name': 'cbt008_processors', 'type_id': 4, 'type_name': 'processor', 'unit_id': 4,
'unit': 'cores', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 116, 'name': 'CEP4_bandwidth:/data', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 117, 'name': 'CEP4_storage:/data', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 118, 'name': 'dragproc_bandwidth:/data', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 119, 'name': 'dragproc_storage:/data', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 120, 'name': 'drg01_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 121, 'name': 'drg01_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 122, 'name': 'drg01_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 123, 'name': 'drg01_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 124, 'name': 'drg02_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 125, 'name': 'drg02_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 126, 'name': 'drg02_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 127, 'name': 'drg02_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 128, 'name': 'drg03_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 129, 'name': 'drg03_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 130, 'name': 'drg03_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 131, 'name': 'drg03_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 132, 'name': 'drg04_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 133, 'name': 'drg04_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 134, 'name': 'drg04_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 135, 'name': 'drg04_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 136, 'name': 'drg05_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 137, 'name': 'drg05_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 138, 'name': 'drg05_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 139, 'name': 'drg05_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 140, 'name': 'drg06_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 141, 'name': 'drg06_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 142, 'name': 'drg06_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 143, 'name': 'drg06_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 144, 'name': 'drg07_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 145, 'name': 'drg07_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 146, 'name': 'drg07_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 147, 'name': 'drg07_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 148, 'name': 'drg08_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 149, 'name': 'drg08_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 150, 'name': 'drg08_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 151, 'name': 'drg08_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 152, 'name': 'drg09_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 153, 'name': 'drg09_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 154, 'name': 'drg09_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 155, 'name': 'drg09_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 156, 'name': 'drg10_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 157, 'name': 'drg10_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 158, 'name': 'drg10_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 159, 'name': 'drg10_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 160, 'name': 'drg11_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 161, 'name': 'drg11_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 162, 'name': 'drg11_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 163, 'name': 'drg11_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 164, 'name': 'drg12_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 165, 'name': 'drg12_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 166, 'name': 'drg12_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 167, 'name': 'drg12_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 168, 'name': 'drg13_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 169, 'name': 'drg13_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 170, 'name': 'drg13_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 171, 'name': 'drg13_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 172, 'name': 'drg14_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 173, 'name': 'drg14_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 174, 'name': 'drg14_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 175, 'name': 'drg14_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 176, 'name': 'drg15_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 177, 'name': 'drg15_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 178, 'name': 'drg15_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 179, 'name': 'drg15_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 180, 'name': 'drg16_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 181, 'name': 'drg16_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 182, 'name': 'drg16_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 183, 'name': 'drg16_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 184, 'name': 'drg17_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 185, 'name': 'drg17_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 186, 'name': 'drg17_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 187, 'name': 'drg17_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 188, 'name': 'drg18_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 189, 'name': 'drg18_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 190, 'name': 'drg18_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 191, 'name': 'drg18_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 192, 'name': 'drg19_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 193, 'name': 'drg19_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 194, 'name': 'drg19_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 195, 'name': 'drg19_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 196, 'name': 'drg20_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 197, 'name': 'drg20_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 198, 'name': 'drg20_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 199, 'name': 'drg20_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 200, 'name': 'drg21_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 201, 'name': 'drg21_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 202, 'name': 'drg21_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 203, 'name': 'drg21_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 204, 'name': 'drg22_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 205, 'name': 'drg22_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 206, 'name': 'drg22_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 207, 'name': 'drg22_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 208, 'name': 'drg23_bandwidth:/data1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 209, 'name': 'drg23_bandwidth:/data2', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3,
'unit': 'bits/second', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 210, 'name': 'drg23_storage:/data1', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 211, 'name': 'drg23_storage:/data2', 'type_id': 5, 'type_name': 'storage', 'unit_id': 1,
'unit': 'bytes', 'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 212, 'name': 'CS001rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 213, 'name': 'CS001tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 214, 'name': 'CS002rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 215, 'name': 'CS002tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 216, 'name': 'CS003rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 217, 'name': 'CS003tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 218, 'name': 'CS004rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 219, 'name': 'CS004tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 220, 'name': 'CS005rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 221, 'name': 'CS005tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 222, 'name': 'CS006rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 223, 'name': 'CS006tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 224, 'name': 'CS007rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 225, 'name': 'CS007tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 226, 'name': 'CS011rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 227, 'name': 'CS011tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 228, 'name': 'CS013rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 229, 'name': 'CS013tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 230, 'name': 'CS017rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 231, 'name': 'CS017tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 232, 'name': 'CS021rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 233, 'name': 'CS021tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 234, 'name': 'CS024rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 235, 'name': 'CS024tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 236, 'name': 'CS026rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 237, 'name': 'CS026tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 238, 'name': 'CS028rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 239, 'name': 'CS028tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 240, 'name': 'CS030rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 241, 'name': 'CS030tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 242, 'name': 'CS031rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 243, 'name': 'CS031tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 244, 'name': 'CS032rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 245, 'name': 'CS032tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 246, 'name': 'CS101rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 247, 'name': 'CS101tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 248, 'name': 'CS103rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 249, 'name': 'CS103tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 250, 'name': 'CS201rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 251, 'name': 'CS201tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 252, 'name': 'CS301rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 253, 'name': 'CS301tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 254, 'name': 'CS302rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 255, 'name': 'CS302tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 256, 'name': 'CS401rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 257, 'name': 'CS401tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 258, 'name': 'CS501rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 259, 'name': 'CS501tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 260, 'name': 'RS106rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 261, 'name': 'RS106tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 262, 'name': 'RS205rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 263, 'name': 'RS205tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 264, 'name': 'RS208rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 265, 'name': 'RS208tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 266, 'name': 'RS210rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 267, 'name': 'RS210tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 268, 'name': 'RS305rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 269, 'name': 'RS305tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 270, 'name': 'RS306rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 271, 'name': 'RS306tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 272, 'name': 'RS307rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 273, 'name': 'RS307tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 274, 'name': 'RS310rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 275, 'name': 'RS310tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 276, 'name': 'RS406rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 277, 'name': 'RS406tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 278, 'name': 'RS407rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 279, 'name': 'RS407tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 280, 'name': 'RS408rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 281, 'name': 'RS408tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 282, 'name': 'RS409rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 283, 'name': 'RS409tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 284, 'name': 'RS503rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 285, 'name': 'RS503tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 286, 'name': 'RS508rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 287, 'name': 'RS508tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 288, 'name': 'RS509rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 289, 'name': 'RS509tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 290, 'name': 'DE601rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 291, 'name': 'DE601tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 292, 'name': 'DE602rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 293, 'name': 'DE602tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 294, 'name': 'DE603rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 295, 'name': 'DE603tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 296, 'name': 'DE604rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 297, 'name': 'DE604tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 298, 'name': 'DE605rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 299, 'name': 'DE605tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 300, 'name': 'FR606rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 301, 'name': 'FR606tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 302, 'name': 'SE607rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 303, 'name': 'SE607tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 304, 'name': 'UK608rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 305, 'name': 'UK608tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 306, 'name': 'DE609rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 307, 'name': 'DE609tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 308, 'name': 'PL610rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 309, 'name': 'PL610tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 310, 'name': 'PL611rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 311, 'name': 'PL611tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 312, 'name': 'PL612rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 313, 'name': 'PL612tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 314, 'name': 'IE613rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 315, 'name': 'IE613tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 316, 'name': 'IS614rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 317, 'name': 'IS614tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 318, 'name': 'TEST1rcu', 'type_id': 2, 'type_name': 'rcu', 'unit_id': 2, 'unit': 'rcu_board',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 319, 'name': 'TEST1tbb', 'type_id': 1, 'type_name': 'tbb', 'unit_id': 1, 'unit': 'bytes',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 320, 'name': 'CS001chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 321, 'name': 'CS001bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 322, 'name': 'CS001chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 323, 'name': 'CS001bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 324, 'name': 'CS002chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 325, 'name': 'CS002bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 326, 'name': 'CS002chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 327, 'name': 'CS002bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 328, 'name': 'CS003chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 329, 'name': 'CS003bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 330, 'name': 'CS003chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 331, 'name': 'CS003bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 332, 'name': 'CS004chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 333, 'name': 'CS004bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 334, 'name': 'CS004chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 335, 'name': 'CS004bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 336, 'name': 'CS005chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 337, 'name': 'CS005bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 338, 'name': 'CS005chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 339, 'name': 'CS005bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 340, 'name': 'CS006chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 341, 'name': 'CS006bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 342, 'name': 'CS006chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 343, 'name': 'CS006bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 344, 'name': 'CS007chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 345, 'name': 'CS007bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 346, 'name': 'CS007chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 347, 'name': 'CS007bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 348, 'name': 'CS011chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 349, 'name': 'CS011bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 350, 'name': 'CS011chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 351, 'name': 'CS011bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 352, 'name': 'CS013chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 353, 'name': 'CS013bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 354, 'name': 'CS013chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 355, 'name': 'CS013bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 356, 'name': 'CS017chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 357, 'name': 'CS017bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 358, 'name': 'CS017chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 359, 'name': 'CS017bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 360, 'name': 'CS021chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 361, 'name': 'CS021bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 362, 'name': 'CS021chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 363, 'name': 'CS021bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 364, 'name': 'CS024chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 365, 'name': 'CS024bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 366, 'name': 'CS024chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 367, 'name': 'CS024bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 368, 'name': 'CS026chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 369, 'name': 'CS026bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 370, 'name': 'CS026chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 371, 'name': 'CS026bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 372, 'name': 'CS028chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 373, 'name': 'CS028bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 374, 'name': 'CS028chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 375, 'name': 'CS028bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 376, 'name': 'CS030chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 377, 'name': 'CS030bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 378, 'name': 'CS030chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 379, 'name': 'CS030bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 380, 'name': 'CS031chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 381, 'name': 'CS031bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 382, 'name': 'CS031chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 383, 'name': 'CS031bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 384, 'name': 'CS032chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 385, 'name': 'CS032bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 386, 'name': 'CS032chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 387, 'name': 'CS032bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 388, 'name': 'CS101chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 389, 'name': 'CS101bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 390, 'name': 'CS101chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 391, 'name': 'CS101bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 392, 'name': 'CS103chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 393, 'name': 'CS103bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 394, 'name': 'CS103chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 395, 'name': 'CS103bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 396, 'name': 'CS201chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 397, 'name': 'CS201bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 398, 'name': 'CS201chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 399, 'name': 'CS201bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 400, 'name': 'CS301chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 401, 'name': 'CS301bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 402, 'name': 'CS301chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 403, 'name': 'CS301bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 404, 'name': 'CS302chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 405, 'name': 'CS302bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 406, 'name': 'CS302chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 407, 'name': 'CS302bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 408, 'name': 'CS401chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 409, 'name': 'CS401bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 410, 'name': 'CS401chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 411, 'name': 'CS401bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 412, 'name': 'CS501chan0', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 413, 'name': 'CS501bw0', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 414, 'name': 'CS501chan1', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 415, 'name': 'CS501bw1', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 416, 'name': 'RS106chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 417, 'name': 'RS106bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 418, 'name': 'RS205chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 419, 'name': 'RS205bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 420, 'name': 'RS208chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 421, 'name': 'RS208bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 422, 'name': 'RS210chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 423, 'name': 'RS210bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 424, 'name': 'RS305chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 425, 'name': 'RS305bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 426, 'name': 'RS306chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 427, 'name': 'RS306bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 428, 'name': 'RS307chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 429, 'name': 'RS307bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 430, 'name': 'RS310chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 431, 'name': 'RS310bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 432, 'name': 'RS406chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 433, 'name': 'RS406bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 434, 'name': 'RS407chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 435, 'name': 'RS407bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 436, 'name': 'RS408chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 437, 'name': 'RS408bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 438, 'name': 'RS409chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 439, 'name': 'RS409bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 440, 'name': 'RS503chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 441, 'name': 'RS503bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 442, 'name': 'RS508chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 443, 'name': 'RS508bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 444, 'name': 'RS509chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 445, 'name': 'RS509bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 446, 'name': 'DE601chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 447, 'name': 'DE601bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 448, 'name': 'DE602chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 449, 'name': 'DE602bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 450, 'name': 'DE603chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 451, 'name': 'DE603bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 452, 'name': 'DE604chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 453, 'name': 'DE604bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 454, 'name': 'DE605chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 455, 'name': 'DE605bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 456, 'name': 'FR606chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 457, 'name': 'FR606bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 458, 'name': 'SE607chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 459, 'name': 'SE607bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 460, 'name': 'UK608chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 461, 'name': 'UK608bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 462, 'name': 'DE609chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 463, 'name': 'DE609bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 464, 'name': 'PL610chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 465, 'name': 'PL610bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 466, 'name': 'PL611chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 467, 'name': 'PL611bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 468, 'name': 'PL612chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 469, 'name': 'PL612bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 470, 'name': 'IE613chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 471, 'name': 'IE613bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 472, 'name': 'IS614chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 473, 'name': 'IS614bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 474, 'name': 'TEST1chan', 'type_id': 0, 'type_name': 'rsp', 'unit_id': 0, 'unit': 'rsp_channel_bit',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1},
{'id': 475, 'name': 'TEST1bw', 'type_id': 3, 'type_name': 'bandwidth', 'unit_id': 3, 'unit': 'bits/second',
'available_capacity': 10, 'used_capacity': 0, 'total_capacity': 10, 'active': 1}
]
self.rarpc_mock.getResourceClaims.return_value = []
self.rarpc_mock.getResourceAllocationConfig.return_value = [
{'name': 'max_fill_ratio_CEP4_storage', 'value': 0.85}, {'name': 'claim_timeout', 'value': 172800},
{'name': 'min_inter_task_delay', 'value': 60}, {'name': 'max_fill_ratio_CEP4_bandwidth', 'value': 0.75}
]
logger_patcher = mock.patch(
'lofar.sas.resourceassignment.resourceassigner.resource_availability_checker.logger'
)
self.addCleanup(logger_patcher.stop)
self.logger_mock = logger_patcher.start()
# Select logger output to see
def myprint(s, *args):
print(s % args if args else s, file=sys.stderr)
# self.logger_mock.debug.side_effect = myprint
self.logger_mock.info.side_effect = myprint
self.logger_mock.warn.side_effect = myprint
self.logger_mock.error.side_effect = myprint
self.uut = ResourceAvailabilityChecker(self.rarpc_mock)
def test_disk_claim(self):
    """Properties can be extracted from an input 'uv' file specification."""
    uv_spec = {
        'identification': 'mom.G732487.B0.1.C.SAP000.uv.dps',
        'sap_nr': 0,
        'properties': {
            'nr_of_uv_files': 120,
            'start_sb_nr': 0,
            'uv_file_size': 3617984960,
            'uv_otdb_id': 2
        }
    }
    props = self.uut._get_files_properties(files_dict={'uv': [uv_spec]},
                                           io_type='input')
    self.assertIsNotNone(props)
def test_get_current_resource_usage(self):
    """The availability checker yields a non-None resource usage list."""
    resources = self.uut._get_current_resource_usage()
    self.assertIsNotNone(resources)
def test_fit_single_resource_no_claimable_resources(self):
    """One needed resource but nothing claimable: fitting must fail."""
    needed = {5: 500}
    with self.assertRaises(CouldNotFindClaimException):
        self.uut._get_tentative_claim_objects_for_single_resource(needed, [])
def test_fit_single_resources_fit_one_disk(self):
    """
    Given 1 needed resource, and 1 claimable resource that fits,
    fit_single_resources should return successfully.
    """
    needed_resources_by_type_id = {5: 500}
    claimable_resources_list = [{5: {'id': 1, 'claimable_capacity': 1000, 'available_capacity': 1000}}]
    # Fixed: removed a dead local -- a second ResourceAvailabilityChecker
    # was constructed into an unused variable; the assertion exercises
    # self.uut built in setUp.
    claims = self.uut._get_tentative_claim_objects_for_single_resource(needed_resources_by_type_id, claimable_resources_list)
    self.assertIsNotNone(claims)
def test_fit_single_resources_not_fit_one_disk(self):
    """A single claimable resource that is too small cannot be claimed."""
    needed = {5: 500}
    claimable = [{5: {'id': 1, 'claimable_capacity': 400, 'available_capacity': 400}}]
    with self.assertRaises(CouldNotFindClaimException):
        self.uut._get_tentative_claim_objects_for_single_resource(needed, claimable)
def test_fit_single_resources_fit_multiple_disks(self):
    """Two claimable resources of which one is big enough: claiming succeeds."""
    needed = {5: 500}
    claimable = [
        {5: {'id': 1, 'claimable_capacity': 400, 'available_capacity': 400}},
        {5: {'id': 1, 'claimable_capacity': 1000, 'available_capacity': 1000}},
    ]
    claims = self.uut._get_tentative_claim_objects_for_single_resource(needed, claimable)
    self.assertIsNotNone(claims)
def test_fit_single_resources_not_fit_multiple_resources(self):
    """Two needed resource types, two option sets, each failing on a
    different type: claiming must fail."""
    needed = {3: 3000, 5: 500}
    claimable = [
        {3: {'id': 0, 'claimable_capacity': 3000, 'available_capacity': 3000},
         5: {'id': 1, 'claimable_capacity': 400, 'available_capacity': 400}},   # type 5 too small
        {3: {'id': 0, 'claimable_capacity': 1000, 'available_capacity': 1000},
         5: {'id': 1, 'claimable_capacity': 1000, 'available_capacity': 1000}}, # type 3 too small
    ]
    with self.assertRaises(CouldNotFindClaimException):
        self.uut._get_tentative_claim_objects_for_single_resource(needed, claimable)
def test_fit_single_resources_fit_multiple_resources(self):
    """Two needed resource types, two option sets, one of which satisfies
    both types: claiming succeeds."""
    needed = {3: 3000, 5: 500}
    claimable = [
        {3: {'id': 0, 'claimable_capacity': 3000, 'available_capacity': 3000},
         5: {'id': 1, 'claimable_capacity': 400, 'available_capacity': 400}},   # type 5 too small
        {3: {'id': 0, 'claimable_capacity': 3000, 'available_capacity': 3000},
         5: {'id': 1, 'claimable_capacity': 1000, 'available_capacity': 1000}}, # both fit
    ]
    claims = self.uut._get_tentative_claim_objects_for_single_resource(needed, claimable)
    self.assertIsNotNone(claims)
def test_fit_multiple_resources_not_fit(self):
    """Four copies are needed but only three fit across the option sets:
    claiming must fail."""
    needed = {3: 1000, 5: 100}
    claimable = [
        {3: {'id': 0, 'claimable_capacity': 3000, 'available_capacity': 3000},
         5: {'id': 1, 'claimable_capacity': 200, 'available_capacity': 200}},   # fits 2x
        {3: {'id': 0, 'claimable_capacity': 1000, 'available_capacity': 1000},
         5: {'id': 1, 'claimable_capacity': 1000, 'available_capacity': 1000}}, # fits 1x
    ]
    with self.assertRaises(CouldNotFindClaimException):
        self.uut._get_tentative_claim_objects_for_multiple_resources(needed, 4, claimable)
def test_fit_multiple_resources_fit(self):
    """All four needed copies fit across the two option sets: success."""
    needed = {3: 1000, 5: 100}
    claimable = [
        {3: {'id': 0, 'claimable_capacity': 3000, 'available_capacity': 3000},
         5: {'id': 1, 'claimable_capacity': 200, 'available_capacity': 200}},   # fits 2x
        {3: {'id': 0, 'claimable_capacity': 2000, 'available_capacity': 2000},
         5: {'id': 1, 'claimable_capacity': 1000, 'available_capacity': 1000}}, # fits 2x
    ]
    claims = self.uut._get_tentative_claim_objects_for_multiple_resources(needed, 4, claimable)
    self.assertIsNotNone(claims)
def test_fit_multiple_resources_logs_created_claim_per_needed_resource_type(self):
    """Every created tentative claim is reported through the logger."""
    needed = {3: 1000, 5: 100}
    claimable = [
        {3: {'id': 0, 'claimable_capacity': 3000, 'available_capacity': 3000},
         5: {'id': 1, 'claimable_capacity': 200, 'available_capacity': 200}},   # fits 2x
        {3: {'id': 0, 'claimable_capacity': 2000, 'available_capacity': 2000},
         5: {'id': 1, 'claimable_capacity': 1000, 'available_capacity': 1000}}] # fits 2x
    self.uut._get_tentative_claim_objects_for_multiple_resources(needed, 4, claimable)
    claim_type_3 = {'status': 'tentative', 'resource_type_id': 3, 'resource_id': 0, 'claim_size': 1000,
                    'starttime': None, 'used_rcus': None, 'endtime': None, 'properties': []}
    claim_type_5 = {'status': 'tentative', 'resource_type_id': 5, 'resource_id': 1, 'claim_size': 100,
                    'starttime': None, 'used_rcus': None, 'endtime': None, 'properties': []}
    # One (type-3, type-5) claim pair per requested copy.
    expected_claims = [claim_type_3, claim_type_5] * 4
    self.logger_mock.info.assert_any_call('fit_multiple_resources: created claims: %s', expected_claims)
def test_get_is_claimable_invalid_resource_group(self):
    """An unknown root_resource_group makes get_is_claimable raise ValueError."""
    estimates = [{'root_resource_group': 'MIDDLE EARTH',
                  'resource_count': 1,
                  'resource_types': {'storage': 100}}]
    claimable = [{'id': self.cep4storage_resource_id,
                  'type_id': 5,
                  'claimable_capacity': 400,
                  'available_capacity': 400,
                  'active': True}]
    with self.assertRaises(ValueError):
        self.uut.get_is_claimable(estimates, claimable)
def test_get_is_claimable_fit(self):
    """All 4 of 4 needed resource copies fit: get_is_claimable succeeds."""
    estimates = [{'root_resource_group': 'CEP4',
                  'resource_count': 4,
                  'resource_types': {'bandwidth': 1000, 'storage': 100}}]
    claimable = [{'id': self.cep4bandwidth_resource_id,
                  'type_id': 3,
                  'claimable_capacity': 4000,
                  'available_capacity': 4000,
                  'active': True},
                 {'id': self.cep4storage_resource_id,
                  'type_id': 5,
                  'claimable_capacity': 400,
                  'available_capacity': 400,
                  'active': True}]
    result = self.uut.get_is_claimable(estimates, claimable)
    self.assertEqual(len(result), len(claimable))
def test_get_is_claimable_not_fit(self):
    """Only 3 of the 4 needed storage copies fit: get_is_claimable raises."""
    estimates = [{'root_resource_group': 'CEP4',
                  'resource_count': 4,
                  'resource_types': {'bandwidth': 1000, 'storage': 100}}]
    claimable = [{'id': self.cep4bandwidth_resource_id,
                  'type_id': 3,
                  'claimable_capacity': 4000,
                  'available_capacity': 4000, 'active': True},
                 {'id': self.cep4storage_resource_id,
                  'type_id': 5,
                  'claimable_capacity': 300,
                  'available_capacity': 300,
                  'active': True}]
    with self.assertRaises(CouldNotFindClaimException):
        self.uut.get_is_claimable(estimates, claimable)
def test_get_is_claimable_partial_fit(self):
    """Two identical estimates of which only one can be satisfied by the
    claimable resources: the checker currently raises."""
    estimates = [{'root_resource_group': 'CEP4',
                  'resource_count': 4,
                  'resource_types': {'bandwidth': 1000, 'storage': 100}},
                 {'root_resource_group': 'CEP4',
                  'resource_count': 4,
                  'resource_types': {'bandwidth': 1000, 'storage': 100}}]
    claimable = [{'id': self.cep4bandwidth_resource_id,
                  'type_id': 3,
                  'claimable_capacity': 5000,
                  'available_capacity': 5000,
                  'active': True},
                 {'id': self.cep4storage_resource_id,
                  'type_id': 5,
                  'claimable_capacity': 500,
                  'available_capacity': 500,
                  'active': True}]
    # TODO: verify with Jan David whether this test case (returning a partial fit) should still succeed or whether
    # an exception is expected to be raised
    with self.assertRaises(CouldNotFindClaimException):
        self.uut.get_is_claimable(estimates, claimable)
    # TODO: remove if uut raising exception is what's expected
    # claimable_resources = self.uut.get_is_claimable(estimates, claimable)
    # self.assertEqual(len(claimable_resources), 2)  # storage & bandwidth for estimates[0]
if __name__ == '__main__':
    # Run the full test suite when this module is executed directly.
    unittest.main()
|
import logging
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from pylons import response
from pkg_resources import resource_stream
from lxml import etree
from ckan.lib.base import request, config, abort
from ckan.controllers.api import ApiController as BaseApiController
from ckan.model import Session
from ckanext.harvest.model import HarvestObject, HarvestObjectExtra
from ckanext.spatial.lib import get_srid, validate_bbox, bbox_query
log = logging.getLogger(__name__)
class ApiController(BaseApiController):
    """API endpoint for spatial (bounding-box) package queries."""

    def spatial_query(self):
        """Return ids of packages whose extents intersect the given bbox.

        Expects a ``bbox`` request parameter of the form
        ``minx,miny,maxx,maxy`` plus an optional ``crs`` parameter;
        aborts with HTTP 400 when bbox is missing or malformed.
        """
        error_400_msg = \
            'Please provide a suitable bbox parameter [minx,miny,maxx,maxy]'

        # PEP 8 idiom: "x not in y" rather than "not x in y".
        if 'bbox' not in request.params:
            abort(400, error_400_msg)

        bbox = validate_bbox(request.params['bbox'])
        if not bbox:
            abort(400, error_400_msg)

        srid = get_srid(request.params.get('crs')) if 'crs' in \
            request.params else None
        extents = bbox_query(bbox, srid)

        # Renamed the local so it no longer shadows the builtin "format".
        fmt = request.params.get('format', '')
        return self._output_results(extents, fmt)

    def _output_results(self, extents, format=None):
        """Serialize *extents* as a {count, results} dict of package ids.

        NOTE(review): the ``format`` argument is currently ignored and only
        the default output is produced -- confirm whether other formats
        were ever intended.
        """
        ids = [extent.package_id for extent in extents]
        output = dict(count=len(ids), results=ids)
        return self._finish_ok(output)
class HarvestMetadataApiController(BaseApiController):
    """Serve harvested metadata documents as raw XML or rendered HTML."""

    def _get_content(self, id):
        """Return the stored content of harvest object *id*, or None when
        no such object exists."""
        obj = Session.query(HarvestObject) \
            .filter(HarvestObject.id == id).first()
        if obj:
            return obj.content
        else:
            return None

    def _get_original_content(self, id):
        """Return the original (pre-transform) document stored in the
        'original_document' extra of harvest object *id*, or None."""
        extra = Session.query(HarvestObjectExtra).join(HarvestObject) \
            .filter(HarvestObject.id == id) \
            .filter(
                HarvestObjectExtra.key == 'original_document'
            ).first()
        if extra:
            return extra.value
        else:
            return None

    def _transform_to_html(self, content, xslt_package=None, xslt_path=None):
        """Transform the XML string *content* to HTML via the given XSLT
        stylesheet, falling back to the bundled GEMINI2 stylesheet."""
        xslt_package = xslt_package or __name__
        xslt_path = xslt_path or \
            '../templates/ckanext/spatial/gemini2-html-stylesheet.xsl'

        # optimise -- read transform only once and compile rather
        # than at each request
        with resource_stream(xslt_package, xslt_path) as style:
            style_xml = etree.parse(style)
            transformer = etree.XSLT(style_xml)

        xml = etree.parse(StringIO(content.encode('utf-8')))
        html = transformer(xml)
        result = etree.tostring(html, pretty_print=True)

        response.headers['Content-Type'] = 'text/html; charset=utf-8'
        # Bug fix: Content-Length must describe the body actually returned
        # (the serialized HTML), not the length of the source XML string.
        response.headers['Content-Length'] = len(result)

        return result

    def _get_xslt(self, original=False):
        """Resolve the configured '<package>:<path>' XSLT reference for
        harvested content (or its original form). Returns (package, path),
        both None when unconfigured or malformed."""
        if original:
            config_option = \
                'ckanext.spatial.harvest.xslt_html_content_original'
        else:
            config_option = 'ckanext.spatial.harvest.xslt_html_content'

        xslt_package = None
        xslt_path = None
        xslt = config.get(config_option, None)
        if xslt:
            if ':' in xslt:
                xslt = xslt.split(':')
                xslt_package = xslt[0]
                xslt_path = xslt[1]
            else:
                log.error(
                    'XSLT should be defined in the form <package>:<path>' +
                    ', eg ckanext.myext:templates/my.xslt')
        return xslt_package, xslt_path

    def display_xml_original(self, id):
        """Return the original harvested document for *id* as raw XML."""
        content = self._get_original_content(id)

        if not content:
            abort(404)

        # Prepend an XML declaration when the first line lacks one.
        if '<?xml' not in content.split('\n')[0]:
            content = u'<?xml version="1.0" encoding="UTF-8"?>\n' + content

        body = content.encode('utf-8')

        response.headers['Content-Type'] = 'application/xml; charset=utf-8'
        # Bug fix: Content-Length is the byte count of the encoded body and
        # must be computed AFTER the declaration is prepended (the original
        # set a character count of the unmodified string).
        response.headers['Content-Length'] = len(body)
        return body

    def display_html(self, id):
        """Render the harvested content of object *id* as HTML."""
        content = self._get_content(id)

        # NOTE(review): treats empty-string content as missing, while
        # display_html_original checks `is None` -- confirm which is intended.
        if not content:
            abort(404)

        xslt_package, xslt_path = self._get_xslt()
        return self._transform_to_html(content, xslt_package, xslt_path)

    def display_html_original(self, id):
        """Render the original (pre-transform) document of *id* as HTML."""
        content = self._get_original_content(id)

        if content is None:
            abort(404)

        xslt_package, xslt_path = self._get_xslt(original=True)
        return self._transform_to_html(content, xslt_package, xslt_path)
|
VERSION = '0.7.1'

import sys, rpcalc, argparse

# Command-line interface definition for the rpcalc launcher.
parser = argparse.ArgumentParser(prog='rpcalc',
description="A reverse polish notation calculator written in Python 3.",
epilog="For more information, see qguv.github.io/rpcalc")

# -s: optional hard limit on the number of stack entries.
parser.add_argument("-s", "--stack-size",
help="Limits the stack to a certain number of entries",
type=int,
metavar='N',
default=None)

# -i: values pre-pushed onto the stack, in the order given.
parser.add_argument("-i", "--initial-values",
help="Initializes the stack with certain values already pushed. Accepts numbers separated by spaces. Values are pushed in order.",
# The type is a string here to enable both int and float input. It will be
# converted later, and throw an error if appropriate.
type=str,
nargs='+',
metavar="X",
default=None)

# -e: stack size becomes exactly the number of -i values (validated below).
parser.add_argument("-e", "--exclusive",
help="Sets the stack length to the amount of initialized values given with -i.",
action="store_true")

parser.add_argument("--version",
help="Prints the program version and exits.",
action="store_true")

args = parser.parse_args()
def panic(code, message):
    """Print a padded error message, then terminate with the given exit code."""
    print("\nerror! {} \n".format(message))
    sys.exit(code)
# --version: print the version string and exit immediately.
if args.version:
    print(VERSION)
    sys.exit()

# Validate flag combinations: -e requires -i and excludes -s.
if args.exclusive and not args.initial_values:
    panic(2, "-e (--exclusive) can only be used with -i (--initial-values)")
elif args.exclusive and args.stack_size:
    panic(2, "-e (--exclusive) can not be used with -s (--stack-size)")

if args.stack_size and args.initial_values:
    if args.stack_size < len(args.initial_values):
        panic(2, "too many initial values for allocated stack size")

# Determine the stack limit: -e pins it to the number of initial values.
if args.exclusive:
    stackLength = len(args.initial_values)
else:
    stackLength = args.stack_size or None

# Convert the initial values (parsed as strings so ints and floats both work).
if args.initial_values:
    try:
        values = [float(x) for x in args.initial_values]
    except ValueError:
        panic(2, "-i (--initial-values) only accepts numbers")
else:
    values = []

# Pad a fixed-size stack with zeroes beneath the provided values.
if stackLength:
    if len(values) < stackLength:
        # Bug fix: the original did `padding *= [0.0]` on an int, which
        # raises TypeError at runtime; build the zero-padding list explicitly.
        padding = [0.0] * (stackLength - len(values))
        values = padding + values

sys.exit(rpcalc.main(limit=stackLength, values=values))
|
from __future__ import print_function, unicode_literals, division, absolute_import
import argparse
import sys
import os
import io
from libsemeval2014task5.format import Reader
from libsemeval2014task5.common import log, runcmd, red, yellow, white
VERSION = "2.0"  # Evaluation script version.
def main():
    """Parse command-line arguments, run the evaluation, and -- when an MT
    evaluation directory is supplied -- compute the external MT metrics."""
    parser = argparse.ArgumentParser(description="Evaluation")
    parser.add_argument('--mtevaldir',type=str, help="Path to MT evaluation scripts",action='store',default="")
    parser.add_argument('--ref',type=str,help='Reference file', action='store',required=True)
    parser.add_argument('--out',type=str,help='Output file', action='store',required=True)
    parser.add_argument('--workdir','-w',type=str,help='Work directory', action='store',default=".")
    parser.add_argument('-i',dest='casesensitive',help='Measure translation accuracy without regard for case',action='store_false',default=True)
    parser.add_argument('-a',dest='oof',help='Out of five evaluation, considers up to four additional alternatives in system output',action='store_true',default=False)
    args = parser.parse_args()

    # Core comparison of system output against the reference.
    totalavgaccuracy, totalwordavgaccuracy, totalavgrecall, matrexsrcfile, matrextgtfile, matrexoutfile = evaluate(Reader(args.ref), Reader(args.out), args.mtevaldir, args.workdir, args.casesensitive, args.oof)

    # Output prefix = output filename with its extension stripped.
    outprefix = '.'.join(args.out.split('.')[:-1])
    if args.mtevaldir:
        # Optionally run the external MT metric scripts (BLEU, WER, ...).
        mtscore(args.mtevaldir, matrexsrcfile, matrextgtfile, matrexoutfile, totalavgaccuracy, totalwordavgaccuracy, totalavgrecall, outprefix, args.workdir)
def comparefragments(outfragment, reffragment, casesensitive, oof):
    """Score one system output fragment against its reference fragment.

    Returns a word-level match score in [0, 1] and, as a side effect,
    updates the module-level counters (matches, misses, wordmatches,
    wordmisses, missedrecall) that evaluate() resets per sentence.
    """
    global matches, wordmatches, misses, wordmisses, missedrecall
    # Token-sequence equality, optionally case-insensitive.
    if casesensitive:
        eq = lambda x,y: " ".join(x) == " ".join(y)
    else:
        eq = lambda x,y: " ".join(x).lower() == " ".join(y).lower()
    if not outfragment.value or len(outfragment.value) == 0:
        # Empty system output: counts as a full miss and a recall miss.
        missedrecall += 1
        misses += 1
        wordmisses += 1
        return 0
    else:
        # Candidate outputs: the primary value plus (in out-of-five mode)
        # up to four alternatives.
        outvalues = [outfragment.value]
        if oof and outfragment.alternatives:
            for alt in outfragment.alternatives[:4]:
                outvalues.append( alt.value )
        # Valid references: the primary value plus all its alternatives.
        refvalues = [reffragment.value]
        for alt in reffragment.alternatives:
            refvalues.append( alt.value )
        wordmatchscores = []
        for outvalue in outvalues:
            for refvalue in refvalues:
                if eq(outvalue, refvalue):
                    # Exact match.
                    wordmatchscore = 1
                elif len(outvalue) >= len(refvalue):
                    # Output longer: search for the reference as a
                    # contiguous token window inside the output.
                    partialmatch = False
                    for i in range(0, len(outvalue)):
                        if eq(outvalue[i:i+len(refvalue)], refvalue):
                            partialmatch = True
                            break
                    if partialmatch:
                        # Partial credit proportional to the overlap ratio.
                        wordmatchscore = len(refvalue) / len(outvalue)
                    else:
                        wordmatchscore = 0
                elif len(outvalue) < len(refvalue):
                    # Reference longer: search for the output inside it.
                    partialmatch = False
                    for i in range(0, len(refvalue)):
                        if eq(refvalue[i:i+len(outvalue)], outvalue):
                            partialmatch = True
                            break
                    if partialmatch:
                        wordmatchscore = len(outvalue) / len(refvalue)
                    else:
                        wordmatchscore = 0
                wordmatchscores.append(wordmatchscore)
        # Keep the best score over all candidate/reference pairs.
        wordmatchscore = max(wordmatchscores)
        wordmatches += wordmatchscore
        wordmisses += (1 - wordmatchscore)
        if wordmatchscore == 1:
            matches += 1
        else:
            misses += 1
        return wordmatchscore
def evaluate(ref, out, mtevaldir, workdir, casesensitive=True, oof=False):
    """Compare system output against the reference, sentence by sentence.

    Writes Matrex-style src/ref/tst XML files next to the output file and
    returns (avg accuracy, avg word accuracy, avg recall, srcfile,
    tgtfile, outfile).
    """
    global matches, wordmatches, misses, wordmisses, missedrecall
    ref_it = iter(ref)
    out_it = iter(out)
    accuracies = []
    wordaccuracies = []
    recalls = []
    # Matrex-format XML files consumed by the external MT metric scripts.
    matrexsrcfile = out.filename.replace('.xml','') + '.matrex-src.xml'
    matrextgtfile = out.filename.replace('.xml','') + '.matrex-ref.xml'
    matrexoutfile = out.filename.replace('.xml','') + '.matrex-out.xml'
    matrexsrc = io.open(matrexsrcfile ,'w', encoding='utf-8')
    matrextgt = io.open(matrextgtfile ,'w', encoding='utf-8')
    matrexout = io.open(matrexoutfile ,'w', encoding='utf-8')
    for t,f in (('src',matrexsrc),('ref',matrextgt),('tst',matrexout)):
        f.write( "<" + t + "set setid=\"mteval\" srclang=\"src\" trglang=\"tgt\">\n")
        f.write("<DOC docid=\"colibrita\" sysid=\"colibrita\">\n")
    # Iterate reference and output in lockstep; stop at the shorter file.
    while True:
        try:
            ref_s = next(ref_it)
            out_s = next(out_it)
        except StopIteration:
            break
        if ref_s.id != out_s.id:
            raise Exception("Sentence ID mismatch in reference and output! " + str(ref_s.id) + " vs " + str(out_s.id))
        elif ref_s.input != out_s.input:
            raise Exception("Sentence input mismatch in reference and output! " , ref_s.input, " vs " , out_s.input)
        elif not ref_s.ref:
            raise Exception("No reference for sentence " + str(ref_s.id))
        elif not out_s.output:
            raise Exception("No output for sentence " + str(out_s.id))
        matrexsrc.write("<seg id=\"" + str(ref_s.id) + "\">" + ref_s.inputstr() + "</seg>\n")
        matrextgt.write("<seg id=\"" + str(ref_s.id) + "\">" + ref_s.refstr() + "</seg>\n")
        matrexout.write("<seg id=\"" + str(out_s.id) + "\">" + out_s.outputstr() + "</seg>\n")
        # Per-sentence counters consumed and updated by comparefragments().
        matches = 0
        misses = 0
        wordmatches = 0
        wordmisses = 0
        missedrecall = 0
        outputfragments = out_s.outputfragmentsdict()
        reffragments = ref_s.reffragmentsdict()
        for inputfragment in ref_s.inputfragmentsdict().values():
            if not inputfragment.id in reffragments:
                raise Exception("No reference fragment found for fragment " + str(inputfragment.id) + " in sentence " + str(ref_s.id))
            if not inputfragment.id in outputfragments:
                # Untranslated fragment: full miss on every counter.
                print("WARNING: Input fragment " + str(inputfragment.id) + " in sentence " + str(ref_s.id) + " is not translated!", file=sys.stderr)
                misses += 1
                wordmisses += 1
                missedrecall += 1
            else:
                comparefragments( outputfragments[inputfragment.id], reffragments[inputfragment.id], casesensitive, oof)
        if missedrecall == matches +misses:
            recall = 0.0
        else:
            # NOTE(review): this evaluates to total/(total - missed), which
            # is >= 1 whenever missedrecall > 0 -- the ratio looks inverted;
            # confirm the intended recall definition.
            recall = (matches+misses)/((matches+misses)-missedrecall)
        print("Recall for sentence " + str(ref_s.id) + " = " + str(recall) )
        recalls.append(recall)
        accuracy = matches/(matches+misses)
        print("Accuracy for sentence " + str(ref_s.id) + " = " + str(accuracy))
        accuracies.append(accuracy)
        wordaccuracy = wordmatches/(wordmatches+wordmisses)
        print("Word accuracy for sentence " + str(ref_s.id) + " = " + str(wordaccuracy))
        wordaccuracies.append(wordaccuracy)
    # NOTE(review): if no sentences were processed, the totals below are
    # never assigned and the return statement raises UnboundLocalError.
    if recalls:
        totalavgrecall = sum(recalls) / len(recalls)
        print("Total average recall = " + str(totalavgrecall))
    if accuracies:
        totalavgaccuracy = sum(accuracies) / len(accuracies)
        print("Total average accuracy = " + str(totalavgaccuracy))
    if wordaccuracies:
        totalwordavgaccuracy = sum(wordaccuracies) / len(wordaccuracies)
        print("Total word average accuracy = " + str(totalwordavgaccuracy))
    for t,f in (('src',matrexsrc),('ref',matrextgt),('tst',matrexout)):
        f.write("</DOC>\n</" + t + "set>")
        f.close()
    return totalavgaccuracy, totalwordavgaccuracy, totalavgrecall,matrexsrcfile, matrextgtfile, matrexoutfile
def mtscore(mtevaldir, sourcexml, refxml, targetxml, totalavgaccuracy, totalwordavgaccuracy, totalavgrecall, outprefix, WORKDIR = '.'):
    """Run external MT metric scripts (BLEU, WER, PER, METEOR, NIST/BLEU,
    TER) on the Matrex XML files and write per-metric score files plus a
    one-line summary file.

    Each metric is skipped with a warning when its script is not found
    under *mtevaldir*. Returns True when no errors occurred.
    """
    per = 0
    wer = 0
    bleu = 0
    meteor = 0
    nist = 0
    ter = 0
    # Locations of the external evaluation tools, relative to mtevaldir.
    EXEC_MATREX_WER = mtevaldir + '/eval/WER_v01.pl'
    EXEC_MATREX_PER = mtevaldir + '/eval/PER_v01.pl'
    EXEC_MATREX_BLEU = mtevaldir + '/eval/bleu-1.04.pl'
    EXEC_MATREX_METEOR = mtevaldir + '/meteor-0.6/meteor.pl'
    EXEC_MATREX_MTEVAL = mtevaldir + '/mteval-v11b.pl'
    EXEC_MATREX_TER = mtevaldir + '/tercom.jar'
    EXEC_PERL = 'perl'
    EXEC_JAVA = 'java'
    errors = False
    # --- BLEU -------------------------------------------------------------
    if EXEC_MATREX_BLEU and os.path.exists(EXEC_MATREX_BLEU):
        if not runcmd(EXEC_PERL + ' ' + EXEC_MATREX_BLEU + " -r " + refxml + ' -t ' + targetxml + ' -s ' + sourcexml + ' -ci > ' + outprefix + '.bleu.score', 'Computing BLEU score'): errors = True
        if not errors:
            try:
                f = io.open(WORKDIR + '/' + outprefix + '.bleu.score')
                for line in f:
                    if line[0:9] == "BLEUr1n4,":
                        bleu = float(line[10:].strip())
                        print("BLEU score: ", bleu, file=sys.stderr)
                f.close()
            except Exception as e:
                log("Error reading bleu.score:" + str(e),red)
                errors = True
    else:
        log("Skipping BLEU (no script found ["+EXEC_MATREX_BLEU+"])",yellow)
    # --- WER --------------------------------------------------------------
    if EXEC_MATREX_WER and os.path.exists(EXEC_MATREX_WER):
        if not runcmd(EXEC_PERL + ' ' + EXEC_MATREX_WER + " -r " + refxml + ' -t ' + targetxml + ' -s ' + sourcexml + ' > ' + outprefix + '.wer.score', 'Computing WER score'): errors = True
        if not errors:
            try:
                f = io.open(WORKDIR + '/' + outprefix + '.wer.score','r',encoding='utf-8')
                for line in f:
                    if line[0:11] == "WER score =":
                        wer = float(line[12:19].strip())
                        log("WER score: " + str(wer), white)
                f.close()
            except Exception as e:
                log("Error reading wer.score:" + str(e),red)
                errors = True
    else:
        log("Skipping WER (no script found ["+EXEC_MATREX_WER+"]) ",yellow)
    # --- PER --------------------------------------------------------------
    if EXEC_MATREX_PER and os.path.exists(EXEC_MATREX_PER):
        if not runcmd(EXEC_PERL + ' ' + EXEC_MATREX_PER + " -r " + refxml + ' -t ' + targetxml + ' -s ' + sourcexml + ' > ' + outprefix + '.per.score', 'Computing PER score'): errors = True
        if not errors:
            try:
                f = io.open(WORKDIR + '/' + outprefix +'.per.score','r',encoding='utf-8')
                for line in f:
                    if line[0:11] == "PER score =":
                        per = float(line[12:19].strip())
                        log("PER score: " + str(per), white)
                f.close()
            except Exception as e:
                log("Error reading per.score" + str(e),red)
                errors = True
    else:
        log("Skipping PER (no script found ["+EXEC_MATREX_PER+"])",yellow)
    # --- METEOR -----------------------------------------------------------
    if EXEC_MATREX_METEOR and os.path.exists(EXEC_MATREX_METEOR):
        if not runcmd(EXEC_PERL + ' -I ' + os.path.dirname(EXEC_MATREX_METEOR) + ' ' + EXEC_MATREX_METEOR + " -s colibrita -r " + refxml + ' -t ' + targetxml + ' --modules "exact" > ' + outprefix + '.meteor.score', 'Computing METEOR score'): errors = True
        if not errors:
            try:
                f = io.open(WORKDIR + '/' + outprefix + '.meteor.score','r',encoding='utf-8')
                for line in f:
                    if line[0:6] == "Score:":
                        meteor = float(line[7:].strip())
                        log("METEOR score: " + str(meteor), white)
                f.close()
            except Exception as e:
                log("Error reading meteor.score:" + str(e),red)
                errors = True
    else:
        log("Skipping METEOR (no script found ["+EXEC_MATREX_METEOR+"])",yellow)
    # --- NIST & BLEU (mteval) ---------------------------------------------
    if EXEC_MATREX_MTEVAL and os.path.exists(EXEC_MATREX_MTEVAL):
        if not runcmd(EXEC_PERL + ' ' + EXEC_MATREX_MTEVAL + " -r " + refxml + ' -t ' + targetxml + ' -s ' + sourcexml + ' > ' + outprefix + '.mteval.score', 'Computing NIST & BLEU scores'): errors = True
        if not errors:
            try:
                f = io.open(WORKDIR + '/' + outprefix + '.mteval.score','r',encoding='utf-8')
                for line in f:
                    if line[0:12] == "NIST score =":
                        nist = float(line[13:21].strip())
                        log("NIST score: ", nist)
                    # Both scores can appear on the same line at fixed offsets.
                    if line[21:33] == "BLEU score =":
                        try:
                            bleu2 = float(line[34:40].strip())
                            # Keep the mteval BLEU only when the earlier BLEU
                            # script produced nothing, or when the two
                            # disagree by more than 0.01 (then keep the max).
                            if bleu == 0:
                                bleu = bleu2
                                log("BLEU score: " + str(bleu), white)
                            elif abs(bleu - bleu2) > 0.01:
                                log("blue score from MTEVAL scripts differs too much: " + str(bleu) + " vs " + str(bleu2) + ", choosing highest score")
                                if bleu2 > bleu:
                                    bleu = bleu2
                            else:
                                log("BLEU score (not stored): " + str(float(line[34:40].strip())))
                        except:
                            raise
                f.close()
            except Exception as e:
                log("Error reading mteval.score: " + str(e),red)
                errors = True
    else:
        log("Skipping MTEVAL (BLEU & NIST) (no script found)", yellow)
    # --- TER --------------------------------------------------------------
    if EXEC_MATREX_TER and os.path.exists(EXEC_MATREX_TER):
        if not runcmd(EXEC_JAVA + ' -jar ' + EXEC_MATREX_TER + " -r " + refxml + ' -h ' + targetxml + ' > ' + outprefix + '.ter.score', 'Computing TER score'): errors = True
        if not errors:
            try:
                f = io.open(WORKDIR +'/' + outprefix + '.ter.score','r',encoding='utf-8')
                for line in f:
                    if line[0:10] == "Total TER:":
                        ter = float(line[11:].strip().split(' ')[0])
                        log("TER score: ", ter,white)
                f.close()
            except Exception as e:
                log("Error reading ter.score: " + str(e),red)
    else:
        log("Skipping TER (no script found)",yellow)
    # --- summary ----------------------------------------------------------
    log("SCORE SUMMARY\n===================\n")
    f = io.open(WORKDIR + '/' + outprefix + '.summary.score','w')
    s = "Accuracy Word-Accuracy Recall BLEU METEOR NIST TER WER PER"
    f.write(s+ "\n")
    log(s)
    s = str(totalavgaccuracy) + " " + str(totalwordavgaccuracy) + " " + str(totalavgrecall) + " " + str(bleu) + " " + str(meteor) + " " + str(nist) + " " + str(ter) + " " + str(wer) + " " + str(per)
    f.write(s + "\n")
    log(s)
    f.close()
    return not errors
|
import sys  # System module

# Marker lines that GNU as appends at the end of a listing.  The "no error"
# check must run first because it is a superset of the error marker text.
noerror="NO UNDEFINED SYMBOLS"
noerror_len=len(noerror)
error="UNDEFINED SYMBOLS"
error_len=len(error)

def ckas(filename):
    """Scan a GNU as listing file for undefined symbols.

    Prints nothing and returns silently when the listing ends with the
    "NO UNDEFINED SYMBOLS" marker.  When the "UNDEFINED SYMBOLS" marker is
    found, prints that line and every following line (the symbol names).
    Exits the process with status 1 if the file cannot be opened.
    """
    try:
        listing=open(filename,"rt")
    except IOError:
        print("ckaslst.py: error - could not open GNU as listing: %s" % filename)
        sys.exit(1)
    # `with` guarantees the file is closed on every exit path; the original
    # code leaked the handle when returning early on a clean listing.
    with listing:
        errorline=False
        for line in listing:
            if errorline:
                # Encountered undefined symbols, so print them
                print(line[:-1])
                continue
            if len(line)>=noerror_len and line[:noerror_len]==noerror:
                # For GNU as listing without undefined symbols, silently end
                return
            if len(line)>=error_len and line[:error_len]==error:
                # There are undefined symbols, print the line and all undefined symbols
                print(line[:-1])
                errorline=True
        # If we get here and errorline is not set, then something went wrong
        if not errorline:
            print("ckaslst.py: error - did not recognize end of GNU as listing: %s" \
                % filename)
if __name__ == "__main__":
    # sys.argv[0] == the script name
    # sys.argv[1] == the listing file name
    if len(sys.argv)!=2:
        # BUG FIX: the subtraction must be parenthesized.  "%" binds tighter
        # than binary "-", so the original "... % len(sys.argv)-1" formatted
        # first and then raised TypeError (str - int) instead of printing.
        print("ckaslst.py: error - expected one command line argument, found %s" \
            % (len(sys.argv)-1))
        sys.exit(1)
    ckas(sys.argv[1])
    sys.exit(0)
|
"""Pychemqt, Chemical Engineering Process simulator
Copyright (C) 2009-2017, Juan José Gómez Romera <jjgomera@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>."""
from PyQt5.QtWidgets import QApplication
from iapws._utils import getphase
from lib import unidades
class Thermo(object):
    """Class with common functionality for special thermo model, children class
    are iapws, coolprop, refprop"""
    # Calculation state shared by all backends; overwritten per instance.
    _bool = False
    status = 0
    msg = "Unknown variables"
    kwargs = {}
    # Default energy properties, replaced by calculo() when a state is solved.
    h = 0
    s = 0
    u = 0
    a = 0
    g = 0
    def __init__(self, **kwargs):
        # Copy the class-level kwargs template so instances never mutate it.
        self.kwargs = self.__class__.kwargs.copy()
        self.__call__(**kwargs)
    def _new(self, **kw):
        """Create a new instance"""
        return self.__class__(**kw)
    def __call__(self, **kwargs):
        """Merge new input variables and solve the state when possible."""
        self.kwargs.update(kwargs)
        # NOTE(review): in this base class `calculable` is a plain method, so
        # this truth test on the bound method is always True -- presumably
        # subclasses redefine it as a property/flag; confirm in children.
        if self.calculable:
            self.status = 1
            self.calculo()
            self.msg = "Solved"
    def calculable(self):
        """Hook: subclasses decide whether enough inputs are defined."""
        pass
    def calculo(self):
        """Hook: subclasses perform the actual state calculation."""
        pass
    def _cp0(self, cp0):
        """Set ideal properties to state"""
        self.v0 = unidades.SpecificVolume(cp0["v"])
        self.rho0 = unidades.Density(1./cp0["v"])
        self.h0 = unidades.Enthalpy(cp0["h"])
        self.u0 = unidades.Enthalpy(self.h0-self.P*self.v0)
        self.s0 = unidades.SpecificHeat(cp0["s"])
        self.a0 = unidades.Enthalpy(self.u0-self.T*self.s0)
        self.g0 = unidades.Enthalpy(self.h0-self.T*self.s0)
        self.cp0 = unidades.SpecificHeat(cp0["cp"])
        self.cv0 = unidades.SpecificHeat(cp0["cv"])
        self.cp0_cv = unidades.Dimensionless(self.cp0/self.cv0)
        self.w0 = unidades.Speed(cp0["w"])
        self.gamma0 = self.cp0_cv
        # Molar counterparts derived with the molecular weight self.M.
        self.rhoM0 = unidades.MolarDensity(self.rho0/self.M)
        self.hM0 = unidades.MolarEnthalpy(self.h0*self.M)
        self.uM0 = unidades.MolarEnthalpy(self.u0*self.M)
        self.sM0 = unidades.MolarSpecificHeat(self.s0*self.M)
        self.aM0 = unidades.MolarEnthalpy(self.a0*self.M)
        self.gM0 = unidades.MolarEnthalpy(self.g0*self.M)
        self.cpM0 = unidades.MolarSpecificHeat(self.cp0*self.M)
        self.cvM0 = unidades.MolarSpecificHeat(self.cv0*self.M)
    def derivative(self, z, x, y, fase):
        """Calculate generic partial derivative: (δz/δx)y
        where x, y, z can be: P, T, v, u, h, s, g, a"""
        # Tables of (δq/δT)P and (δq/δP)T for each magnitude q; any other
        # partial derivative is composed from these two in the quotient below.
        dT = {"P": 0,
              "T": 1,
              "v": fase.v*fase.alfav,
              "u": fase.cp-self.P*fase.v*fase.alfav,
              "h": fase.cp,
              "s": fase.cp/self.T,
              "g": -fase.s,
              "a": -self.P*fase.v*fase.alfav-fase.s}
        dP = {"P": 1,
              "T": 0,
              "v": -fase.v*fase.kappa,
              "u": fase.v*(self.P*fase.kappa-self.T*fase.alfav),
              "h": fase.v*(1-self.T*fase.alfav),
              "s": -fase.v*fase.alfav,
              "g": fase.v,
              "a": self.P*fase.v*fase.kappa}
        return (dP[z]*dT[y]-dT[z]*dP[y])/(dP[x]*dT[y]-dT[x]*dP[y])
    def getphase(self, **kwargs):
        """Return fluid phase
        kwarg:
            phase: direct msg
            Tc, Pc, T, P, x, region: to calculate by iapws"""
        # Map from the english phase id to its translated display string.
        data = {
            "Supercritical fluid": QApplication.translate(
                "pychemqt", "Supercritical fluid"),
            "Gas": QApplication.translate("pychemqt", "Gas"),
            "Compressible liquid": QApplication.translate(
                "pychemqt", "Compressible liquid"),
            "Critical point": QApplication.translate(
                "pychemqt", "Critical point"),
            "Saturated vapor": QApplication.translate(
                "pychemqt", "Saturated vapor"),
            "Saturated liquid": QApplication.translate(
                "pychemqt", "Saturated liquid"),
            "Two phases": QApplication.translate("pychemqt", "Two phases"),
            "Vapour": QApplication.translate("pychemqt", "Vapour"),
            "Liquid": QApplication.translate("pychemqt", "Liquid"),
            "Unknown": QApplication.translate("pychemqt", "Unknown")}
        if "phase" in kwargs:
            phase = kwargs["phase"]
        else:
            # Module-level helper imported from iapws._utils, not recursion.
            phase = getphase(**kwargs)
        return data[phase]
    @classmethod
    def properties(cls):
        """Return the property table: (translated name, attribute key, unit)."""
        l = [
            (QApplication.translate("pychemqt", "Temperature"), "T",
             unidades.Temperature),
            (QApplication.translate("pychemqt", "Reduced temperature"), "Tr",
             unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Pressure"), "P",
             unidades.Pressure),
            (QApplication.translate("pychemqt", "Reduced Pressure"), "Pr",
             unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Quality"), "x",
             unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Density"), "rho",
             unidades.Density),
            (QApplication.translate("pychemqt", "Molar Density"), "rhoM",
             unidades.MolarDensity),
            (QApplication.translate("pychemqt", "Volume"), "v",
             unidades.SpecificVolume),
            (QApplication.translate("pychemqt", "Enthalpy"), "h",
             unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Molar Enthalpy"), "hM",
             unidades.MolarEnthalpy),
            (QApplication.translate("pychemqt", "Entropy"), "s",
             unidades.SpecificHeat),
            (QApplication.translate("pychemqt", "Molar Entropy"), "sM",
             unidades.MolarSpecificHeat),
            (QApplication.translate("pychemqt", "Internal Energy"), "u",
             unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Molar Internal Energy"), "uM",
             unidades.MolarEnthalpy),
            (QApplication.translate("pychemqt", "Helmholtz Free Energy"), "a",
             unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Molar Helmholtz Free Energy"),
             "aM", unidades.MolarEnthalpy),
            (QApplication.translate("pychemqt", "Gibbs Free Energy"), "g",
             unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Molar Gibbs Free Energy"),
             "gM", unidades.MolarEnthalpy),
            (QApplication.translate(
                "pychemqt", "Specific isochoric heat capacity"), "cv",
                unidades.SpecificHeat),
            (QApplication.translate(
                "pychemqt", "Molar Specific isochoric heat capacity"), "cvM",
                unidades.MolarSpecificHeat),
            (QApplication.translate
             ("pychemqt", "Specific isobaric heat capacity"), "cp",
             unidades.SpecificHeat),
            (QApplication.translate(
                "pychemqt", "Molar Specific isobaric heat capacity"), "cpM",
                unidades.MolarSpecificHeat),
            (QApplication.translate("pychemqt", "Heat capacities ratio"),
             "cp_cv", unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Speed sound"), "w",
             unidades.Speed),
            (QApplication.translate("pychemqt", "Compresibility"), "Z",
             unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Fugacity coefficient"), "fi",
             unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Fugacity"), "f",
             unidades.Pressure),
            (QApplication.translate("pychemqt", "Isoentropic exponent"),
             "gamma", unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Volume Expansivity"), "alfav",
             unidades.InvTemperature), # 1/V dV/dt = -1/D dD/dt
            (QApplication.translate("pychemqt", "Isothermal compresibility"),
             "kappa", unidades.InvPressure), # -1/V (dV/dP)T = 1/D (dD/dP)T
            (QApplication.translate("pychemqt", "Adiabatic compresibility"),
             "kappas", unidades.InvPressure), # -1/V (dV/dP)s = 1/D (dD/dP)s
            (QApplication.translate(
                "pychemqt", "Relative pressure coefficient"), "alfap",
                unidades.InvTemperature), # 1/P (dP/dT)v
            (QApplication.translate(
                "pychemqt", "Isothermal stress coefficient"), "betap",
                unidades.Density), # -1/P (dP/dv)T = 1/P (dP/dD)T
            (QApplication.translate("pychemqt", "Joule-Thomson coefficient"),
             "joule", unidades.TemperaturePressure),
            (QApplication.translate(
                "pychemqt", "Isothermal throttling coefficient"), "deltat",
                unidades.EnthalpyPressure),
            (QApplication.translate("pychemqt", "Vaporization heat"), "Hvap",
             unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Vaporization entropy"),
             "Svap", unidades.SpecificHeat),
            (QApplication.translate("pychemqt", "Viscosity"), "mu",
             unidades.Viscosity),
            (QApplication.translate("pychemqt", "Thermal conductivity"), "k",
             unidades.ThermalConductivity),
            (QApplication.translate("pychemqt", "Kinematic viscosity"), "nu",
             unidades.Diffusivity),
            (QApplication.translate("pychemqt", "Thermal diffusivity"), "alfa",
             unidades.Diffusivity),
            (QApplication.translate("pychemqt", "Surface tension"), "sigma",
             unidades.Tension),
            (QApplication.translate("pychemqt", "Prandtl number"), "Prandt",
             unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Ideal gas Specific volume"),
             "v0", unidades.SpecificVolume),
            (QApplication.translate("pychemqt", "Ideal gas Density"), "rho0",
             unidades.Density),
            (QApplication.translate("pychemqt", "Ideal gas Specific enthalpy"),
             "h0", unidades.Enthalpy),
            (QApplication.translate(
                "pychemqt", "Ideal gas Specific internal energy"), "u0",
                unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Ideal gas Specific entropy"),
             "s0", unidades.SpecificHeat),
            (QApplication.translate(
                "pychemqt", "Ideal gas Specific Helmholtz free energy"),
                "a0", unidades.Enthalpy),
            (QApplication.translate
             ("pychemqt", "Ideal gas Specific Gibbs free energy"), "g0",
             unidades.Enthalpy),
            (QApplication.translate(
                "pychemqt", "Ideal gas Specific isobaric heat capacity"),
                "cp0", unidades.SpecificHeat),
            (QApplication.translate(
                "pychemqt", "Ideal gas Specific isochoric heat capacity"),
                "cv0", unidades.SpecificHeat),
            (QApplication.translate(
                "pychemqt", "Ideal gas heat capacities ratio"), "cp0_cv",
                unidades.Dimensionless),
            (QApplication.translate(
                "pychemqt", "Ideal gas Isoentropic exponent"), "gamma0",
                unidades.Dimensionless)]
        return l
    @classmethod
    def propertiesName(cls):
        """Return the translated display name of each property."""
        return [prop[0] for prop in cls.properties()]
    @classmethod
    def propertiesKey(cls):
        """Return the attribute key of each property."""
        return [prop[1] for prop in cls.properties()]
    @classmethod
    def propertiesUnit(cls):
        """Return the unidades unit class of each property."""
        return [prop[2] for prop in cls.properties()]
    @classmethod
    def _dictUnit(cls):
        """Return a mapping from property key to its unit class."""
        d = {}
        for name, key, unit in cls.properties():
            d[key] = unit
        return d
    @classmethod
    def propertiesGlobal(cls):
        """List properties only availables for global stream, not defined by
        phase"""
        prop = ["T", "Tr", "P", "Pr", "x", "Hvap", "Svap", "v0", "rho0", "h0",
                "u0", "s0", "a0", "g0", "cp0", "cv0", "cp0_cv", "gamma0"]
        return prop
    @classmethod
    def propertiesPhase(cls):
        """List properties availables for single phase"""
        single = cls.propertiesGlobal()
        total = cls.propertiesKey()
        prop = []
        for p in total:
            if p not in single:
                prop.append(p)
        return prop
    def _fillCorriente(self, corriente):
        """Procedure to populate the corriente with the global advanced
        properties
        corriente: instance of corriente to populate"""
        for prop in self.propertiesGlobal():
            corriente.__setattr__(prop, self.__getattribute__(prop))
    def _writeGlobalState(self, corriente, state):
        """Procedure to populate a state dict with the global advanced
        properties
        corriente: instance of corriente to populate
        state: dict properties"""
        for prop in self.propertiesGlobal():
            state[prop] = corriente.__getattribute__(prop)
    def _readGlobalState(self, corriente, state):
        """Restore global properties (with units) from a state dict onto
        corriente; the listed keys hold per-component lists."""
        units = self._dictUnit()
        for prop in self.propertiesGlobal():
            # These properties are stored as lists, one value per component.
            if prop in ["K", "csat", "dpdt_sat", "cv2p", "chempot"]:
                value = [units[prop](p) for p in state[prop]]
            else:
                value = units[prop](state[prop])
            corriente.__setattr__(prop, value)
    def fillNone(self, fase):
        """Fill properties in null phase with a explicative msg"""
        fase._bool = False
        # Pick the explanatory text from quality and reduced conditions.
        if self.x == 0:
            txt = QApplication.translate("pychemqt", "Subcooled")
        elif self.Tr < 1 and self.Pr < 1:
            txt = QApplication.translate("pychemqt", "Superheated")
        elif self.Tr == 1 and self.Pr == 1:
            txt = QApplication.translate("pychemqt", "Critic point")
        else:
            txt = QApplication.translate("pychemqt", "Supercritical")
        for key in self.propertiesPhase():
            fase.__setattr__(key, txt)
    def writeStatetoJSON(self, state, fase):
        """Serialize this phase's properties into state[fase] as a plain
        dict; an unsolved phase (_bool False) stores an empty dict."""
        fluid = {}
        if self._bool:
            fluid["M"] = self.M
            fluid["v"] = self.v
            fluid["h"] = self.h
            fluid["s"] = self.s
            fluid["u"] = self.u
            fluid["a"] = self.a
            fluid["g"] = self.g
            fluid["cv"] = self.cv
            fluid["cp"] = self.cp
            fluid["cp/cv"] = self.cp_cv
            fluid["w"] = self.w
            fluid["Z"] = self.Z
            fluid["alfav"] = self.alfav
            fluid["kappa"] = self.kappa
            fluid["kappas"] = self.kappas
            fluid["mu"] = self.mu
            fluid["k"] = self.k
            fluid["nu"] = self.nu
            fluid["Prandt"] = self.Prandt
            fluid["alfa"] = self.alfa
            fluid["joule"] = self.joule
            fluid["deltat"] = self.deltat
            fluid["gamma"] = self.gamma
            fluid["alfap"] = self.alfap
            fluid["betap"] = self.betap
            fluid["fi"] = self.fi
            fluid["f"] = self.f
            fluid["volFlow"] = self.Q
            fluid["massFlow"] = self.caudalmasico
            fluid["molarFlow"] = self.caudalmolar
            fluid["fraction"] = self.fraccion
            fluid["massFraction"] = self.fraccion_masica
            fluid["massUnitFlow"] = self.caudalunitariomasico
            fluid["molarUnitFlow"] = self.caudalunitariomolar
        state[fase] = fluid
    def readStatefromJSON(self, fluid):
        """Rebuild phase properties (with units) from a dict produced by
        writeStatetoJSON; a falsy input leaves the phase untouched."""
        if fluid:
            self._bool = True
            self.M = unidades.Dimensionless(fluid["M"])
            self.v = unidades.SpecificVolume(fluid["v"])
            self.rho = unidades.Density(1/self.v)
            self.h = unidades.Enthalpy(fluid["h"])
            self.s = unidades.SpecificHeat(fluid["s"])
            self.u = unidades.Enthalpy(fluid["u"])
            self.a = unidades.Enthalpy(fluid["a"])
            self.g = unidades.Enthalpy(fluid["g"])
            self.cv = unidades.SpecificHeat(fluid["cv"])
            self.cp = unidades.SpecificHeat(fluid["cp"])
            self.cp_cv = unidades.Dimensionless(fluid["cp/cv"])
            self.w = unidades.Speed(fluid["w"])
            self.Z = unidades.Dimensionless(fluid["Z"])
            self.alfav = unidades.InvTemperature(fluid["alfav"])
            self.kappa = unidades.InvPressure(fluid["kappa"])
            self.kappas = unidades.InvPressure(fluid["kappas"])
            self.mu = unidades.Viscosity(fluid["mu"])
            self.k = unidades.ThermalConductivity(fluid["k"])
            self.nu = unidades.Diffusivity(fluid["nu"])
            self.Prandt = unidades.Dimensionless(fluid["Prandt"])
            self.alfa = unidades.Diffusivity(fluid["alfa"])
            self.joule = unidades.TemperaturePressure(fluid["joule"])
            self.deltat = unidades.EnthalpyPressure(fluid["deltat"])
            self.gamma = unidades.Dimensionless(fluid["gamma"])
            self.alfap = unidades.Dimensionless(fluid["alfap"])
            self.betap = unidades.Dimensionless(fluid["betap"])
            self.fi = [unidades.Dimensionless(f) for f in fluid["fi"]]
            self.f = [unidades.Pressure(f) for f in fluid["f"]]
            self.Q = unidades.VolFlow(fluid["volFlow"])
            self.caudalmasico = unidades.MassFlow(fluid["massFlow"])
            self.caudalmolar = unidades.MolarFlow(fluid["molarFlow"])
            self.fraccion = [unidades.Dimensionless(x)
                             for x in fluid["fraction"]]
            self.fraccion_masica = [unidades.Dimensionless(x)
                                    for x in fluid["massFraction"]]
            self.caudalunitariomasico = [unidades.MassFlow(x)
                                         for x in fluid["massUnitFlow"]]
            self.caudalunitariomolar = [unidades.MolarFlow(x)
                                        for x in fluid["molarUnitFlow"]]
            # Molar properties are not stored; rebuild them from M.
            self.rhoM = unidades.MolarDensity(self.rho/self.M)
            self.hM = unidades.MolarEnthalpy(self.h/self.M)
            self.sM = unidades.MolarSpecificHeat(self.s/self.M)
            self.uM = unidades.MolarEnthalpy(self.u/self.M)
            self.aM = unidades.MolarEnthalpy(self.a/self.M)
            self.gM = unidades.MolarEnthalpy(self.g/self.M)
            self.cvM = unidades.MolarSpecificHeat(self.cv/self.M)
            self.cpM = unidades.MolarSpecificHeat(self.cp/self.M)
class ThermoWater(Thermo):
    """Water-specific Thermo variant adding the dielectric constant and the
    refractive index to the common property set."""
    @classmethod
    def properties(cls):
        """Return the base property table with the water-only entries
        spliced in just before the last 11 rows."""
        extra = [
            (QApplication.translate("pychemqt", "Dielectric constant"),
             "epsilon", unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Refractive index"),
             "n", unidades.Dimensionless)]
        props = Thermo.properties()[:]
        # Equivalent to repeated insert(-11, ...): both entries end up, in
        # order, at 11 positions from the end of the table.
        props[-11:-11] = extra
        return props
    def writeStatetoJSON(self, state, fase):
        """Serialize the common state, then append the water extras."""
        Thermo.writeStatetoJSON(self, state, fase)
        if self._bool:
            state[fase]["n"] = self.n
            state[fase]["epsilon"] = self.epsilon
    def readStatefromJSON(self, fluid):
        """Restore the common state, then the water extras (with units)."""
        Thermo.readStatefromJSON(self, fluid)
        if fluid:
            self.epsilon = unidades.Dimensionless(fluid["epsilon"])
            self.n = unidades.Dimensionless(fluid["n"])
class ThermoAdvanced(Thermo):
    """Custom specified thermo instance to add special properties for advanced
    model as coolprop, refprop and meos"""
    @classmethod
    def properties(cls):
        """Extend the base table with derivative/virial properties.

        NOTE(review): every tuple is inserted at the fixed index 34, so the
        entries appear in the table in REVERSE order of this list -- confirm
        that ordering is intentional.
        """
        prop = Thermo.properties()[:]
        l = [
            (QApplication.translate(
                "pychemqt", "Isentropic temperature-pressure"),
             "betas", unidades.TemperaturePressure),
            (QApplication.translate("pychemqt", "Gruneisen parameter"),
             "Gruneisen", unidades.Dimensionless),
            (QApplication.translate("pychemqt", "2nd virial coefficient"),
             "virialB", unidades.SpecificVolume),
            (QApplication.translate("pychemqt", "3er virial coefficient"),
             "virialC", unidades.SpecificVolume_square),
            ("(dp/dT)_rho", "dpdT_rho", unidades.PressureTemperature),
            ("(dp/drho)_T", "dpdrho_T", unidades.PressureDensity),
            ("(drho/dT)_P", "drhodT_P", unidades.DensityTemperature),
            ("(drho/dP)_T", "drhodP_T", unidades.DensityPressure),
            ("(dh/dT)_rho", "dhdT_rho", unidades.SpecificHeat),
            ("(dh/dP)_T", "dhdP_T", unidades.EnthalpyPressure),
            ("(dh/dT)_P", "dhdT_P", unidades.SpecificHeat),
            ("(dh/drho)_T", "dhdrho_T", unidades.EnthalpyDensity),
            ("(dh/dP)_rho", "dhdP_rho", unidades.EnthalpyPressure),
            (QApplication.translate(
                "pychemqt", "Isothermal expansion coefficient"),
             "kt", unidades.Dimensionless),
            (QApplication.translate(
                "pychemqt", "Isentropic expansion coefficient"),
             "ks", unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Adiabatic bulk modulus"),
             "Ks", unidades.Pressure),
            (QApplication.translate("pychemqt", "Isothermal bulk modulus"),
             "Kt", unidades.Pressure),
            # Z_rho - (Z-1) over the density, m³/kg
            (QApplication.translate("pychemqt", "Internal pressure"),
             "IntP", unidades.Pressure),
            (QApplication.translate(
                "pychemqt", "Negative reciprocal temperature"),
             "invT", unidades.InvTemperature),
            (QApplication.translate("pychemqt", "Specific heat input"),
             "hInput", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Dielectric constant"),
             "epsilon", unidades.Dimensionless)]
        for p in l:
            prop.insert(34, p)
        return prop
    @classmethod
    def propertiesGlobal(cls):
        """List properties only availables for global stream, not defined by
        phase"""
        prop = Thermo.propertiesGlobal()
        prop.append("invT")
        return prop
    def writeStatetoJSON(self, state, fase):
        """Serialize the common state, then the advanced-model extras."""
        Thermo.writeStatetoJSON(self, state, fase)
        if self._bool:
            state[fase]["betas"] = self.betas
            state[fase]["Gruneisen"] = self.Gruneisen
            state[fase]["virialB"] = self.virialB
            state[fase]["virialC"] = self.virialC
            state[fase]["dpdT_rho"] = self.dpdT_rho
            state[fase]["dpdrho_T"] = self.dpdrho_T
            state[fase]["drhodT_P"] = self.drhodT_P
            state[fase]["drhodP_T"] = self.drhodP_T
            state[fase]["dhdT_rho"] = self.dhdT_rho
            state[fase]["dhdP_T"] = self.dhdP_T
            state[fase]["dhdT_P"] = self.dhdT_P
            state[fase]["dhdrho_T"] = self.dhdrho_T
            state[fase]["dhdP_rho"] = self.dhdP_rho
            state[fase]["kt"] = self.kt
            state[fase]["ks"] = self.ks
            state[fase]["Ks"] = self.Ks
            state[fase]["Kt"] = self.Kt
            state[fase]["IntP"] = self.IntP
            state[fase]["invT"] = self.invT
            state[fase]["hInput"] = self.hInput
            state[fase]["epsilon"] = self.epsilon
    def readStatefromJSON(self, fluid):
        """Restore the common state, then the advanced-model extras."""
        Thermo.readStatefromJSON(self, fluid)
        if fluid:
            self.betas = unidades.TemperaturePressure(fluid["betas"])
            self.Gruneisen = unidades.Dimensionless(fluid["Gruneisen"])
            self.virialB = unidades.SpecificVolume(fluid["virialB"])
            self.virialC = unidades.SpecificVolume_square(fluid["virialC"])
            self.dpdT_rho = unidades.PressureTemperature(fluid["dpdT_rho"])
            self.dpdrho_T = unidades.PressureDensity(fluid["dpdrho_T"])
            self.drhodT_P = unidades.DensityTemperature(fluid["drhodT_P"])
            self.drhodP_T = unidades.DensityPressure(fluid["drhodP_T"])
            self.dhdT_rho = unidades.SpecificHeat(fluid["dhdT_rho"])
            self.dhdP_T = unidades.EnthalpyPressure(fluid["dhdP_T"])
            self.dhdT_P = unidades.SpecificHeat(fluid["dhdT_P"])
            self.dhdrho_T = unidades.EnthalpyDensity(fluid["dhdrho_T"])
            self.dhdP_rho = unidades.EnthalpyPressure(fluid["dhdP_rho"])
            self.kt = unidades.Dimensionless(fluid["kt"])
            # NOTE(review): "ks" is declared Dimensionless in properties()
            # but restored as InvPressure here -- confirm which is right.
            self.ks = unidades.InvPressure(fluid["ks"])
            self.Ks = unidades.Pressure(fluid["Ks"])
            self.Kt = unidades.Pressure(fluid["Kt"])
            self.IntP = unidades.Pressure(fluid["IntP"])
            self.invT = unidades.InvTemperature(fluid["invT"])
            self.hInput = unidades.Enthalpy(fluid["hInput"])
            self.epsilon = unidades.Dimensionless(fluid["epsilon"])
class ThermoRefProp(ThermoAdvanced):
    """Custom thermo instance adding the extra properties exposed by the
    refprop backend on top of ThermoAdvanced."""
    @classmethod
    def properties(cls):
        """Append the refprop-only entries to the advanced property table."""
        prop = ThermoAdvanced.properties()[:]
        l = [
            (QApplication.translate("pychemqt", "Ideal Pressure"),
             "P0", unidades.Pressure),
            (QApplication.translate("pychemqt", "Residual Pressure"),
             "P_Pideal", unidades.Pressure),
            (QApplication.translate("pychemqt", "K value"),
             "K", unidades.Dimensionless),
            (QApplication.translate(
                "pychemqt", "Heat Capacity along the saturation line"),
             "csat", unidades.SpecificHeat),
            ("dP/dT [sat]", "dpdt_sat", unidades.PressureTemperature),
            (QApplication.translate("pychemqt", "Cv two phases"),
             "cv2p", unidades.SpecificHeat),
            (QApplication.translate("pychemqt", "Excess volume"),
             "vE", unidades.SpecificVolume),
            (QApplication.translate("pychemqt", "Excess internal energy"),
             "uE", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Excess enthalpy"),
             "hE", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Excess entropy"),
             "sE", unidades.SpecificHeat),
            (QApplication.translate("pychemqt", "Excess Helmholtz energy"),
             "aE", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Excess Gibbs energy"),
             "gE", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Residual pressure"),
             "pr", unidades.SpecificVolume),
            (QApplication.translate("pychemqt", "Residual internal energy"),
             "ur", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Residual enthalpy"),
             "hr", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Residual entropy"),
             "sr", unidades.SpecificHeat),
            (QApplication.translate("pychemqt", "Residual Helmholtz energy"),
             "ar", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Residual Gibbs energy"),
             "gr", unidades.Enthalpy),
            (QApplication.translate(
                "pychemqt", "Residual isobaric heat capacity"),
             "cpr", unidades.SpecificHeat),
            (QApplication.translate(
                "pychemqt", "Residual isochoric heat capacity"),
             "cvr", unidades.SpecificHeat),
            (QApplication.translate("pychemqt", "Supercompressibility factor"),
             "fpv", unidades.Dimensionless),
            (QApplication.translate("pychemqt", "Chemical potential"),
             "chempot", unidades.Enthalpy),
            (QApplication.translate("pychemqt", "Fourth virial coefficient"),
             "virialD", unidades.Dimensionless),
            (QApplication.translate(
                "pychemqt", "Second acoustic virial coefficient"),
             "virialBa", unidades.SpecificVolume),
            (QApplication.translate(
                "pychemqt", "Third acoustic virial coefficient"),
             "virialCa", unidades.SpecificVolume_square),
            ("dC/dT", "dCdt", unidades.Dimensionless),
            ("d²C/dT²", "dCdt2", unidades.Dimensionless),
            ("dB/dT", "dBdt", unidades.Dimensionless),
            ("b12", "b12", unidades.SpecificVolume),
            (QApplication.translate("pychemqt", "Critical flow factor"),
             "cstar", unidades.Dimensionless)]
        for p in l:
            prop.append(p)
        return prop
    @classmethod
    def propertiesGlobal(cls):
        """List properties only availables for global stream, not defined by
        phase"""
        prop = ThermoAdvanced.propertiesGlobal()
        new = ["P0", "P_Pideal", "K", "csat", "dpdt_sat", "cv2p", "vE", "uE",
               "hE", "sE", "aE", "gE", "pr", "ur", "hr", "sr", "ar", "gr",
               "cpr", "cvr", "fpv", "chempot", "b12", "cstar"]
        for p in new:
            prop.append(p)
        return prop
    def writeStatetoJSON(self, state, fase):
        """Serialize the advanced state plus the virial-related extras.

        Only the virial properties are stored per phase here; the other
        refprop-only keys are global (see propertiesGlobal)."""
        ThermoAdvanced.writeStatetoJSON(self, state, fase)
        if self._bool:
            state[fase]["virialD"] = self.virialD
            state[fase]["virialBa"] = self.virialBa
            state[fase]["virialCa"] = self.virialCa
            state[fase]["dCdt"] = self.dCdt
            state[fase]["dCdt2"] = self.dCdt2
            state[fase]["dBdt"] = self.dBdt
    def readStatefromJSON(self, fluid):
        """Restore the advanced state plus the virial-related extras."""
        ThermoAdvanced.readStatefromJSON(self, fluid)
        if fluid:
            self.virialD = unidades.Dimensionless(fluid["virialD"])
            self.virialBa = unidades.SpecificVolume(fluid["virialBa"])
            self.virialCa = unidades.SpecificVolume_square(fluid["virialCa"])
            self.dCdt = unidades.Dimensionless(fluid["dCdt"])
            self.dCdt2 = unidades.Dimensionless(fluid["dCdt2"])
            self.dBdt = unidades.Dimensionless(fluid["dBdt"])
|
from google.appengine.ext import ndb
class Game(ndb.Model):
    """A lottery game definition."""
    name = ndb.StringProperty()
    description = ndb.StringProperty()
    # Weekdays on which draws take place, encoded 0 (Sunday) .. 6 (Saturday)
    # -- see the draw_* convenience properties below.
    draws = ndb.IntegerProperty(repeated=True)
    price = ndb.FloatProperty()
    @property
    def draw_sun(self):
        return 0 in self.draws
    @property
    def draw_mon(self):
        return 1 in self.draws
    @property
    def draw_tue(self):
        return 2 in self.draws
    @property
    def draw_wed(self):
        return 3 in self.draws
    @property
    def draw_thu(self):
        return 4 in self.draws
    @property
    def draw_fri(self):
        return 5 in self.draws
    @property
    def draw_sat(self):
        return 6 in self.draws
class Draw(ndb.Model):
    """One draw event: the date it happened and the numbers drawn."""
    date = ndb.DateProperty()
    numbers = ndb.IntegerProperty(repeated=True)
class User(ndb.Model):
    """A player account; the datastore key id doubles as the e-mail."""
    nickname = ndb.StringProperty()
    first_name = ndb.StringProperty()
    last_name = ndb.StringProperty()
    @property
    def email(self):
        # The entity's key id is the user's e-mail address.
        return self.key.id()
    @property
    def display_name(self):
        # Preference order: explicit nickname, full name, then e-mail.
        if self.nickname:
            return self.nickname
        if self.first_name and self.last_name:
            return ' '.join([self.first_name, self.last_name])
        return self.email
class Syndicate(ndb.Model):
    """A group of users playing one game together, run by a manager."""
    name = ndb.StringProperty()
    game_key = ndb.KeyProperty(kind=Game)
    manager_key = ndb.KeyProperty(kind=User)
    deputy_manager_key = ndb.KeyProperty(kind=User)
    # denormalized values
    game_name = ndb.StringProperty()
    manager_name = ndb.StringProperty()
    def _pre_put_hook(self):
        # Refresh the denormalized display fields from the referenced
        # entities on every put (costs two datastore gets per write).
        ndb.Model._pre_put_hook(self)
        game = self.game_key.get()
        manager = self.manager_key.get()
        self.game_name = game.name
        self.manager_name = manager.display_name
class Ticket(ndb.Model):
    """A set of numbers played on a single ticket."""
    numbers = ndb.IntegerProperty(repeated=True)
class UserSyndicate(ndb.Model):
    """Membership link between a User and a Syndicate."""
    user_key = ndb.KeyProperty(kind=User)
    syndicate_key = ndb.KeyProperty(kind=Syndicate)
    status = ndb.StringProperty(choices=['Invited', # a manager has invited user to the syndicate, but they haven't accepted
                                         'Pending', # a user has entered a code to join a syndicate, but the manager hasn't approved
                                         'Approved']) # a user who is a member of a syndicate
    # how much money the user has got in the kitty for this syndicate
    kitty = ndb.FloatProperty(default=0.0)
    # denormalized values
    user_name = ndb.StringProperty()
    syndicate_name = ndb.StringProperty()
    game_name = ndb.StringProperty()
    def _pre_put_hook(self):
        # Refresh the denormalized names from the referenced entities on
        # every put (costs two datastore gets per write).
        ndb.Model._pre_put_hook(self)
        user = self.user_key.get()
        syndicate = self.syndicate_key.get()
        self.user_name = user.display_name
        self.syndicate_name = syndicate.name
        self.game_name = syndicate.game_name
class UserDraw(ndb.Model):
    """Links a user's syndicate membership to a specific draw."""
    user_key = ndb.KeyProperty(kind=User)
    syndicate_key = ndb.KeyProperty(kind=Syndicate)
    draw_key = ndb.KeyProperty(kind=Draw)
|
import collections
import datetime
import itertools
from bson import objectid
from pymongo import errors
from pymongo import ReturnDocument
from anubis import error
from anubis import db
from anubis.constant import contest
from anubis.util import argmethod
from anubis.util import validator
from anubis.util import json
from anubis.model import system
from anubis.model import user
from anubis.model import record
from anubis.service import bus
# Contest scoring rule ids.
RULE_OI = 2
RULE_ACM = 3
# Contest modes.
TYPE_ONLINE = 1
TYPE_OFFLINE = 2
# Human-readable labels for the rule ids.
RULE_TEXTS = {
    RULE_OI: 'OI',
    RULE_ACM: 'ACM-ICPC',
}
# A scoring rule bundle: show_func(tdoc, now) gates visibility (see RULES),
# stat_func(tdoc, journal) folds a journal into a status summary,
# status_sort is the mongodb sort spec for standings, rank_func generates
# (rank, tsdoc) pairs from sorted status docs.
Rule = collections.namedtuple('Rule', ['show_func', 'stat_func', 'status_sort', 'rank_func'])
def _oi_stat(tdoc, journal):
detail = list(dict((j['pid'], j) for j in journal if j['pid'] in tdoc['pids']).values())
return {'score': sum(d['score'] for d in detail), 'detail': detail}
def _acm_stat(tdoc, journal):
    """Fold an ACM-rule journal into a summary of accepted count and time."""
    naccept = collections.defaultdict(int)
    effective = {}
    # Per problem: until a submission is accepted, the latest attempt wins
    # and every rejected attempt adds to the penalty count; once accepted,
    # later entries for that problem are ignored entirely.
    for j in journal:
        if j['pid'] in tdoc['pids'] and not (j['pid'] in effective and effective[j['pid']]['accept']):
            effective[j['pid']] = j
            if not j['accept']:
                naccept[j['pid']] += 1
    def time(jdoc):
        # Submission time comes from the record ObjectId's creation timestamp
        # (made naive to match begin_at); each failed attempt on the same
        # problem adds a 20-minute penalty.
        real = jdoc['rid'].generation_time.replace(tzinfo=None) - tdoc['begin_at']
        penalty = datetime.timedelta(minutes=20) * naccept[jdoc['pid']]
        return (real + penalty).total_seconds()
    detail = [{**j, 'naccept': naccept[j['pid']], 'time': time(j)} for j in effective.values()]
    # Only accepted problems contribute to the total time.
    return {'accept': sum(int(d['accept']) for d in detail),
            'time': sum(d['time'] for d in detail if d['accept']),
            'detail': detail}
def _acm_rank(tsdocs):
    """Yield (rank, tsdoc) pairs, tagging gold/silver/bronze prizes.

    tsdocs must already be sorted in final standings order.  Entries with
    ranked == False display '*' instead of a number and do not advance the
    rank counter.
    """
    now = 1
    # Prize bands are cumulative cut-off positions.
    gold = contest.COUNT_GOLD
    silver = gold + contest.COUNT_SILVER
    bronze = silver + contest.COUNT_BRONZE
    for tsdoc in tsdocs:
        prize = None
        if now <= gold:
            prize = 'gold'
        elif gold < now <= silver:
            prize = 'silver'
        elif silver < now <= bronze:
            prize = 'bronze'
        if prize:
            tsdoc['prize'] = prize
        # Missing 'ranked' key means the participant counts toward ranking.
        ranked = tsdoc.get('ranked', True)
        if ranked:
            rank = now
            now += 1
        else:
            rank = '*'
        yield (rank, tsdoc)
# Registry of scoring rules keyed by rule id.  The show_func admits the
# scoreboard once the contest has begun.
# NOTE(review): RULE_OI is defined above but has no entry here, so OI
# contests cannot be resolved through RULES -- confirm this is intentional.
RULES = {
    RULE_ACM: Rule(lambda tdoc, now: now >= tdoc['begin_at'],
                   _acm_stat, [('accept', -1), ('time', 1)], _acm_rank),
}
def convert_to_pid(pids: list, pid_letter: str):
    """Map a contest problem letter ('A', 'B', ...) to its numeric pid.

    Raises ContestProblemNotFoundError when the letter does not denote a
    problem of this contest.
    """
    index = ord(pid_letter) - ord('A')
    # BUG FIX: reject letters below 'A' explicitly -- a negative index would
    # otherwise silently wrap around and return a pid from the end of the
    # list instead of signalling a missing problem.
    if index < 0:
        raise error.ContestProblemNotFoundError(pid_letter)
    try:
        return pids[index]
    except IndexError:
        raise error.ContestProblemNotFoundError(pid_letter)
def convert_to_letter(pids: list, pid: int):
    """Map a numeric pid back to its contest letter ('A', 'B', ...).

    Raises ContestProblemNotFoundError when the pid is not in the contest.
    """
    try:
        position = pids.index(pid)
    except ValueError:
        raise error.ContestProblemNotFoundError(pid)
    return chr(position + ord('A'))
@argmethod.wrap
async def add(domain_id: str, title: str, content: str, owner_uid: int, rule: int, private: bool,
              begin_at: lambda i: datetime.datetime.utcfromtimestamp(int(i)),
              end_at: lambda i: datetime.datetime.utcfromtimestamp(int(i)),
              pids=[], **kwargs):
    """Create a contest and return its new integer id.

    The lambda annotations on begin_at/end_at are converters used by
    argmethod.wrap to parse unix timestamps from string arguments.
    Raises ValidationError for a bad title/content/rule or when
    begin_at >= end_at.

    NOTE(review): pids=[] is a mutable default; it is never mutated here,
    and it is left unchanged because argmethod.wrap may rely on the default
    value -- confirm before normalizing to None.
    """
    validator.check_title(title)
    validator.check_content(content)
    if rule not in RULES:
        raise error.ValidationError('rule')
    if begin_at >= end_at:
        raise error.ValidationError('begin_at', 'end_at')
    # TODO: should we check problem existance here?
    # Contest ids come from a global auto-increment counter.
    tid = await system.inc_contest_counter()
    coll = db.Collection('contest')
    doc = {
        '_id': tid,
        'domain_id': domain_id,
        'title': title,
        'content': content,
        'owner_uid': owner_uid,
        'rule': rule,
        'private': private,
        'begin_at': begin_at,
        'end_at': end_at,
        'pids': pids,
        'attend': 0,
        **kwargs,
    }
    await coll.insert_one(doc)
    return tid
@argmethod.wrap
async def edit(domain_id: str, tid: int, **kwargs):
    """Apply a partial update to a contest and return the updated document.

    Title/content are validated when present.  Raises ContestNotFoundError
    when no contest matches (domain_id, tid).
    """
    if 'title' in kwargs:
        validator.check_title(kwargs['title'])
    if 'content' in kwargs:
        validator.check_content(kwargs['content'])
    query = {'domain_id': domain_id, '_id': tid}
    coll = db.Collection('contest')
    tdoc = await coll.find_one_and_update(filter=query,
                                          update={'$set': kwargs},
                                          return_document=True)
    if not tdoc:
        raise error.ContestNotFoundError(domain_id, tid)
    return tdoc
@argmethod.wrap
async def get(domain_id: str, tid: int):
    """Fetch one contest document; raise ContestNotFoundError when absent."""
    coll = db.Collection('contest')
    tdoc = await coll.find_one({'domain_id': domain_id, '_id': tid})
    if tdoc:
        return tdoc
    raise error.ContestNotFoundError(domain_id, tid)
def get_multi(domain_id: str, projection=None, **kwargs):
    """Return a cursor over this domain's contests matching extra filters."""
    # Extra kwargs may override domain_id, matching the original dict-merge.
    query = {'domain_id': domain_id}
    query.update(kwargs)
    return db.Collection('contest').find(query, projection=projection)
@argmethod.wrap
async def get_list(domain_id: str, projection=None):
    """Return every contest in the domain, newest (highest _id) first."""
    cursor = get_multi(domain_id=domain_id, projection=projection)
    return await cursor.sort([('_id', -1)]).to_list(None)
@argmethod.wrap
async def attend(domain_id: str, tid: int, uid: int):
    """Register uid as an attendee of contest tid.

    The upsert filters on attend == 0 so a second attempt matches nothing
    and the insert collides (presumably with a unique index on
    domain_id/tid/uid -- defined elsewhere), raising DuplicateKeyError,
    which is translated to ContestAlreadyAttendedError.  Returns the
    contest document with its attend counter incremented.
    """
    # TODO: check time.
    coll = db.Collection('contest.status')
    try:
        await coll.find_one_and_update(filter={'domain_id': domain_id,
                                               'tid': tid,
                                               'uid': uid,
                                               'attend': {'$eq': 0}},
                                       update={'$set': {'attend': 1}},
                                       upsert=True,
                                       return_document=ReturnDocument.AFTER)
    except errors.DuplicateKeyError:
        raise error.ContestAlreadyAttendedError(domain_id, tid, uid) from None
    coll = db.Collection('contest')
    return await coll.find_one_and_update(filter={'domain_id': domain_id,
                                                  '_id': tid},
                                          update={'$inc': {'attend': 1}},
                                          return_document=ReturnDocument.AFTER)
@argmethod.wrap
async def remove_status(domain_id: str, tid: int, uid: int):
    """Delete a user's contest status and detach its records.

    Raises UserNotFoundError when the user has no status for the contest;
    returns the removed status document.
    """
    tsdoc = await get_status(domain_id, tid, uid)
    if not tsdoc:
        raise error.UserNotFoundError(uid)
    # Detach every submission referenced by the journal before dropping.
    for entry in tsdoc['journal']:
        await record.remove_property(entry['rid'], 'tid')
    await db.Collection('contest.status').delete_one(
        {'domain_id': domain_id, 'tid': tid, 'uid': uid})
    return tsdoc
@argmethod.wrap
async def get_status(domain_id: str, tid: int, uid: int, projection=None):
  """Fetch one user's status document for a contest, or None."""
  query = {'domain_id': domain_id, 'tid': tid, 'uid': uid}
  return await db.Collection('contest.status').find_one(
      query, projection=projection)
def get_multi_status(*, projection=None, **kwargs):
  """Return a cursor over contest status documents matching **kwargs."""
  status_coll = db.Collection('contest.status')
  return status_coll.find(kwargs, projection=projection)
async def get_dict_status(domain_id, uid, tids, *, projection=None):
  """Map tid -> status document for one user across the given contests."""
  cursor = get_multi_status(domain_id=domain_id,
                            uid=uid,
                            tid={'$in': list(set(tids))},
                            projection=projection)
  result = {}
  async for tsdoc in cursor:
    result[tsdoc['tid']] = tsdoc
  return result
@argmethod.wrap
async def get_and_list_status(domain_id: str, tid: int, projection=None):
  """Return (contest doc, statuses ordered by the contest rule's sort)."""
  # TODO: projection, pagination
  tdoc = await get(domain_id, tid)
  cursor = get_multi_status(domain_id=domain_id, tid=tid,
                            projection=projection)
  tsdocs = await cursor.sort(RULES[tdoc['rule']].status_sort).to_list(None)
  return tdoc, tsdocs
@argmethod.wrap
async def update_status(domain_id: str, tid: int, uid: int, rid: objectid.ObjectId,
                        pid: int, accept: bool):
  """Record a judged submission in the user's contest status.

  Appends (rid, pid, accept) to the status journal, recomputes the rule's
  statistics from the deduplicated journal, publishes a rank-change
  notification, and flags a balloon on first accept of a problem.
  Returns the updated status doc, or {} if the user has no status doc.
  Raises ValidationError for a pid not in the contest, and
  ContestNotAttendedError if the user never attended.
  """
  tdoc = await get(domain_id, tid)
  if pid not in tdoc['pids']:
    raise error.ValidationError('pid')
  coll = db.Collection('contest.status')
  tsdoc = await coll.find_one_and_update(filter={'domain_id': domain_id,
                                                 'tid': tid,
                                                 'uid': uid},
                                         update={
                                           '$push': {
                                             'journal': {'rid': rid,
                                                         'pid': pid,
                                                         'accept': accept}
                                           },
                                           '$inc': {'rev': 1}},
                                         return_document=ReturnDocument.AFTER)
  if not tsdoc:
    return {}
  if 'attend' not in tsdoc or not tsdoc['attend']:
    raise error.ContestNotAttendedError(domain_id, tid, uid)
  # Sort and uniquify journal of the contest status, by rid: for each rid,
  # keep only the last entry of the sorted run.
  key_func = lambda j: j['rid']
  journal = [list(g)[-1]
             for _, g in itertools.groupby(sorted(tsdoc['journal'], key=key_func), key=key_func)]
  stats = RULES[tdoc['rule']].stat_func(tdoc, journal)
  # Carry each problem's existing balloon flag over into the fresh stats,
  # defaulting to False for problems seen for the first time.
  psdict = {}
  for detail in tsdoc.get('detail', []):
    psdict[detail['pid']] = detail
  for detail in stats.get('detail', []):
    detail['balloon'] = psdict.get(detail['pid'], {'balloon': False}).get('balloon', False)
  # Second update writes the deduplicated journal plus recomputed stats.
  tsdoc = await coll.find_one_and_update(filter={'domain_id': domain_id,
                                                 'tid': tid,
                                                 'uid': uid},
                                         update={'$set': {'journal': journal, **stats},
                                                 '$inc': {'rev': 1}},
                                         return_document=ReturnDocument.AFTER)
  await bus.publish('contest_notification-' + str(tid), json.encode({'type': 'rank_changed'}))
  # First accepted solution of this problem: queue a balloon delivery.
  if accept and not psdict.get(pid, {'accept': False})['accept']:
    await set_status_balloon(domain_id, tid, uid, pid, False)
  return tsdoc
@argmethod.wrap
async def set_status_balloon(domain_id: str, tid: int, uid: int, pid: int, balloon: bool=True):
  """Set the balloon flag on one problem of a user's contest status.

  Also publishes a 'balloon_change' event so balloon runners get notified.
  Returns the updated status document (None if no matching detail entry).
  Raises ValidationError for a pid that is not part of the contest.
  """
  tdoc = await get(domain_id, tid)
  if pid not in tdoc['pids']:
    raise error.ValidationError('pid')
  coll = db.Collection('contest.status')
  # 'detail.pid' in the filter + the positional 'detail.$' operator update
  # exactly the matched array element of the 'detail' list.
  tsdoc = await coll.find_one_and_update(filter={'domain_id': domain_id,
                                                 'tid': tid,
                                                 'uid': uid,
                                                 'detail.pid': pid},
                                         update={'$set': {'detail.$.balloon': balloon}},
                                         return_document=ReturnDocument.AFTER)
  udoc = await user.get_by_uid(uid)
  # Broadcast the change with enough context for a balloon-status UI.
  await bus.publish('balloon_change', json.encode({'uid': uid,
                                                   'uname': udoc['uname'],
                                                   'nickname': udoc.get('nickname', ''),
                                                   'tid': tid,
                                                   'pid': pid,
                                                   'letter': convert_to_letter(tdoc['pids'], pid),
                                                   'balloon': balloon}))
  return tsdoc
@argmethod.wrap
async def create_indexes():
  """Create the MongoDB indexes backing the contest collections."""
  coll = db.Collection('contest')
  # Primary lookup and per-domain uniqueness of contest ids.
  await coll.create_index([('domain_id', 1), ('_id', 1)], unique=True)
  await coll.create_index([('domain_id', 1), ('pids', 1)], sparse=True)
  await coll.create_index([('domain_id', 1), ('rule', 1), ('_id', -1)],
                          sparse=True)
  status_coll = db.Collection('contest.status')
  # One status document per (domain, user, contest).
  await status_coll.create_index([('domain_id', 1), ('uid', 1), ('tid', 1)],
                                 unique=True)
  await status_coll.create_index([('domain_id', 1), ('tid', 1),
                                  ('accept', -1), ('time', 1)],
                                 sparse=True)
  await status_coll.create_index([('domain_id', 1), ('tid', 1),
                                  ('detail.accept', 1),
                                  ('detail.balloon', -1)])
  await status_coll.create_index([('domain_id', 1), ('tid', 1), ('uid', 1),
                                  ('detail.pid', 1)], sparse=True)
if __name__ == '__main__':
  # Expose the @argmethod.wrap-decorated functions above as CLI commands.
  argmethod.invoke_by_args()
|
"""Unit test for the irf.arf module.
"""
import unittest
import os
import numpy
from ximpol.core.spline import xInterpolatedUnivariateSplineLinear
from ximpol.detector.xipe import _full_path
from ximpol.irf import load_arf
# Name of the instrument response function set under test.
IRF_NAME = 'xipe_baseline'
# ASCII tables the .arf response function is originally built from:
# optics effective area and GPD quantum efficiency, respectively.
OPT_AEFF_FILE_PATH = _full_path('Area_XIPE_201602b_x3.asc')
GPD_QEFF_FILE_PATH = _full_path('eff_hedme8020_1atm_1cm_cuts80p_be50um_p_x.asc')
class TestXipeArf(unittest.TestCase):
    """Unit test for the XIPE effective area.
    """

    def test_xipe_arf(self):
        """Test the XIPE effective area.

        Load the effective area from the .arf FITS file, rebuild it from
        the source ASCII tables (optics area times GPD efficiency), and
        check that the two agree over the whole energy range where the
        effective area is non-zero.
        """
        energy, area = numpy.loadtxt(OPT_AEFF_FILE_PATH, unpack=True)
        opt_aeff = xInterpolatedUnivariateSplineLinear(energy, area)
        energy, eff = numpy.loadtxt(GPD_QEFF_FILE_PATH, unpack=True)
        gpd_eff = xInterpolatedUnivariateSplineLinear(energy, eff)
        aeff = load_arf(IRF_NAME)
        grid = numpy.linspace(aeff.xmin(), aeff.xmax(), 100)
        # Remove the data points where the effective area is 0.
        grid = grid[aeff(grid) > 0.]
        delta = abs((aeff(grid) - opt_aeff(grid)*gpd_eff(grid))/aeff(grid))
        self.assertTrue(delta.max() < 5e-3, 'max. diff. %.9f' % delta.max())
if __name__ == '__main__':
    # Run the test suite when executed directly.
    unittest.main()
|
"""
This file is our entrypoint.
Here is where we import and subclass the modreader.file_format and change behaviour.
"""
from . import records, fields
from ...file_format import ModFile, ModField
class SkyrimMod(ModFile):
    """
    Skyrim-specific subclass of modreader.file_format.ModFile.

    Most code won't do much except override the _create_field method.
    Overriding that method lets us use our own ModField subclass instead of
    the original base class.
    """
    def get_masters(self):
        """
        Tell third-party code using the Mod Reader API how to find this
        mod's master files.

        Third-party code shouldn't have to read each mod manually (which
        would be very game specific) to figure out what else needs to be
        loaded, so we do it here.

        :rtype: list
        """
        # Read the data from our header.
        header_data = self.header.read_data()
        # Find the MAST field, where the master-file data is stored.
        master_data = header_data["MAST"]
        # This list will collect the master file names.
        masters = []
        # Walk master_data, which is the processed field data.
        # NOTE: this fails unless at least the minimum processing has been
        # set up -- for Skyrim, the TES4 header and MAST field handling.
        for master in master_data:
            # MAST processing returns a string, so append it lower-cased:
            # we can't know whether it was written as e.g. "MyMasterMod" or
            # "mYmasTERmOD", and lower-casing both yields "mymastermod",
            # making master file names easy to compare.
            masters.append(master.read_data().lower())
        return masters
    def _create_header(self, flags, version):
        """
        Create a NEW file header correctly for this game.

        The base class doesn't know which game it is handling, so it can't
        know the right version; Skyrim headers are version 40.
        """
        super()._create_header(flags, 40)
    def _create_field(self, *args, **kwargs):
        """
        Return a new instance of SkyrimField, passing all arguments along.
        """
        return SkyrimField(*args, **kwargs)
class SkyrimField(ModField):
    """
    Skyrim-specific subclass of modreader.file_format.ModField.

    Most code only needs to override ModField.read_data in a manner
    similar to the below: look up a per-record handler class in records.py
    and delegate the actual parsing to it.
    """
    def read_data(self):
        """
        Read and process this field's data.

        Returns the processed data for supported record types. For
        unsupported record types the raw bytes are returned unchanged and
        a notice is printed (once per record type, so a mod with many
        identical unsupported records doesn't spam the user).
        """
        # Let the original ModField read the raw bytes -- no duplicate code.
        raw_data = super().read_data()
        # Fetch the class from records.py matching this field's record type;
        # getattr returns None (instead of raising) for unsupported types.
        record_handler_class = getattr(records, self.record.type.decode(), None)
        if record_handler_class is None:
            # BUG FIX: the original code printed the notice but then fell
            # through and called None(), raising TypeError for every
            # unsupported record. Report once per type and return the raw
            # bytes instead.
            if getattr(self, "_last_read_record", None) != self.record.type:
                print("Unsupported Record: " + self.record.type.decode())
                self._last_read_record = self.record.type
            return raw_data
        # Instantiate the handler and call its read method, passing
        # ourselves (the field), the raw bytes, and the fields module so it
        # knows where to look for the field-processing functions.
        record_handler = record_handler_class()
        return record_handler.read(self, raw_data, fields)
|
import itertools
import operator
import os
import copy
import pytest
from click.testing import CliRunner
import perun.cli as cli
import perun.utils.log as log
import perun.postprocess.clusterizer.run as clusterizer
import perun.logic.store as store
import perun.testing.asserts as asserts
# Module author tag, kept for perun's attribution convention.
__author__ = 'Tomas Fiedor'
def test_from_cli(pcs_full):
    """Tests running the clusterization from CLI"""
    object_dir = pcs_full.get_job_directory()
    objects_before = len(os.listdir(object_dir))

    runner = CliRunner()
    result = runner.invoke(cli.postprocessby, ["0@i", "clusterizer"])
    asserts.predicate_from_cli(result, result.exit_code == 0)

    # Two new objects should have been created by the postprocessing
    assert len(os.listdir(object_dir)) == objects_before + 2

    # Rerun at debug verbosity to exercise the group-printing code path
    log.VERBOSITY = log.VERBOSE_DEBUG
    result = runner.invoke(cli.postprocessby, ["0@i", "clusterizer"])
    asserts.predicate_from_cli(result, result.exit_code == 0)
def test_sort_order(full_profiles):
    """Test sort order method"""
    for _, profile in full_profiles:
        clusterizer.postprocess(profile, 'sort_order')
def get_malloced_resources(profile):
    """Helper function for getting resources that were allocated by malloc

    :param Profile profile: dictionary with resources
    :return: list of resources allocated by malloc
    """
    # Sort by the clusterizer key so groupby sees contiguous groups.
    resources = sorted(
        (resource for _, resource in profile.all_resources()),
        key=clusterizer.resource_sort_key
    )
    malloced = []
    for group_key, members in itertools.groupby(
            resources, clusterizer.resource_group_key):
        if group_key[1] == 'malloc':
            malloced.extend(members)
    return malloced
def test_sliding_window(pcs_full):
    """Tests sliding window method

    Runs the clusterizer once through the CLI, then postprocesses a stored
    clustering profile with several window parameterizations and checks how
    many clusters the malloc'ed resources end up in. Finally verifies that
    unsupported or unknown measures terminate with SystemExit.
    """
    runner = CliRunner()
    result = runner.invoke(cli.postprocessby, ["0@i", "clusterizer", "-s", "sliding_window"])
    asserts.predicate_from_cli(result, result.exit_code == 0)
    pool_path = os.path.join(os.path.split(__file__)[0], 'profiles', 'clustering_profiles')
    clustered_profile = store.load_profile_from_file(os.path.join(pool_path, 'clustering-workload.perf'), True)
    # Small absolute window: fine-grained clustering expected.
    postprocessed_profile = copy.deepcopy(clustered_profile)
    params = {
        'window_width': 2, 'width_measure': 'absolute',
        'window_height': 4, 'height_measure': 'absolute'
    }
    clusterizer.postprocess(postprocessed_profile, 'sliding_window', **params)
    malloced = get_malloced_resources(postprocessed_profile)
    # Assert we clustered resources to five clusters only
    assert max(res['cluster'] for res in malloced) == 5
    # Large absolute window: everything falls into a single cluster.
    postprocessed_profile = copy.deepcopy(clustered_profile)
    params = {
        'window_width': 20, 'width_measure': 'absolute',
        'window_height': 40, 'height_measure': 'absolute'
    }
    clusterizer.postprocess(postprocessed_profile, 'sliding_window', **params)
    malloced = get_malloced_resources(postprocessed_profile)
    # Assert we clustered resources to one clusters only, because the window is big
    assert max(res['cluster'] for res in malloced) == 1
    # Relative width with absolute height.
    postprocessed_profile = copy.deepcopy(clustered_profile)
    params = {
        'window_width': 0.5, 'width_measure': 'relative',
        'window_height': 40, 'height_measure': 'absolute'
    }
    clusterizer.postprocess(postprocessed_profile, 'sliding_window', **params)
    malloced = get_malloced_resources(postprocessed_profile)
    # Assert we clustered resources to two clusters only
    assert max(res['cluster'] for res in malloced) == 2
    # Try noexistant or unsupported options
    with pytest.raises(SystemExit):
        params = {
            'window_width': 0.5, 'width_measure': 'weighted',
            'window_height': 40, 'height_measure': 'absolute'
        }
        clusterizer.postprocess(postprocessed_profile, 'sliding_window', **params)
    with pytest.raises(SystemExit):
        params = {
            'window_width': 0.5, 'width_measure': 'nonexistant',
            'window_height': 40, 'height_measure': 'absolute'
        }
        clusterizer.postprocess(postprocessed_profile, 'sliding_window', **params)
    with pytest.raises(SystemExit):
        params = {
            'window_width': 0.5, 'width_measure': 'absolute',
            'window_height': 40, 'height_measure': 'nonexistant'
        }
        clusterizer.postprocess(postprocessed_profile, 'sliding_window', **params)
|
from abc import ABCMeta, abstractmethod
from hoomd import variant
import matplotlib
# Select the non-interactive AGG backend before pyplot is imported, so
# figures can be rendered without a display (e.g. on headless nodes).
matplotlib.use('AGG')
import matplotlib.pyplot as plt
class TemperatureProfileBuilder(object):
    """Common base class for all TemperatureProfiles."""
    # NOTE(review): '__metaclass__' only takes effect on Python 2; on
    # Python 3 it is an ordinary class attribute and @abstractmethod is not
    # enforced -- confirm which Python version this targets.
    __metaclass__ = ABCMeta
    def __init__(self):
        # List of (time, temperature) state points, in increasing time order.
        self.temperature_profile = []
    @abstractmethod
    def get_profile(self):
        """
        Abstract method for TemperatureProfileBuilder.get_profile()
        Returns a hoomd.variant object
        """
        pass
    def get_figure(self):
        """Return a matplotlib figure of temperature (kT) versus time."""
        fig = plt.figure()
        x_val = [x[0] for x in self.temperature_profile]
        y_val = [x[1] for x in self.temperature_profile]
        plt.xlabel('Time')
        plt.ylabel('kT')
        plt.margins(x=0.1, y=0.1)
        plt.plot(x_val, y_val)
        # Overlay the individual state points as red circular markers.
        plt.plot(x_val, y_val, 'or')
        return fig
    def get_total_sim_time(self):
        """Return the time span from the first to the last state point."""
        first_state_point = self.temperature_profile[0]
        last_state_point = self.temperature_profile[-1]
        return last_state_point[0]-first_state_point[0]
    def get_raw(self):
        """Return the raw list of (time, temperature) state points."""
        return self.temperature_profile
    def set_raw(self, data):
        """Replace the state-point list wholesale with *data*."""
        self.temperature_profile = data
class LinearTemperatureProfileBuilder(TemperatureProfileBuilder):
    """Builds a Linear Temperature Profile."""
    def __init__(self, initial_temperature, initial_time=0):
        TemperatureProfileBuilder.__init__(self)
        self.initial_temperature = initial_temperature
        self.temperature_profile.append((initial_time, initial_temperature))
    def add_state_point(self, ramp_time, desired_temperature):
        """
        Append a state point, placed *ramp_time* after the previous point
        (or after initial_time for the first addition). Raises ValueError
        when the resulting time would go backwards.
        """
        previous_time = self.temperature_profile[-1][0]
        new_time = previous_time + ramp_time
        if new_time < previous_time:
            raise ValueError(
                'Inconsistent state point added. The new time should be greater or equal to previous time.'
                'Previous time: {}, new time: {}'.format(previous_time, new_time))
        self.temperature_profile.append((new_time, desired_temperature))
    def get_profile(self):
        """
        Returns a hoomd.variant object
        """
        return variant.linear_interp(points=self.temperature_profile)
|
import collections
import json
import logging
import re
import time
import sys
import pprint
from errbot.backends.base import Message, Presence, ONLINE, AWAY, Room, RoomError, RoomDoesNotExistError, \
UserDoesNotExistError, RoomOccupant, Person
from errbot.errBot import ErrBot
from errbot.utils import PY3, split_string_after
from errbot.rendering import imtext
# Module-level logger for this backend.
log = logging.getLogger('errbot.backends.slack')
try:
    from functools import lru_cache
except ImportError:
    # Python 2 fallback: functools.lru_cache is stdlib only from 3.2 on.
    from backports.functools_lru_cache import lru_cache
try:
    from slackclient import SlackClient
except ImportError:
    # slackclient is an optional dependency: bail out with install advice.
    log.exception("Could not start the Slack back-end")
    log.fatal(
        "You need to install the slackclient package in order to use the Slack "
        "back-end. You should be able to install this package using: "
        "pip install slackclient"
    )
    sys.exit(1)
except SyntaxError:
    # Under Python 2 a SyntaxError here is not the known py3-compat issue,
    # so propagate it instead of printing the advice below.
    if not PY3:
        raise
    log.exception("Could not start the Slack back-end")
    log.fatal(
        "I cannot start the Slack back-end because I cannot import the SlackClient. "
        "Python 3 compatibility on SlackClient is still quite young, you may be "
        "running an old version or perhaps they released a version with a Python "
        "3 regression. As a last resort to fix this, you could try installing the "
        "latest master version from them using: "
        "pip install --upgrade https://github.com/slackhq/python-slackclient/archive/master.zip"
    )
    sys.exit(1)
# Matches Slack channel hyperlinks of the form <#C12345> or <#G12345>,
# capturing the channel id in the 'id' group.
SLACK_CLIENT_CHANNEL_HYPERLINK = re.compile(r'^<#(?P<id>(C|G)[0-9A-Z]+)>$')
# Maximum length of a single Slack message body.
SLACK_MESSAGE_LIMIT = 4096
# Help text shown when an operation needs a full user account rather than
# a bot integration token.
USER_IS_BOT_HELPTEXT = (
    "Connected to Slack using a bot account, which cannot manage "
    "channels itself (you must invite the bot to channels instead, "
    "it will auto-accept) nor invite people.\n\n"
    "If you need this functionality, you will have to create a "
    "regular user account and connect Err using that account. "
    "For this, you will also need to generate a user token at "
    "https://api.slack.com/web."
)
class SlackAPIResponseError(RuntimeError):
    """Slack API returned a non-OK response"""

    def __init__(self, *args, error='', **kwargs):
        """
        :param error:
            The 'error' key from the API response data
        """
        super().__init__(*args, **kwargs)
        self.error = error
class SlackPerson(Person):
    """
    This class describes a person on Slack's network.
    """

    def __init__(self, sc, userid=None, channelid=None):
        """
        :param sc: the SlackClient instance used for server lookups
        :param userid: Slack user or bot id (must start with U or B)
        :param channelid: Slack channel id (must start with D, C or G)
        """
        if userid is not None and userid[0] not in ('U', 'B'):
            raise Exception('This is not a Slack user or bot id: %s (should start with U or B)' % userid)
        if channelid is not None and channelid[0] not in ('D', 'C', 'G'):
            raise Exception('This is not a valid Slack channelid: %s (should start with D, C or G)' % channelid)
        self._userid = userid
        self._channelid = channelid
        self._sc = sc

    @property
    def userid(self):
        return self._userid

    @property
    def username(self):
        """Convert a Slack user ID to their user name"""
        user = self._sc.server.users.find(self._userid)
        if user is None:
            log.error("Cannot find user with ID %s" % self._userid)
            return "<%s>" % self._userid
        return user.name

    @property
    def channelid(self):
        return self._channelid

    @property
    def channelname(self):
        """Convert a Slack channel ID to its channel name"""
        if self._channelid is None:
            return None
        channel = self._sc.server.channels.find(self._channelid)
        if channel is None:
            raise RoomDoesNotExistError("No channel with ID %s exists" % self._channelid)
        return channel.name

    @property
    def domain(self):
        return self._sc.server.domain

    # Compatibility with the generic API.
    person = userid
    client = channelid
    nick = username

    # Override for ACLs
    @property
    def aclattr(self):
        # Note: Don't use str(self) here because that will return
        # an incorrect format from SlackMUCOccupant.
        return "@%s" % self.username

    @property
    def fullname(self):
        """Convert a Slack user ID to their real name"""
        user = self._sc.server.users.find(self._userid)
        if user is None:
            log.error("Cannot find user with ID %s" % self._userid)
            return "<%s>" % self._userid
        return user.real_name

    def __unicode__(self):
        return "@%s" % self.username

    def __str__(self):
        return self.__unicode__()

    def __eq__(self, other):
        # BUG FIX: comparing with an object that has no 'userid' attribute
        # used to raise AttributeError; return NotImplemented so Python can
        # fall back to the other operand's __eq__ or identity comparison.
        if not hasattr(other, 'userid'):
            return NotImplemented
        return other.userid == self.userid

    def __hash__(self):
        # BUG FIX: defining __eq__ without __hash__ made SlackPerson
        # unhashable on Python 3 (and broke the hash/eq invariant on
        # Python 2). Hash on userid so equal persons hash equal.
        return hash(self.userid)
class SlackRoomOccupant(RoomOccupant, SlackPerson):
    """
    This class represents a person inside a MUC.
    """
    def __init__(self, sc, userid, channelid, bot):
        """
        :param sc: the SlackClient instance
        :param userid: Slack user id of the occupant
        :param channelid: Slack channel id of the room they occupy
        :param bot: the backend instance, needed to build the SlackRoom
        """
        super().__init__(sc, userid, channelid)
        self._room = SlackRoom(channelid=channelid, bot=bot)

    @property
    def room(self):
        """The SlackRoom this occupant belongs to."""
        return self._room

    def __unicode__(self):
        return "#%s/%s" % (self._room.name, self.username)

    def __str__(self):
        return self.__unicode__()

    def __eq__(self, other):
        if not isinstance(other, RoomOccupant):
            # BUG FIX: log.warn() is a deprecated alias of log.warning().
            log.warning('tried to compare a SlackRoomOccupant with a SlackPerson %s vs %s', self, other)
            return False
        return other.room.id == self.room.id and other.userid == self.userid
class SlackBackend(ErrBot):
    def __init__(self, config):
        """
        :param config: errbot configuration module; must provide
            BOT_IDENTITY with a 'token' entry for the Slack bot account.
        """
        super().__init__(config)
        identity = config.BOT_IDENTITY
        self.token = identity.get('token', None)
        if not self.token:
            log.fatal(
                'You need to set your token (found under "Bot Integration" on Slack) in '
                'the BOT_IDENTITY setting in your configuration. Without this token I '
                'cannot connect to Slack.'
            )
            sys.exit(1)
        self.sc = None  # Will be initialized in serve_once
        # Markdown -> Slack-friendly plain text converter.
        self.md = imtext()
    def api_call(self, method, data=None, raise_errors=True):
        """
        Make an API call to the Slack API and return response data.

        This is a thin wrapper around `SlackClient.server.api_call`.

        :param method:
            The API method to invoke (see https://api.slack.com/methods/).
        :param raise_errors:
            Whether to raise :class:`~SlackAPIResponseError` if the API
            returns an error
        :param data:
            A dictionary with data to pass along in the API request.
        :returns:
            A dictionary containing the (JSON-decoded) API response
        :raises:
            :class:`~SlackAPIResponseError` if raise_errors is True and the
            API responds with `{"ok": false}`
        """
        if data is None:
            data = {}
        response = self.sc.api_call(method, **data)
        # NOTE(review): 'collections.Mapping' moved to 'collections.abc' in
        # Python 3.3 and was removed from 'collections' in 3.10; this line
        # breaks on modern interpreters -- kept as-is for py2 support here.
        if not isinstance(response, collections.Mapping):
            # Compatibility with SlackClient < 1.0.0
            response = json.loads(response.decode('utf-8'))
        if raise_errors and not response['ok']:
            raise SlackAPIResponseError(
                "Slack API call to %s failed: %s" % (method, response['error']),
                error=response['error']
            )
        return response
def serve_once(self):
self.sc = SlackClient(self.token)
log.info("Verifying authentication token")
self.auth = self.api_call("auth.test", raise_errors=False)
if not self.auth['ok']:
raise SlackAPIResponseError(error="Couldn't authenticate with Slack. Server said: %s" % self.auth['error'])
log.debug("Token accepted")
self.bot_identifier = SlackPerson(self.sc, self.auth["user_id"])
log.info("Connecting to Slack real-time-messaging API")
if self.sc.rtm_connect():
log.info("Connected")
self.reset_reconnection_count()
try:
while True:
for message in self.sc.rtm_read():
self._dispatch_slack_message(message)
time.sleep(1)
except KeyboardInterrupt:
log.info("Interrupt received, shutting down..")
return True
except:
log.exception("Error reading from RTM stream:")
finally:
log.debug("Triggering disconnect callback")
self.disconnect_callback()
else:
raise Exception('Connection failed, invalid token ?')
def _dispatch_slack_message(self, message):
"""
Process an incoming message from slack.
"""
if 'type' not in message:
log.debug("Ignoring non-event message: %s" % message)
return
event_type = message['type']
event_handlers = {
'hello': self._hello_event_handler,
'presence_change': self._presence_change_event_handler,
'message': self._message_event_handler,
}
event_handler = event_handlers.get(event_type)
if event_handler is None:
log.debug("No event handler available for %s, ignoring this event" % event_type)
return
try:
log.debug("Processing slack event: %s" % message)
event_handler(message)
except Exception:
log.exception("%s event handler raised an exception" % event_type)
    def _hello_event_handler(self, event):
        """Event handler for the 'hello' event"""
        # 'hello' is the first event after the RTM connection comes up:
        # fire the connect callback and advertise the bot as online.
        self.connect_callback()
        self.callback_presence(Presence(identifier=self.bot_identifier, status=ONLINE))
def _presence_change_event_handler(self, event):
"""Event handler for the 'presence_change' event"""
idd = SlackPerson(self.sc, event['user'])
presence = event['presence']
# According to https://api.slack.com/docs/presence, presence can
# only be one of 'active' and 'away'
if presence == 'active':
status = ONLINE
elif presence == 'away':
status = AWAY
else:
log.error(
"It appears the Slack API changed, I received an unknown presence type %s" % presence
)
status = ONLINE
self.callback_presence(Presence(identifier=idd, status=status))
    def _message_event_handler(self, event):
        """Event handler for the 'message' event.

        Filters out deleted/auto-expanded messages, extracts text and
        author, resolves mentions, and dispatches the resulting Message
        (plus any mentions) to the errbot callbacks.
        """
        channel = event['channel']
        if channel[0] not in 'CGD':
            log.warning("Unknown message type! Unable to handle %s", channel)
            return
        subtype = event.get('subtype', None)
        if subtype == "message_deleted":
            log.debug("Message of type message_deleted, ignoring this event")
            return
        if subtype == "message_changed" and 'attachments' in event['message']:
            # If you paste a link into Slack, it does a call-out to grab details
            # from it so it can display this in the chatroom. These show up as
            # message_changed events with an 'attachments' key in the embedded
            # message. We should completely ignore these events otherwise we
            # could end up processing bot commands twice (user issues a command
            # containing a link, it gets processed, then Slack triggers the
            # message_changed event and we end up processing it again as a new
            # message. This is not what we want).
            log.debug(
                "Ignoring message_changed event with attachments, likely caused "
                "by Slack auto-expanding a link"
            )
            return
        # message_changed events wrap the real payload in a 'message' key;
        # plain messages carry 'text'/'user' at the top level.
        if 'message' in event:
            text = event['message']['text']
            user = event['message'].get('user', event.get('bot_id'))
        else:
            text = event['text']
            user = event.get('user', event.get('bot_id'))
        # Collect every identifier mentioned in the text, rewriting raw
        # <@U...> references into readable @username form as we go.
        mentioned = []
        for word in text.split():
            if word.startswith('<') or word.startswith('@') or word.startswith('#'):
                try:
                    identifier = self.build_identifier(word.replace(':', ''))
                except Exception as e:
                    log.debug("Tried to build an identifier from '%s' but got exception: %s", word, e)
                    continue
                log.debug('Someone mentioned')
                mentioned.append(identifier)
                text = re.sub('<@[^>]*>:*', '@%s' % mentioned[-1].username, text)
        text = self.sanitize_uris(text)
        log.debug("Saw an event: %s" % pprint.pformat(event))
        log.debug("Escaped IDs event text: %s" % text)
        msg = Message(
            text,
            extras={'attachments': event.get('attachments')})
        # 'D' channels are IMs -> direct message; otherwise it's room traffic.
        if channel.startswith('D'):
            msg.frm = SlackPerson(self.sc, user, event['channel'])
            msg.to = SlackPerson(self.sc, self.username_to_userid(self.sc.server.username),
                                 event['channel'])
        else:
            msg.frm = SlackRoomOccupant(self.sc, user, event['channel'], bot=self)
            msg.to = SlackRoom(channelid=event['channel'], bot=self)
        self.callback_message(msg)
        if mentioned:
            self.callback_mention(msg, mentioned)
def userid_to_username(self, id_):
"""Convert a Slack user ID to their user name"""
user = [user for user in self.sc.server.users if user.id == id_]
if not user:
raise UserDoesNotExistError("Cannot find user with ID %s" % id_)
return user[0].name
def username_to_userid(self, name):
"""Convert a Slack user name to their user ID"""
user = [user for user in self.sc.server.users if user.name == name]
if not user:
raise UserDoesNotExistError("Cannot find user %s" % name)
return user[0].id
def channelid_to_channelname(self, id_):
"""Convert a Slack channel ID to its channel name"""
channel = [channel for channel in self.sc.server.channels if channel.id == id_]
if not channel:
raise RoomDoesNotExistError("No channel with ID %s exists" % id_)
return channel[0].name
def channelname_to_channelid(self, name):
"""Convert a Slack channel name to its channel ID"""
if name.startswith('#'):
name = name[1:]
channel = [channel for channel in self.sc.server.channels if channel.name == name]
if not channel:
raise RoomDoesNotExistError("No channel named %s exists" % name)
return channel[0].id
def channels(self, exclude_archived=True, joined_only=False):
"""
Get all channels and groups and return information about them.
:param exclude_archived:
Exclude archived channels/groups
:param joined_only:
Filter out channels the bot hasn't joined
:returns:
A list of channel (https://api.slack.com/types/channel)
and group (https://api.slack.com/types/group) types.
See also:
* https://api.slack.com/methods/channels.list
* https://api.slack.com/methods/groups.list
"""
response = self.api_call('channels.list', data={'exclude_archived': exclude_archived})
channels = [channel for channel in response['channels']
if channel['is_member'] or not joined_only]
response = self.api_call('groups.list', data={'exclude_archived': exclude_archived})
# No need to filter for 'is_member' in this next call (it doesn't
# (even exist) because leaving a group means you have to get invited
# back again by somebody else.
groups = [group for group in response['groups']]
return channels + groups
    @lru_cache(50)
    def get_im_channel(self, id_):
        """Open a direct message channel to a user"""
        # NOTE(review): lru_cache on an instance method keys on 'self' and
        # keeps the instance alive; tolerable here since the backend is
        # effectively a singleton (see __hash__), but worth confirming.
        response = self.api_call('im.open', data={'user': id_})
        return response['channel']['id']
    def send_message(self, mess):
        """Send *mess* to Slack, chunking bodies over the size limit.

        Group messages go to the room's channel; direct messages are routed
        to an IM channel with the recipient. Errors are logged rather than
        raised so a failed send cannot kill the bot loop.
        """
        super().send_message(mess)
        to_humanreadable = "<unknown>"
        try:
            if mess.is_group:
                to_channel_id = mess.to.id
                to_humanreadable = mess.to.name if mess.to.name else self.channelid_to_channelname(to_channel_id)
            else:
                to_humanreadable = mess.to.username
                to_channel_id = mess.to.channelid
                # A 'C' channel id on a non-group message means the reply
                # was diverted to private: open an IM channel instead.
                if to_channel_id.startswith('C'):
                    log.debug("This is a divert to private message, sending it directly to the user.")
                    to_channel_id = self.get_im_channel(self.username_to_userid(mess.to.username))
            msgtype = "direct" if mess.is_direct else "channel"
            log.debug('Sending %s message to %s (%s)' % (msgtype, to_humanreadable, to_channel_id))
            body = self.md.convert(mess.body)
            log.debug('Message size: %d' % len(body))
            # Respect both the configured limit and Slack's hard limit.
            limit = min(self.bot_config.MESSAGE_SIZE_LIMIT, SLACK_MESSAGE_LIMIT)
            parts = self.prepare_message_body(body, limit)
            for part in parts:
                self.sc.rtm_send_message(to_channel_id, part)
        except Exception:
            log.exception(
                "An exception occurred while trying to send the following message "
                "to %s: %s" % (to_humanreadable, mess.body)
            )
    def __hash__(self):
        # The backend behaves as a singleton, so a constant hash is fine;
        # it also keeps get_im_channel's lru_cache keyed consistently.
        return 0  # this is a singleton anyway
def change_presence(self, status: str = ONLINE, message: str = '') -> None:
self.api_call('users.setPresence', data={'presence': 'auto' if status == ONLINE else 'away'})
@staticmethod
def prepare_message_body(body, size_limit):
"""
Returns the parts of a message chunked and ready for sending.
This is a staticmethod for easier testing.
Args:
body (str)
size_limit (int): chunk the body into sizes capped at this maximum
Returns:
[str]
"""
fixed_format = body.startswith('```') # hack to fix the formatting
parts = list(split_string_after(body, size_limit))
if len(parts) == 1:
# If we've got an open fixed block, close it out
if parts[0].count('```') % 2 != 0:
parts[0] += '\n```\n'
else:
for i, part in enumerate(parts):
starts_with_code = part.startswith('```')
# If we're continuing a fixed block from the last part
if fixed_format and not starts_with_code:
parts[i] = '```\n' + part
# If we've got an open fixed block, close it out
if part.count('```') % 2 != 0:
parts[i] += '\n```\n'
return parts
@staticmethod
def extract_identifiers_from_string(text):
"""
Parse a string for Slack user/channel IDs.
Supports strings with the following formats::
<#C12345>
<@U12345>
@user
#channel/user
#channel
Returns the tuple (username, userid, channelname, channelid).
Some elements may come back as None.
"""
exception_message = (
"Unparseable slack identifier, should be of the format `<#C12345>`, `<@U12345>`, "
"`@user`, `#channel/user` or `#channel`. (Got `%s`)"
)
text = text.strip()
if text == "":
raise ValueError(exception_message % "")
channelname = None
username = None
channelid = None
userid = None
if text[0] == "<" and text[-1] == ">":
exception_message = (
"Unparseable slack ID, should start with U, B, C, G or D "
"(got `%s`)"
)
text = text[2:-1]
if text == "":
raise ValueError(exception_message % "")
if text[0] in ('U', 'B'):
userid = text
elif text[0] in ('C', 'G', 'D'):
channelid = text
else:
raise ValueError(exception_message % text)
elif text[0] == '@':
username = text[1:]
elif text[0] == '#':
plainrep = text[1:]
if '/' in text:
channelname, username = plainrep.split('/', 1)
else:
channelname = plainrep
else:
raise ValueError(exception_message % text)
return username, userid, channelname, channelid
def build_identifier(self, txtrep):
    """
    Build a :class:`SlackIdentifier` from the given string txtrep.

    Supports strings with the formats accepted by
    :func:`~extract_identifiers_from_string`.
    """
    log.debug("building an identifier from %s" % txtrep)
    username, userid, channelname, channelid = self.extract_identifiers_from_string(txtrep)

    # Prefer already-resolved ids; fall back to resolving names.
    if userid is not None:
        return SlackPerson(self.sc, userid, self.get_im_channel(userid))
    if channelid is not None:
        return SlackPerson(self.sc, None, channelid)
    if username is not None:
        uid = self.username_to_userid(username)
        return SlackPerson(self.sc, uid, self.get_im_channel(uid))
    if channelname is not None:
        cid = self.channelname_to_channelid(channelname)
        # userid is None on this path; only the channel was identified.
        return SlackRoomOccupant(self.sc, userid, cid, bot=self)

    raise Exception(
        "You found a bug. I expected at least one of userid, channelid, username or channelname "
        "to be resolved but none of them were. This shouldn't happen so, please file a bug."
    )
def build_reply(self, mess, text=None, private=False):
    """Construct a reply to `mess`, addressed back at its sender.

    When `private` is False and the sender is a room occupant, the reply
    goes to the room instead of the individual.
    """
    response = self.build_message(text)
    response.frm = self.bot_identifier
    if private:
        response.to = mess.frm
    elif isinstance(mess.frm, RoomOccupant):
        response.to = mess.frm.room
    else:
        response.to = mess.frm
    return response
def shutdown(self):
    """Shut down the backend; defers entirely to the superclass implementation."""
    super().shutdown()
@property
def mode(self):
    """Name identifying this backend: always the literal 'slack'."""
    return 'slack'
def query_room(self, room):
    """ Room can either be a name or a channelid """
    # Raw channel ('C...') or group ('G...') id.
    if room.startswith(('C', 'G')):
        return SlackRoom(channelid=room, bot=self)

    # Slack-client hyperlink form, e.g. '<#C12345|general>'.
    match = SLACK_CLIENT_CHANNEL_HYPERLINK.match(room)
    if match is not None:
        return SlackRoom(channelid=match.groupdict()['id'], bot=self)

    # Otherwise treat it as a plain channel name.
    return SlackRoom(name=room, bot=self)
def rooms(self):
    """
    Return a list of rooms the bot is currently in.

    :returns:
        A list of :class:`~SlackRoom` instances.
    """
    joined = self.channels(joined_only=True, exclude_archived=True)
    return [SlackRoom(channelid=c['id'], bot=self) for c in joined]
def prefix_groupchat_reply(self, message, identifier):
    """Prefix a group-chat reply body with an @-mention of the addressee's nick."""
    super().prefix_groupchat_reply(message, identifier)
    message.body = '@{0}: {1}'.format(identifier.nick, message.body)
@staticmethod
def sanitize_uris(text):
"""
Sanitizes URI's present within a slack message. e.g.
<mailto:example@example.org|example@example.org>,
<http://example.org|example.org>
<http://example.org>
:returns:
string
"""
text = re.sub(r'<([^\|>]+)\|([^\|>]+)>', r'\2', text)
text = re.sub(r'<(http([^\>]+))>', r'\1', text)
return text
class SlackRoom(Room):
    """
    A room backed by a Slack channel (ids starting with 'C') or private
    group (ids starting with 'G').

    Exactly one of ``name`` or ``channelid`` may be given; a leading '#'
    on a name is stripped.  The channel id is resolved lazily through the
    ``id`` property.
    """

    def __init__(self, name=None, channelid=None, bot=None):
        if channelid is not None and name is not None:
            raise ValueError("channelid and name are mutually exclusive")

        if name is not None:
            # Accept both '#general' and 'general'.
            if name.startswith('#'):
                self._name = name[1:]
            else:
                self._name = name
        else:
            self._name = bot.channelid_to_channelname(channelid)

        self._id = None  # cached channel id, filled in by the `id` property
        self._bot = bot
        self.sc = bot.sc

    def __str__(self):
        return "#%s" % self.name

    @property
    def _channel(self):
        """
        The channel object exposed by SlackClient.

        Raises:
            RoomDoesNotExistError: when the channel cannot be found (or is
                a private group the bot has no access to).
        """
        id_ = self.sc.server.channels.find(self.name)
        if id_ is None:
            raise RoomDoesNotExistError(
                "%s does not exist (or is a private group you don't have access to)" % str(self)
            )
        return id_

    @property
    def _channel_info(self):
        """
        Channel info as returned by the Slack API.

        See also:
          * https://api.slack.com/methods/channels.list
          * https://api.slack.com/methods/groups.list
        """
        # Private groups and public channels live behind different API methods.
        if self.private:
            return self._bot.api_call('groups.info', data={'channel': self.id})["group"]
        else:
            return self._bot.api_call('channels.info', data={'channel': self.id})["channel"]

    @property
    def private(self):
        """Return True if the room is a private group"""
        return self._channel.id.startswith('G')

    @property
    def id(self):
        """Return the ID of this room"""
        if self._id is None:
            self._id = self._channel.id
        return self._id

    @property
    def name(self):
        """Return the name of this room"""
        return self._name

    def join(self, username=None, password=None):
        """Join the channel. The username/password arguments are unused here."""
        log.info("Joining channel %s" % str(self))
        try:
            self._bot.api_call('channels.join', data={'name': self.name})
        except SlackAPIResponseError as e:
            if e.error == "user_is_bot":
                raise RoomError("Unable to join channel. " + USER_IS_BOT_HELPTEXT)
            else:
                raise RoomError(e)

    def leave(self, reason=None):
        """Leave the channel or group. The reason argument is unused here."""
        try:
            if self.id.startswith('C'):
                log.info("Leaving channel %s (%s)" % (str(self), self.id))
                self._bot.api_call('channels.leave', data={'channel': self.id})
            else:
                log.info("Leaving group %s (%s)" % (str(self), self.id))
                self._bot.api_call('groups.leave', data={'channel': self.id})
        except SlackAPIResponseError as e:
            if e.error == "user_is_bot":
                raise RoomError("Unable to leave channel. " + USER_IS_BOT_HELPTEXT)
            else:
                raise RoomError(e)
        self._id = None  # force a fresh id lookup next time

    def create(self, private=False):
        """Create the channel (or a private group when private=True)."""
        try:
            if private:
                log.info("Creating group %s" % str(self))
                self._bot.api_call('groups.create', data={'name': self.name})
            else:
                log.info("Creating channel %s" % str(self))
                self._bot.api_call('channels.create', data={'name': self.name})
        except SlackAPIResponseError as e:
            if e.error == "user_is_bot":
                raise RoomError("Unable to create channel. " + USER_IS_BOT_HELPTEXT)
            else:
                raise RoomError(e)

    def destroy(self):
        """Archive the channel or group (Slack's closest thing to deletion here)."""
        try:
            if self.id.startswith('C'):
                log.info("Archiving channel %s (%s)" % (str(self), self.id))
                self._bot.api_call('channels.archive', data={'channel': self.id})
            else:
                log.info("Archiving group %s (%s)" % (str(self), self.id))
                self._bot.api_call('groups.archive', data={'channel': self.id})
        except SlackAPIResponseError as e:
            if e.error == "user_is_bot":
                raise RoomError("Unable to archive channel. " + USER_IS_BOT_HELPTEXT)
            else:
                raise RoomError(e)
        self._id = None  # force a fresh id lookup next time

    @property
    def exists(self):
        """True when a channel of this name exists, archived or not."""
        channels = self._bot.channels(joined_only=False, exclude_archived=False)
        return len([c for c in channels if c['name'] == self.name]) > 0

    @property
    def joined(self):
        """True when the bot is currently a member of this channel."""
        channels = self._bot.channels(joined_only=True)
        return len([c for c in channels if c['name'] == self.name]) > 0

    @property
    def topic(self):
        """The channel topic, or None when it is unset."""
        if self._channel_info['topic']['value'] == '':
            return None
        else:
            return self._channel_info['topic']['value']

    @topic.setter
    def topic(self, topic):
        if self.private:
            log.info("Setting topic of %s (%s) to '%s'" % (str(self), self.id, topic))
            self._bot.api_call('groups.setTopic', data={'channel': self.id, 'topic': topic})
        else:
            log.info("Setting topic of %s (%s) to '%s'" % (str(self), self.id, topic))
            self._bot.api_call('channels.setTopic', data={'channel': self.id, 'topic': topic})

    @property
    def purpose(self):
        """The channel purpose, or None when it is unset."""
        if self._channel_info['purpose']['value'] == '':
            return None
        else:
            return self._channel_info['purpose']['value']

    @purpose.setter
    def purpose(self, purpose):
        if self.private:
            log.info("Setting purpose of %s (%s) to '%s'" % (str(self), self.id, purpose))
            self._bot.api_call('groups.setPurpose', data={'channel': self.id, 'purpose': purpose})
        else:
            log.info("Setting purpose of %s (%s) to '%s'" % (str(self), self.id, purpose))
            self._bot.api_call('channels.setPurpose', data={'channel': self.id, 'purpose': purpose})

    @property
    def occupants(self):
        """The current members of this room as SlackRoomOccupant instances."""
        members = self._channel_info['members']
        # NOTE(review): this passes the username and channel *name* positionally,
        # while build_identifier constructs SlackRoomOccupant from ids --
        # verify SlackRoomOccupant's expected arguments.
        return [SlackRoomOccupant(self.sc, self._bot.userid_to_username(m), self._name, self._bot) for m in members]

    def invite(self, *args):
        """Invite one or more users (by username) into the room.

        Raises:
            UserDoesNotExistError: when a given username is unknown.
            RoomError: when the bot account is not allowed to invite.
            SlackAPIResponseError: on any other API failure (except
                'already_in_channel', which is silently ignored).
        """
        users = {user['name']: user['id'] for user in self._bot.api_call('users.list')['members']}
        for user in args:
            if user not in users:
                raise UserDoesNotExistError("User '%s' not found" % user)
            log.info("Inviting %s into %s (%s)" % (user, str(self), self.id))
            method = 'groups.invite' if self.private else 'channels.invite'
            response = self._bot.api_call(
                method,
                data={'channel': self.id, 'user': users[user]},
                raise_errors=False
            )
            if not response['ok']:
                if response['error'] == "user_is_bot":
                    raise RoomError("Unable to invite people. " + USER_IS_BOT_HELPTEXT)
                elif response['error'] != "already_in_channel":
                    raise SlackAPIResponseError(error="Slack API call to %s failed: %s" % (method, response['error']))

    def __eq__(self, other):
        # Fixed: previously accessed `other.id` unconditionally, raising
        # AttributeError when compared against arbitrary objects.  Returning
        # NotImplemented lets Python fall back to the reflected comparison
        # (and ultimately to False).
        if not isinstance(other, SlackRoom):
            return NotImplemented
        return self.id == other.id
|
"""Visualization and display functions.
Author: Michael Denbina
Copyright 2016 California Institute of Technology. All rights reserved.
United States Government Sponsorship acknowledged.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import numpy as np
import matplotlib.pyplot as plt
def show_linear(data, bounds=None, vmin=None, vmax=None, cmap='viridis',
        cbar=True, cbar_label=None, xlabel='Range Index', ylabel='Azimuth Index',
        figsize=None, dpi=125, rotate=False, hideaxis=False, savefile=None, **kwargs):
    """Display data in linear units (e.g., tree heights, kz).

    Arguments:
        data (array): 2D array containing the values to display.  If None,
            nothing is plotted.
        bounds (tuple): (azimuth start, azimuth end, range start, range end)
            bounds; only that subset is displayed.  A two element tuple
            (azimuth start, azimuth end) selects the full swath in range.
        vmin (float): Minimum value for colormap. Default: None.
        vmax (float): Maximum value for colormap. Default: None.
        cmap: Colormap. Default: 'viridis'.
        cbar (bool): Set to False to not show the colorbar.
        cbar_label (str): Text label on the colorbar.
        xlabel (str): Text label on the x axis.
        ylabel (str): Text label on the y axis.
        figsize (tuple): figsize used to create the matplotlib figure.
        dpi (int): DPI (dots per inch) used in the matplotlib figure.
        rotate (bool): True to show azimuth on x and range on y.
        hideaxis (bool): True to hide the axis ticks and labels.
        savefile (str): If given, the figure is saved under this filename.
        **kwargs: Extra keyword arguments forwarded to imshow().
    """
    if data is None:
        return

    # Create the figure, honoring a custom size/DPI when given.
    if figsize is None:
        plt.figure()
    else:
        plt.figure(figsize=figsize, dpi=dpi)

    # Optional subsetting; a two-element bounds means full swath in range.
    if bounds is not None:
        if len(bounds) == 2:
            bounds = (bounds[0], bounds[1], 0, data.shape[1])
        data = data[bounds[0]:bounds[1], bounds[2]:bounds[3]]

    if rotate:
        # Swap the axes so azimuth runs along x and range along y.
        data = np.flipud(np.transpose(data))
        xlabel, ylabel = ylabel, xlabel
        if bounds is not None:
            bounds = (bounds[2], bounds[3], bounds[0], bounds[1])

    image = np.real(data)
    if bounds is None:
        plt.imshow(image, vmin=vmin, vmax=vmax, cmap=cmap, aspect=1,
                   interpolation='nearest', **kwargs)
    else:
        extent = (bounds[2], bounds[3], bounds[1], bounds[0])
        plt.imshow(image, extent=extent, vmin=vmin, vmax=vmax, cmap=cmap,
                   aspect=1, interpolation='nearest', **kwargs)

    if cbar:
        if cbar_label is not None:
            plt.colorbar(label=cbar_label)
        else:
            plt.colorbar()

    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.tight_layout()

    if hideaxis:
        axes = plt.gca()
        axes.get_xaxis().set_visible(False)
        axes.get_yaxis().set_visible(False)

    if savefile is not None:
        plt.savefig(savefile, dpi=dpi, bbox_inches='tight', pad_inches=0.1)
def show_power(data, bounds=None, vmin=None, vmax=None, cmap='gray',
        cbar=True, cbar_label='Backscatter (dB)', xlabel='Range Index',
        ylabel='Azimuth Index', figsize=None, dpi=125, rotate=False,
        hideaxis=False, savefile=None, **kwargs):
    """Display a power image (e.g., backscatter) in dB units.

    Arguments:
        data (array): 2D array containing the power for each pixel,
            in linear units.  Converted to dB for display.  The input
            array is left unmodified.
        bounds (tuple): (azimuth start, azimuth end, range start, range end)
            bounds; only that subset is displayed.  A two element tuple
            (azimuth start, azimuth end) selects the full swath in range.
        vmin (float): Minimum dB value for colormap. Default: -25.
        vmax (float): Maximum dB value for colormap. Default: 3.
        cmap: Colormap. Default: 'gray'.
        cbar (bool): Set to False to not show colorbar.
        cbar_label (str): Text label on the colorbar.
        xlabel (str): Text label on the x axis.
        ylabel (str): Text label on the y axis.
        figsize (tuple): figsize used to create the matplotlib figure.
        dpi (int): DPI (dots per inch) used in the matplotlib figure.
        rotate (bool): True to show azimuth on x and range on y.
        hideaxis (bool): True to hide the axis ticks and labels.
        savefile (str): If given, the figure is saved under this filename.
        **kwargs: Extra keyword arguments forwarded to imshow().
    """
    if vmin is None:
        vmin = -25
    if vmax is None:
        vmax = 3
    if data is not None:
        if figsize is not None:
            plt.figure(figsize=figsize, dpi=dpi)
        else:
            plt.figure()
        if bounds is not None:
            if len(bounds) == 2:
                bounds = (bounds[0], bounds[1], 0, data.shape[1])
            data = data[bounds[0]:bounds[1],bounds[2]:bounds[3]]
        # BUG FIX: np.real() returns a *view* for real-valued input, so the
        # previous in-place clamp (data[data <= 1e-10] = 1e-10) silently
        # modified the caller's array.  np.maximum allocates a new array with
        # the same clamped values (NaNs pass through unchanged either way).
        data = 10*np.log10(np.maximum(np.real(data), 1e-10))
        if rotate:
            data = np.flipud(np.transpose(data))
            xlabel, ylabel = ylabel, xlabel
            if bounds is not None:
                bounds = (bounds[2], bounds[3], bounds[0], bounds[1])
        if bounds is None:
            plt.imshow(data, vmin=vmin, vmax=vmax, cmap=cmap, aspect=1, interpolation='nearest', **kwargs)
        else:
            plt.imshow(data, extent=(bounds[2],bounds[3],bounds[1],bounds[0]), vmin=vmin, vmax=vmax, cmap=cmap, aspect=1, interpolation='nearest', **kwargs)
        if cbar and (cbar_label is not None):
            plt.colorbar(label=cbar_label)
        elif cbar:
            plt.colorbar()
        plt.xlabel(xlabel)
        plt.ylabel(ylabel)
        plt.tight_layout()
        if hideaxis:
            plt.gca().get_xaxis().set_visible(False)
            plt.gca().get_yaxis().set_visible(False)
        if savefile is not None:
            plt.savefig(savefile, dpi=dpi, bbox_inches='tight', pad_inches=0.1)
def show_complex(data, bounds=None, cbar=False, xlabel='Range Index',
        ylabel='Azimuth Index', figsize=None, dpi=125, rotate=False,
        hideaxis=False, savefile=None, **kwargs):
    """Display a complex-valued image (e.g., coherence) using the HSV color
    system, with the phase as the hue, and the magnitude as saturation and
    value.

    Arguments:
        data (array): 2D complex array containing coherence or other
            complex values to display.
        bounds (tuple): (azimuth start, azimuth end, range start, range end)
            bounds; only that subset is displayed.  A two element tuple
            (azimuth start, azimuth end) selects the full swath in range.
        cbar (bool): Set to True to display a colorbar for the phases
            only (the hues, with full saturation and value).
        xlabel (str): Text label on the x axis.
        ylabel (str): Text label on the y axis.
        figsize (tuple): figsize used to create the matplotlib figure.
        dpi (int): DPI (dots per inch) used in the matplotlib figure.
        rotate (bool): True to show azimuth on x and range on y.
        hideaxis (bool): True to hide the axis ticks and labels.
        savefile (str): If given, the figure is saved under this filename.
        **kwargs: Extra keyword arguments forwarded to imshow().
    """
    if data is not None:
        # Subsetting
        if bounds is not None:
            if len(bounds) == 2:
                bounds = (bounds[0], bounds[1], 0, data.shape[1])
            data = data[bounds[0]:bounds[1],bounds[2]:bounds[3]]

        # HSV based on magnitude and phase of data.
        h = np.clip(np.angle(data)/(2*np.pi) + 0.5, 0, 1)
        s = np.clip(np.abs(data), 0, 1)
        v = np.clip(np.abs(data), 0, 1)

        # Manual HSV -> RGB conversion, one sextant of the hue wheel at a time.
        red = np.zeros((data.shape[0], data.shape[1]), dtype='float32')
        green = np.zeros((data.shape[0], data.shape[1]), dtype='float32')
        blue = np.zeros((data.shape[0], data.shape[1]), dtype='float32')

        # Zero saturation -> achromatic (gray) pixels.
        ind = (s == 0)
        if np.any(ind):
            red[ind] = v[ind]
            green[ind] = v[ind]
            blue[ind] = v[ind]

        a = (h*6.0).astype('int')
        f = (h*6.0) - a
        p = v*(1.0 - s)
        q = v*(1.0 - s*f)
        t = v*(1.0 - s*(1.0-f))
        a = a % 6

        ind = (a == 0)
        if np.any(ind):
            red[ind] = v[ind]
            green[ind] = t[ind]
            blue[ind] = p[ind]
        ind = (a == 1)
        if np.any(ind):
            red[ind] = q[ind]
            green[ind] = v[ind]
            blue[ind] = p[ind]
        ind = (a == 2)
        if np.any(ind):
            red[ind] = p[ind]
            green[ind] = v[ind]
            blue[ind] = t[ind]
        ind = (a == 3)
        if np.any(ind):
            red[ind] = p[ind]
            green[ind] = q[ind]
            blue[ind] = v[ind]
        ind = (a == 4)
        if np.any(ind):
            red[ind] = t[ind]
            green[ind] = p[ind]
            blue[ind] = v[ind]
        ind = (a == 5)
        if np.any(ind):
            red[ind] = v[ind]
            green[ind] = p[ind]
            blue[ind] = q[ind]

        if figsize is not None:
            plt.figure(figsize=figsize, dpi=dpi)
        else:
            plt.figure()

        if rotate:
            # BUG FIX: the red channel was previously rebuilt from the raw
            # complex `data` (red = np.flipud(np.transpose(data))) instead of
            # from the computed red channel, corrupting every rotated display.
            red = np.flipud(np.transpose(red))
            green = np.flipud(np.transpose(green))
            blue = np.flipud(np.transpose(blue))
            xlabel, ylabel = ylabel, xlabel
            if bounds is not None:
                bounds = (bounds[2], bounds[3], bounds[0], bounds[1])

        if bounds is None:
            plt.imshow(np.dstack((red,green,blue)), aspect=1, interpolation='nearest', **kwargs)
        else:
            plt.imshow(np.dstack((red,green,blue)), extent=(bounds[2],bounds[3],bounds[1],bounds[0]), aspect=1, interpolation='nearest', **kwargs)

        if cbar is True:
            # Fully transparent overlay image solely to drive the phase colorbar.
            if bounds is None:
                plt.imshow(red, aspect=1, interpolation='nearest', cmap='hsv', vmin=-np.pi, vmax=np.pi, alpha=0.0, **kwargs)
            else:
                plt.imshow(red, extent=(bounds[2],bounds[3],bounds[1],bounds[0]), aspect=1, interpolation='nearest', cmap='hsv', vmin=-np.pi, vmax=np.pi, alpha=0.0, **kwargs)
            cbar = plt.colorbar(label='Phase (radians)')
            cbar.set_alpha(1)
            cbar.draw_all()

        plt.xlabel(xlabel)
        plt.ylabel(ylabel)
        plt.tight_layout()
        if hideaxis:
            plt.gca().get_xaxis().set_visible(False)
            plt.gca().get_yaxis().set_visible(False)
        if savefile is not None:
            plt.savefig(savefile, dpi=dpi, bbox_inches='tight', pad_inches=0.1)
def _pauli_power(cov, w):
    """Real-valued power of `cov` projected onto Pauli weight vector `w`.

    Computes Re(sum_ij cov[..., i, j] * w[i] * w[j]) per pixel.  The weight
    vectors used here are real, so no conjugation is required.
    """
    wimage = np.array([[w[0]*w[0], w[0]*w[1], w[0]*w[2]],
                       [w[1]*w[0], w[1]*w[1], w[1]*w[2]],
                       [w[2]*w[0], w[2]*w[1], w[2]*w[2]]], dtype='complex64')
    return np.real(np.sum(cov*wimage, axis=(2, 3)))


def show_paulirgb(cov, bounds=None, vmin=None, vmax=None, xlabel='Range Index',
        ylabel='Azimuth Index', figsize=None, dpi=125, rotate=False,
        hideaxis=False, savefile=None, **kwargs):
    """Display a Pauli RGB color composite image from a covariance matrix.

    Color mapping is as follows. Red: 0.5*(HH-VV). Green: 2*HV.
    Blue: 0.5*(HH+VV).

    Arguments:
        cov (array): Array containing a single track's
            polarimetric covariance matrix with dimensions (az, rng, 3, 3).
        bounds (tuple): (azimuth start, azimuth end, range start, range end)
            bounds; only that subset is displayed.  A two element tuple
            (azimuth start, azimuth end) selects the full swath in range.
        vmin (int): Minimum value in dB of color range. Default: -25.
        vmax (int): Maximum value in dB of color range. Default: 3.
        xlabel (str): Text label on the x axis.
        ylabel (str): Text label on the y axis.
        figsize (tuple): figsize used to create the matplotlib figure.
        dpi (int): DPI (dots per inch) used in the matplotlib figure.
        rotate (bool): True to show azimuth on x and range on y.
        hideaxis (bool): True to hide the axis ticks and labels.
        savefile (str): If given, the figure is saved under this filename.
        **kwargs: Extra keyword arguments forwarded to imshow().
    """
    if vmin is None:
        vmin = -25
    if vmax is None:
        vmax = 3

    if bounds is not None:
        if len(bounds) == 2:
            bounds = (bounds[0], bounds[1], 0, cov.shape[1])
        cov = cov[bounds[0]:bounds[1], bounds[2]:bounds[3]]

    # The three Pauli channels share the same projection math -- factored
    # into _pauli_power() instead of repeating the 3x3 product inline.
    rgb = np.zeros((cov.shape[0], cov.shape[1], 3))
    # Red: (HH-VV)/2
    rgb[:, :, 0] = _pauli_power(cov, np.array([1, 0, -1]/np.sqrt(2), dtype='complex64'))
    # Green: (2*HV)
    rgb[:, :, 1] = _pauli_power(cov, np.array([0, np.sqrt(2), 0], dtype='complex64'))
    # Blue: (HH+VV)/2
    rgb[:, :, 2] = _pauli_power(cov, np.array([1, 0, 1]/np.sqrt(2), dtype='complex64'))

    # Convert to dB, then scale into [0, 1] over the [vmin, vmax] dB range.
    rgb[rgb <= 1e-10] = 1e-10
    rgb = 10*np.log10(rgb)
    rgb = (rgb-vmin)/(vmax-vmin)
    rgb[rgb < 0] = 0
    rgb[rgb > 1] = 1

    if rotate:
        rgb = np.flipud(np.transpose(rgb, (1, 0, 2)))
        xlabel, ylabel = ylabel, xlabel
        if bounds is not None:
            bounds = (bounds[2], bounds[3], bounds[0], bounds[1])

    if figsize is not None:
        plt.figure(figsize=figsize, dpi=dpi)
    else:
        plt.figure()

    if bounds is None:
        plt.imshow(rgb, aspect=1, interpolation='nearest', **kwargs)
    else:
        plt.imshow(rgb, extent=(bounds[2], bounds[3], bounds[1], bounds[0]), aspect=1, interpolation='nearest', **kwargs)

    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    plt.tight_layout()
    if hideaxis:
        plt.gca().get_xaxis().set_visible(False)
        plt.gca().get_yaxis().set_visible(False)
    if savefile is not None:
        plt.savefig(savefile, dpi=dpi, bbox_inches='tight', pad_inches=0.1)
|
from PyQt4 import QtCore, QtGui
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # No QString available (e.g. PyQt4 API v2 / Python 3 builds):
    # strings pass through unchanged.
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Builds without UnicodeUTF8 take translate() without the encoding argument.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_PipelineSegmentDialog(object):
def setupUi(self, PipelineSegmentDialog):
    """Build the widget tree for the add/edit pipeline segment dialog.

    Appears to be auto-generated (pyuic-style) UI code: widgets are created
    with fixed geometries, retranslateUi() applies the user-visible strings,
    the button box is wired to accept/reject, and the tab order is set.
    """
    # Dialog window: fixed width 588, height constrained to 480-610.
    PipelineSegmentDialog.setObjectName(_fromUtf8("PipelineSegmentDialog"))
    PipelineSegmentDialog.resize(588, 610)
    PipelineSegmentDialog.setMinimumSize(QtCore.QSize(588, 480))
    PipelineSegmentDialog.setMaximumSize(QtCore.QSize(588, 610))
    # OK / Cancel button box at the bottom right.
    self.buttonBox = QtGui.QDialogButtonBox(PipelineSegmentDialog)
    self.buttonBox.setGeometry(QtCore.QRect(405, 570, 166, 32))
    self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
    self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
    self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
    # Single-tab tab widget hosting all the input fields.
    self.tabWidget = QtGui.QTabWidget(PipelineSegmentDialog)
    self.tabWidget.setGeometry(QtCore.QRect(20, 10, 556, 551))
    self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
    self.tab = QtGui.QWidget()
    self.tab.setObjectName(_fromUtf8("tab"))
    # Left column: segment number, length, installation date and
    # inspection settings.  Date edits start disabled; the adjacent
    # checkboxes presumably enable them (wiring not visible here).
    self.inspection_interval_sbox = QtGui.QDoubleSpinBox(self.tab)
    self.inspection_interval_sbox.setGeometry(QtCore.QRect(20, 170, 196, 22))
    self.inspection_interval_sbox.setObjectName(_fromUtf8("inspection_interval_sbox"))
    self.installation_date_edit = QtGui.QDateEdit(self.tab)
    self.installation_date_edit.setEnabled(False)
    self.installation_date_edit.setGeometry(QtCore.QRect(45, 120, 171, 22))
    self.installation_date_edit.setCalendarPopup(True)
    self.installation_date_edit.setObjectName(_fromUtf8("installation_date_edit"))
    self.installation_date_chbox = QtGui.QCheckBox(self.tab)
    self.installation_date_chbox.setGeometry(QtCore.QRect(20, 125, 26, 17))
    self.installation_date_chbox.setText(_fromUtf8(""))
    self.installation_date_chbox.setObjectName(_fromUtf8("installation_date_chbox"))
    self.segment_no_edit = QtGui.QLineEdit(self.tab)
    self.segment_no_edit.setGeometry(QtCore.QRect(20, 30, 196, 20))
    self.segment_no_edit.setObjectName(_fromUtf8("segment_no_edit"))
    self.label_2 = QtGui.QLabel(self.tab)
    self.label_2.setGeometry(QtCore.QRect(20, 60, 126, 16))
    self.label_2.setObjectName(_fromUtf8("label_2"))
    self.label_3 = QtGui.QLabel(self.tab)
    self.label_3.setGeometry(QtCore.QRect(20, 105, 126, 16))
    self.label_3.setObjectName(_fromUtf8("label_3"))
    self.label_5 = QtGui.QLabel(self.tab)
    self.label_5.setGeometry(QtCore.QRect(20, 155, 126, 16))
    self.label_5.setObjectName(_fromUtf8("label_5"))
    self.label = QtGui.QLabel(self.tab)
    self.label.setGeometry(QtCore.QRect(20, 15, 126, 16))
    self.label.setObjectName(_fromUtf8("label"))
    self.inspection_interval_unit_cbox = QtGui.QComboBox(self.tab)
    self.inspection_interval_unit_cbox.setGeometry(QtCore.QRect(20, 215, 196, 22))
    self.inspection_interval_unit_cbox.setObjectName(_fromUtf8("inspection_interval_unit_cbox"))
    self.label_6 = QtGui.QLabel(self.tab)
    self.label_6.setGeometry(QtCore.QRect(20, 200, 126, 16))
    self.label_6.setObjectName(_fromUtf8("label_6"))
    self.label_7 = QtGui.QLabel(self.tab)
    self.label_7.setGeometry(QtCore.QRect(20, 290, 126, 16))
    self.label_7.setObjectName(_fromUtf8("label_7"))
    # Operating state and its status-change date.
    self.operating_state_cbox = QtGui.QComboBox(self.tab)
    self.operating_state_cbox.setGeometry(QtCore.QRect(20, 305, 196, 22))
    self.operating_state_cbox.setObjectName(_fromUtf8("operating_state_cbox"))
    self.status_change_date_edit = QtGui.QDateEdit(self.tab)
    self.status_change_date_edit.setEnabled(False)
    self.status_change_date_edit.setGeometry(QtCore.QRect(45, 350, 171, 22))
    self.status_change_date_edit.setCalendarPopup(True)
    self.status_change_date_edit.setObjectName(_fromUtf8("status_change_date_edit"))
    self.status_change_date_chbox = QtGui.QCheckBox(self.tab)
    self.status_change_date_chbox.setGeometry(QtCore.QRect(20, 355, 26, 17))
    self.status_change_date_chbox.setText(_fromUtf8(""))
    self.status_change_date_chbox.setObjectName(_fromUtf8("status_change_date_chbox"))
    self.label_8 = QtGui.QLabel(self.tab)
    self.label_8.setGeometry(QtCore.QRect(20, 335, 126, 16))
    self.label_8.setObjectName(_fromUtf8("label_8"))
    self.label_9 = QtGui.QLabel(self.tab)
    self.label_9.setGeometry(QtCore.QRect(270, 15, 126, 16))
    self.label_9.setObjectName(_fromUtf8("label_9"))
    # Right column: pipeline classification combo boxes.
    self.pipeline_type_cbox = QtGui.QComboBox(self.tab)
    self.pipeline_type_cbox.setGeometry(QtCore.QRect(270, 30, 196, 22))
    self.pipeline_type_cbox.setObjectName(_fromUtf8("pipeline_type_cbox"))
    self.label_10 = QtGui.QLabel(self.tab)
    self.label_10.setGeometry(QtCore.QRect(270, 60, 126, 16))
    self.label_10.setObjectName(_fromUtf8("label_10"))
    self.mounting_type_cbox = QtGui.QComboBox(self.tab)
    self.mounting_type_cbox.setGeometry(QtCore.QRect(270, 75, 196, 22))
    self.mounting_type_cbox.setObjectName(_fromUtf8("mounting_type_cbox"))
    self.label_11 = QtGui.QLabel(self.tab)
    self.label_11.setGeometry(QtCore.QRect(270, 105, 126, 16))
    self.label_11.setObjectName(_fromUtf8("label_11"))
    self.material_cbox = QtGui.QComboBox(self.tab)
    self.material_cbox.setGeometry(QtCore.QRect(270, 120, 196, 22))
    self.material_cbox.setObjectName(_fromUtf8("material_cbox"))
    self.label_12 = QtGui.QLabel(self.tab)
    self.label_12.setGeometry(QtCore.QRect(270, 155, 126, 16))
    self.label_12.setObjectName(_fromUtf8("label_12"))
    self.placement_type_cbox = QtGui.QComboBox(self.tab)
    self.placement_type_cbox.setGeometry(QtCore.QRect(270, 170, 196, 22))
    self.placement_type_cbox.setObjectName(_fromUtf8("placement_type_cbox"))
    self.label_13 = QtGui.QLabel(self.tab)
    self.label_13.setGeometry(QtCore.QRect(270, 200, 126, 16))
    self.label_13.setObjectName(_fromUtf8("label_13"))
    self.nominal_width_cbox = QtGui.QComboBox(self.tab)
    self.nominal_width_cbox.setGeometry(QtCore.QRect(270, 215, 196, 22))
    self.nominal_width_cbox.setObjectName(_fromUtf8("nominal_width_cbox"))
    # Numeric dimensions (spin boxes range -100.0 to 2000.0).
    self.outside_diameter_sbox = QtGui.QDoubleSpinBox(self.tab)
    self.outside_diameter_sbox.setGeometry(QtCore.QRect(270, 305, 196, 22))
    self.outside_diameter_sbox.setMinimum(-100.0)
    self.outside_diameter_sbox.setMaximum(2000.0)
    self.outside_diameter_sbox.setObjectName(_fromUtf8("outside_diameter_sbox"))
    self.label_14 = QtGui.QLabel(self.tab)
    self.label_14.setGeometry(QtCore.QRect(270, 290, 126, 16))
    self.label_14.setObjectName(_fromUtf8("label_14"))
    self.label_15 = QtGui.QLabel(self.tab)
    self.label_15.setGeometry(QtCore.QRect(20, 380, 126, 16))
    self.label_15.setObjectName(_fromUtf8("label_15"))
    self.network_cbox = QtGui.QComboBox(self.tab)
    self.network_cbox.setGeometry(QtCore.QRect(20, 395, 196, 22))
    self.network_cbox.setObjectName(_fromUtf8("network_cbox"))
    self.positional_accuracy_cbox = QtGui.QComboBox(self.tab)
    self.positional_accuracy_cbox.setGeometry(QtCore.QRect(270, 265, 196, 22))
    self.positional_accuracy_cbox.setObjectName(_fromUtf8("positional_accuracy_cbox"))
    self.label_17 = QtGui.QLabel(self.tab)
    self.label_17.setGeometry(QtCore.QRect(270, 250, 126, 16))
    self.label_17.setObjectName(_fromUtf8("label_17"))
    self.label_18 = QtGui.QLabel(self.tab)
    self.label_18.setGeometry(QtCore.QRect(270, 335, 126, 16))
    self.label_18.setObjectName(_fromUtf8("label_18"))
    self.wall_thickness_sbox = QtGui.QDoubleSpinBox(self.tab)
    self.wall_thickness_sbox.setGeometry(QtCore.QRect(270, 350, 196, 22))
    self.wall_thickness_sbox.setMinimum(-100.0)
    self.wall_thickness_sbox.setMaximum(2000.0)
    self.wall_thickness_sbox.setObjectName(_fromUtf8("wall_thickness_sbox"))
    self.label_19 = QtGui.QLabel(self.tab)
    self.label_19.setGeometry(QtCore.QRect(270, 380, 126, 16))
    self.label_19.setObjectName(_fromUtf8("label_19"))
    self.height_at_start_sbox = QtGui.QDoubleSpinBox(self.tab)
    self.height_at_start_sbox.setGeometry(QtCore.QRect(270, 395, 196, 22))
    self.height_at_start_sbox.setMinimum(-100.0)
    self.height_at_start_sbox.setMaximum(2000.0)
    self.height_at_start_sbox.setObjectName(_fromUtf8("height_at_start_sbox"))
    self.label_20 = QtGui.QLabel(self.tab)
    self.label_20.setGeometry(QtCore.QRect(270, 425, 126, 16))
    self.label_20.setObjectName(_fromUtf8("label_20"))
    self.height_at_end_sbox = QtGui.QDoubleSpinBox(self.tab)
    self.height_at_end_sbox.setGeometry(QtCore.QRect(270, 440, 196, 22))
    self.height_at_end_sbox.setMinimum(-100.0)
    self.height_at_end_sbox.setMaximum(2000.0)
    self.height_at_end_sbox.setObjectName(_fromUtf8("height_at_end_sbox"))
    self.date_of_last_inspection_chbox = QtGui.QCheckBox(self.tab)
    self.date_of_last_inspection_chbox.setGeometry(QtCore.QRect(20, 270, 26, 17))
    self.date_of_last_inspection_chbox.setText(_fromUtf8(""))
    self.date_of_last_inspection_chbox.setObjectName(_fromUtf8("date_of_last_inspection_chbox"))
    self.date_of_last_inspection_edit = QtGui.QDateEdit(self.tab)
    self.date_of_last_inspection_edit.setEnabled(False)
    self.date_of_last_inspection_edit.setGeometry(QtCore.QRect(45, 265, 171, 22))
    self.date_of_last_inspection_edit.setCalendarPopup(True)
    self.date_of_last_inspection_edit.setObjectName(_fromUtf8("date_of_last_inspection_edit"))
    self.label_21 = QtGui.QLabel(self.tab)
    self.label_21.setGeometry(QtCore.QRect(20, 250, 126, 16))
    self.label_21.setObjectName(_fromUtf8("label_21"))
    # Calculated length is display-only.
    self.length_edit = QtGui.QLineEdit(self.tab)
    self.length_edit.setGeometry(QtCore.QRect(20, 75, 196, 20))
    self.length_edit.setReadOnly(True)
    self.length_edit.setObjectName(_fromUtf8("length_edit"))
    self.tabWidget.addTab(self.tab, _fromUtf8(""))
    # Apply translations, wire signals, and define keyboard tab order.
    self.retranslateUi(PipelineSegmentDialog)
    self.tabWidget.setCurrentIndex(0)
    QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), PipelineSegmentDialog.accept)
    QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), PipelineSegmentDialog.reject)
    QtCore.QMetaObject.connectSlotsByName(PipelineSegmentDialog)
    PipelineSegmentDialog.setTabOrder(self.tabWidget, self.segment_no_edit)
    PipelineSegmentDialog.setTabOrder(self.segment_no_edit, self.length_edit)
    PipelineSegmentDialog.setTabOrder(self.length_edit, self.installation_date_chbox)
    PipelineSegmentDialog.setTabOrder(self.installation_date_chbox, self.installation_date_edit)
    PipelineSegmentDialog.setTabOrder(self.installation_date_edit, self.inspection_interval_sbox)
    PipelineSegmentDialog.setTabOrder(self.inspection_interval_sbox, self.inspection_interval_unit_cbox)
    PipelineSegmentDialog.setTabOrder(self.inspection_interval_unit_cbox, self.date_of_last_inspection_chbox)
    PipelineSegmentDialog.setTabOrder(self.date_of_last_inspection_chbox, self.date_of_last_inspection_edit)
    PipelineSegmentDialog.setTabOrder(self.date_of_last_inspection_edit, self.operating_state_cbox)
    PipelineSegmentDialog.setTabOrder(self.operating_state_cbox, self.status_change_date_chbox)
    PipelineSegmentDialog.setTabOrder(self.status_change_date_chbox, self.status_change_date_edit)
    PipelineSegmentDialog.setTabOrder(self.status_change_date_edit, self.network_cbox)
    PipelineSegmentDialog.setTabOrder(self.network_cbox, self.pipeline_type_cbox)
    PipelineSegmentDialog.setTabOrder(self.pipeline_type_cbox, self.mounting_type_cbox)
    PipelineSegmentDialog.setTabOrder(self.mounting_type_cbox, self.material_cbox)
    PipelineSegmentDialog.setTabOrder(self.material_cbox, self.placement_type_cbox)
    PipelineSegmentDialog.setTabOrder(self.placement_type_cbox, self.nominal_width_cbox)
    PipelineSegmentDialog.setTabOrder(self.nominal_width_cbox, self.positional_accuracy_cbox)
    PipelineSegmentDialog.setTabOrder(self.positional_accuracy_cbox, self.outside_diameter_sbox)
    PipelineSegmentDialog.setTabOrder(self.outside_diameter_sbox, self.wall_thickness_sbox)
    PipelineSegmentDialog.setTabOrder(self.wall_thickness_sbox, self.height_at_start_sbox)
    PipelineSegmentDialog.setTabOrder(self.height_at_start_sbox, self.height_at_end_sbox)
    PipelineSegmentDialog.setTabOrder(self.height_at_end_sbox, self.buttonBox)
def retranslateUi(self, PipelineSegmentDialog):
    """Apply translatable display strings to the dialog's widgets.

    Auto-generated Qt slot: sets the window title, the date-editor
    display formats, the form labels and the tab caption through the
    module-level ``_translate`` helper so all texts can be localized.
    """
    def tr(text):
        return _translate("PipelineSegmentDialog", text, None)

    PipelineSegmentDialog.setWindowTitle(tr("Add / Edit Pipeline Segment"))

    # All three date editors share the same ISO-8601 display format.
    for date_edit in (self.installation_date_edit,
                      self.status_change_date_edit,
                      self.date_of_last_inspection_edit):
        date_edit.setDisplayFormat(tr("yyyy-MM-dd"))

    # Static captions for the form labels, in one table instead of
    # twenty individual setText calls.
    label_texts = (
        (self.label, "Segment Number"),
        (self.label_2, "Calculated Length [m]"),
        (self.label_3, "Installation Date"),
        (self.label_5, "Inspection Interval"),
        (self.label_6, "Inspection Interval Unit"),
        (self.label_7, "Operating State"),
        (self.label_8, "Date of Status Change"),
        (self.label_9, "Pipeline Type"),
        (self.label_10, "Mounting Type"),
        (self.label_11, "Material"),
        (self.label_12, "Placement Type"),
        (self.label_13, "Nominal Width"),
        (self.label_14, "Outside Diameter [mm]"),
        (self.label_15, "Network"),
        (self.label_17, "Positional Accuracy"),
        (self.label_18, "Wall Thickness [mm]"),
        (self.label_19, "Height At Begin [m]"),
        (self.label_20, "Height At End [m]"),
        (self.label_21, "Date of last inspection"),
    )
    for label, text in label_texts:
        label.setText(tr(text))

    self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab),
                              tr("Description"))
import resources_rc
|
from kivy.app import App
from kivy.lang import Builder
from kivy.uix.screenmanager import ScreenManager, Screen, WipeTransition
from kivy.properties import ObjectProperty
from kivy.uix.widget import Widget
class MenuScreen(Screen):
    """Main-menu screen; its layout and behaviour come from hue.kv."""
class NewGameScreen(Screen):
    # NOTE(review): this screen class is defined but never added to the
    # ScreenManager created below -- confirm whether it is still needed.
    class XY(Widget):
        def on_touch_down(self, touch):
            # NOTE(review): the canvas context adds no draw instructions
            # here; the handler only switches the active screen. Confirm
            # the `with self.canvas:` wrapper is intentional.
            with self.canvas:
                sm.current="load"
class LoadGameScreen(Screen):
    # Screen registered under the name "load" on the ScreenManager below.
    class YX(Widget):
        def on_touch_down(self, touch):
            # NOTE(review): the canvas context adds no draw instructions
            # here; a touch simply returns to the menu screen. Confirm
            # the `with self.canvas:` wrapper is intentional.
            with self.canvas:
                sm.current="menu"
# Load the kv layout rules for the screens defined above.
buildKV = Builder.load_file("hue.kv")
# Module-level screen manager; the on_touch_down handlers above mutate
# sm.current to switch screens.
# NOTE(review): NewGameScreen is never added here -- only "menu" and
# "load" are registered. Confirm that is intended.
sm=ScreenManager()
sm.add_widget(MenuScreen(name="menu"))
sm.add_widget(LoadGameScreen(name="load"))
class HueApp(App):
    """Kivy application whose root widget is the global screen manager."""

    def build(self):
        """Return the module-level ScreenManager as the root widget."""
        root = sm
        return root
# Script entry point: start the Kivy event loop.
if __name__ == "__main__":
    HueApp().run()
|
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from itertools import chain
class Migration(DataMigration):
def forwards(self, orm):
    """Link every Sono and Emg sensor to a MuscleOwl ontology term.

    Matches on the sensor's controlled anatomical-location label with a
    case-insensitive substring search against MuscleOwl labels. Sensors
    with no matching term get muscle = None (the field is nullable in
    this migration's frozen schema).
    """
    sensors = chain(orm.SonoSensor.objects.all(), orm.EmgSensor.objects.all())
    for sensor in sensors:
        # TODO: improve matching, perhaps with hardcoded lookup table
        loc = sensor.location_controlled
        print "Location: %s" % loc.label
        # .first() yields None when no ontology label contains loc.label.
        m = orm.MuscleOwl.objects.filter(label__icontains=loc.label).first()
        print "Muscle: %s" % m
        sensor.muscle = m
        sensor.save()
def backwards(self, orm):
    """No-op: the previous muscle links are not restored (irreversible)."""
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'feed.ageunit': {
'Meta': {'ordering': "['label']", 'object_name': 'AgeUnit'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'ageunit_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.anatomicallocation': {
'Meta': {'ordering': "['label']", 'object_name': 'AnatomicalLocation'},
'category': ('django.db.models.fields.IntegerField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'anatomicallocation_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.anteriorposterioraxis': {
'Meta': {'object_name': 'AnteriorPosteriorAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'anteriorposterioraxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.behavior': {
'Meta': {'ordering': "['label']", 'object_name': 'Behavior'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'behavior_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.behaviorowl': {
'Meta': {'object_name': 'BehaviorOwl'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'obo_definition': ('django.db.models.fields.TextField', [], {}),
'rdfs_comment': ('django.db.models.fields.TextField', [], {}),
'rdfs_subClassOf_ancestors': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['feed.BehaviorOwl']", 'symmetrical': 'False'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '1500'})
},
u'feed.channel': {
'Meta': {'object_name': 'Channel'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'channel_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rate': ('django.db.models.fields.IntegerField', [], {}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Setup']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.channellineup': {
'Meta': {'ordering': "['position']", 'object_name': 'ChannelLineup'},
'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Channel']", 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'channellineup_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Session']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.depthaxis': {
'Meta': {'object_name': 'DepthAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'depthaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.developmentstage': {
'Meta': {'ordering': "['label']", 'object_name': 'DevelopmentStage'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'developmentstage_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.dorsalventralaxis': {
'Meta': {'object_name': 'DorsalVentralAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dorsalventralaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.electrodetype': {
'Meta': {'ordering': "['label']", 'object_name': 'ElectrodeType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'electrodetype_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.emgchannel': {
'Meta': {'object_name': 'EmgChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'emg_amplification': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'emg_filtering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Emgfiltering']"}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.EmgSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']"})
},
u'feed.emgfiltering': {
'Meta': {'ordering': "['label']", 'object_name': 'Emgfiltering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'emgfiltering_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.emgsensor': {
'Meta': {'ordering': "['id']", 'object_name': 'EmgSensor', '_ormbases': [u'feed.Sensor']},
'axisdepth': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DepthAxis']", 'null': 'True', 'blank': 'True'}),
'electrode_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.ElectrodeType']", 'null': 'True', 'blank': 'True'}),
'location_controlled': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AnatomicalLocation']"}),
'muscle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.MuscleOwl']", 'null': 'True'}),
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.emgsetup': {
'Meta': {'object_name': 'EmgSetup', '_ormbases': [u'feed.Setup']},
'preamplifier': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.eventchannel': {
'Meta': {'object_name': 'EventChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'feed.eventsetup': {
'Meta': {'object_name': 'EventSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.experiment': {
'Meta': {'object_name': 'Experiment'},
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'experiment_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'impl_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Study']"}),
'subj_age': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '5', 'blank': 'True'}),
'subj_ageunit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AgeUnit']", 'null': 'True', 'blank': 'True'}),
'subj_devstage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DevelopmentStage']"}),
'subj_tooth': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'subj_weight': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '19', 'decimal_places': '5', 'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Subject']"}),
'subject_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.forcechannel': {
'Meta': {'object_name': 'ForceChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.ForceSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.forcesensor': {
'Meta': {'object_name': 'ForceSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.forcesetup': {
'Meta': {'object_name': 'ForceSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.illustration': {
'Meta': {'object_name': 'Illustration'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'illustration_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Experiment']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'picture': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Setup']", 'null': 'True', 'blank': 'True'}),
'subject': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Subject']", 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.kinematicschannel': {
'Meta': {'object_name': 'KinematicsChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.KinematicsSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.kinematicssensor': {
'Meta': {'object_name': 'KinematicsSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.kinematicssetup': {
'Meta': {'object_name': 'KinematicsSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.mediallateralaxis': {
'Meta': {'object_name': 'MedialLateralAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'mediallateralaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.muscleowl': {
'Meta': {'object_name': 'MuscleOwl'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'obo_definition': ('django.db.models.fields.TextField', [], {}),
'rdfs_comment': ('django.db.models.fields.TextField', [], {}),
'rdfs_subClassOf_ancestors': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['feed.MuscleOwl']", 'symmetrical': 'False'}),
'uri': ('django.db.models.fields.CharField', [], {'max_length': '1500'})
},
u'feed.pressurechannel': {
'Meta': {'object_name': 'PressureChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.PressureSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.pressuresensor': {
'Meta': {'object_name': 'PressureSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.pressuresetup': {
'Meta': {'object_name': 'PressureSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.proximaldistalaxis': {
'Meta': {'object_name': 'ProximalDistalAxis'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'proximaldistalaxis_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.restraint': {
'Meta': {'ordering': "['label']", 'object_name': 'Restraint'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'restraint_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.sensor': {
'Meta': {'object_name': 'Sensor'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sensor_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loc_ap': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AnteriorPosteriorAxis']", 'null': 'True', 'blank': 'True'}),
'loc_dv': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DorsalVentralAxis']", 'null': 'True', 'blank': 'True'}),
'loc_ml': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.MedialLateralAxis']", 'null': 'True', 'blank': 'True'}),
'loc_pd': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.ProximalDistalAxis']", 'null': 'True', 'blank': 'True'}),
'loc_side': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Side']"}),
'location_freetext': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'setup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Setup']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.session': {
'Meta': {'ordering': "['position']", 'object_name': 'Session'},
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'channels': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['feed.Channel']", 'through': u"orm['feed.ChannelLineup']", 'symmetrical': 'False'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'session_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Experiment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'subj_anesthesia_sedation': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'subj_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'subj_restraint': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Restraint']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.setup': {
'Meta': {'object_name': 'Setup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'setup_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'experiment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Experiment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sampling_rate': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'technique': ('django.db.models.fields.IntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.side': {
'Meta': {'ordering': "['label']", 'object_name': 'Side'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'side_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.sonochannel': {
'Meta': {'object_name': 'SonoChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'crystal1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'crystals1_related'", 'to': u"orm['feed.SonoSensor']"}),
'crystal2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'crystals2_related'", 'to': u"orm['feed.SonoSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']"})
},
u'feed.sonosensor': {
'Meta': {'object_name': 'SonoSensor', '_ormbases': [u'feed.Sensor']},
'axisdepth': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.DepthAxis']", 'null': 'True', 'blank': 'True'}),
'location_controlled': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.AnatomicalLocation']"}),
'muscle': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.MuscleOwl']", 'null': 'True'}),
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.sonosetup': {
'Meta': {'object_name': 'SonoSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'}),
'sonomicrometer': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'feed.strainchannel': {
'Meta': {'object_name': 'StrainChannel', '_ormbases': [u'feed.Channel']},
u'channel_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Channel']", 'unique': 'True', 'primary_key': 'True'}),
'sensor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.StrainSensor']"}),
'unit': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Unit']", 'null': 'True'})
},
u'feed.strainsensor': {
'Meta': {'object_name': 'StrainSensor', '_ormbases': [u'feed.Sensor']},
u'sensor_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Sensor']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.strainsetup': {
'Meta': {'object_name': 'StrainSetup', '_ormbases': [u'feed.Setup']},
u'setup_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['feed.Setup']", 'unique': 'True', 'primary_key': 'True'})
},
u'feed.study': {
'Meta': {'ordering': "['title']", 'object_name': 'Study'},
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'approval_secured': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'study_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'funding_agency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resources': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'start': ('django.db.models.fields.DateTimeField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.studyprivate': {
'Meta': {'object_name': 'StudyPrivate'},
'approval': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'studyprivate_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'funding': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lab': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pi': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Study']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.subject': {
'Meta': {'object_name': 'Subject'},
'breed': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subject_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sex': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'study': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Study']"}),
'taxon': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Taxon']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.taxon': {
'Meta': {'ordering': "['genus']", 'object_name': 'Taxon'},
'common_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'taxon_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'genus': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'species': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
},
u'feed.trial': {
'Meta': {'object_name': 'Trial'},
'accession': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'behavior_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'behavior_primary': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Behavior']"}),
'behavior_secondary': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'bookkeeping': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'trial_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
'data_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'estimated_duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'food_property': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'food_size': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'food_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {}),
'session': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['feed.Session']"}),
'start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'subj_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'subj_treatment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {}),
'waveform_picture': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'feed.unit': {
'Meta': {'ordering': "['technique', 'label']", 'object_name': 'Unit'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'unit_related'", 'null': 'True', 'to': u"orm['auth.User']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'technique': ('django.db.models.fields.IntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {})
}
}
complete_apps = ['feed']
symmetrical = True
|
import datetime
import json
import os
import re
import traceback
from operator import itemgetter
from urllib.parse import unquote_plus
import dateutil.parser
from tornado.escape import xhtml_unescape
from tornado.web import HTTPError
import sickchill
from sickchill import logger, settings
from sickchill.helper import sanitize_filename, try_int
from sickchill.oldbeard import config, db, filters, helpers, ui
from sickchill.oldbeard.blackandwhitelist import short_group_names
from sickchill.oldbeard.common import Quality
from sickchill.oldbeard.show_name_helpers import containsAtLeastOneWord
from sickchill.oldbeard.trakt_api import TraktAPI
from sickchill.oldbeard.traktTrending import trakt_trending
from sickchill.show.recommendations.favorites import favorites
from sickchill.show.recommendations.imdb import imdb_popular
from sickchill.show.Show import Show
from sickchill.views.common import PageTemplate
from sickchill.views.home import Home
from sickchill.views.routes import Route
@Route("/addShows(/?.*)", name="addShows")
class AddShows(Home):
    def __init__(self, *args, **kwargs):
        """Initialize the handler; defers entirely to Home.__init__ (kept for explicitness)."""
        super().__init__(*args, **kwargs)
def index(self, *args_, **kwargs_):
t = PageTemplate(rh=self, filename="addShows.mako")
return t.render(title=_("Add Shows"), header=_("Add Shows"), topmenu="home", controller="addShows", action="index")
@staticmethod
def sanitizeFileName(name):
return sanitize_filename(name)
    def searchIndexersForShowName(self, search_term, lang=None, indexer=None, exact=False):
        """Search one or all indexers for shows matching *search_term*.

        :param search_term: name (optionally ending in a year) typed by the user
        :param lang: indexer language code; falls back to the configured default
        :param indexer: numeric indexer id to search, or falsy/zero to search all
        :param exact: when truthy, filter and rank results toward exact matches
        :returns: JSON string with ``results`` (tuples of indexer name, indexer id,
            show url, series id, series name, first-aired date, already-added flag),
            ``langid`` and a ``success`` flag.
        """
        self.set_header("Cache-Control", "max-age=0,no-cache,no-store")
        self.set_header("Content-Type", "application/json")
        if not lang or lang == "null":
            lang = settings.INDEXER_DEFAULT_LANGUAGE
        search_term = xhtml_unescape(search_term)
        search_terms = [search_term]
        # If search term ends with what looks like a year, enclose it in ()
        matches = re.match(r"^(.+ |)([12][0-9]{3})$", search_term)
        if matches:
            search_terms.append("{0}({1})".format(matches.group(1), matches.group(2)))
        for term in search_terms:
            # If search term begins with an article, let's also search for it without.
            # NOTE: appending to search_terms while iterating it is deliberate —
            # the stripped variants are visited too, but cannot match the article
            # pattern a second time, so the loop terminates.
            matches = re.match(r"^(?:a|an|the) (.+)$", term, re.I)
            if matches:
                search_terms.append(matches.group(1))
        results = {}
        final_results = []
        # Query Indexers for each search term and build the list of results
        # NOTE(review): int(indexer) raises TypeError when indexer is None —
        # presumably every caller passes a value here; confirm against templates.
        for i, j in sickchill.indexer if not int(indexer) else [(int(indexer), None)]:
            logger.debug(_(f"Searching for Show with search term(s): {search_terms} on Indexer: {sickchill.indexer[i].name} (exact: {exact})"))
            for term in search_terms:
                # noinspection PyBroadException
                try:
                    indexerResults = sickchill.indexer[i].search(term, language=lang, exact=exact)
                except Exception:
                    logger.exception(traceback.format_exc())
                    continue
                # add search results
                results.setdefault(i, []).extend(indexerResults)
        for i, shows in results.items():
            # noinspection PyUnresolvedReferences
            # Set comprehension de-duplicates shows found via multiple search terms.
            final_results.extend(
                {
                    (
                        sickchill.indexer.name(i),
                        i,
                        sickchill.indexer[i].show_url,
                        show["id"],
                        show["seriesName"],
                        show["firstAired"],
                        sickchill.tv.Show.find(settings.showList, show["id"]) is not None,
                    )
                    for show in shows
                }
            )
        if exact:
            logger.debug(_("Filtering and sorting out excess results because exact match was checked"))
            # Keep only names containing the term; stable sorts rank exact matches
            # first, then earliest occurrence of the term, then alphabetically.
            final_results = [item for item in final_results if search_term.lower() in item[4].lower()]
            final_results.sort(key=itemgetter(4))
            final_results.sort(key=lambda x: x[4].lower().index(search_term.lower()))
            final_results.sort(key=lambda x: x[4].lower() == search_term.lower(), reverse=True)
        lang_id = sickchill.indexer.lang_dict()[lang]
        return json.dumps({"results": final_results, "langid": lang_id, "success": len(final_results) > 0})
    def massAddTable(self, rootDir=None):
        """Render the table of candidate show folders found under the root dir(s).

        For each sub-folder, records whether it is already in the DB and tries to
        recover indexer id / show name / indexer from existing metadata files.
        """
        t = PageTemplate(rh=self, filename="home_massAddTable.mako")
        if not rootDir:
            return _("No folders selected.")
        elif not isinstance(rootDir, list):
            root_dirs = [rootDir]
        else:
            root_dirs = rootDir
        root_dirs = [unquote_plus(xhtml_unescape(x)) for x in root_dirs]
        if settings.ROOT_DIRS:
            # ROOT_DIRS is "defaultIndex|dir1|dir2|..." — first element is the default index
            default_index = int(settings.ROOT_DIRS.split("|")[0])
        else:
            default_index = 0
        if len(root_dirs) > default_index:
            # Move the default root dir to the front so it is scanned first
            tmp = root_dirs[default_index]
            if tmp in root_dirs:
                root_dirs.remove(tmp)
                root_dirs.insert(0, tmp)
        dir_list = []
        main_db_con = db.DBConnection()
        for root_dir in root_dirs:
            # noinspection PyBroadException
            try:
                file_list = os.listdir(root_dir)
            except Exception:
                # Unreadable or missing root dir: skip silently
                continue
            for cur_file in file_list:
                # noinspection PyBroadException
                try:
                    cur_path = os.path.normpath(os.path.join(root_dir, cur_file))
                    if not os.path.isdir(cur_path):
                        continue
                    # ignore Synology folders
                    if cur_file.lower() in ["#recycle", "@eadir"]:
                        continue
                except Exception:
                    continue
                cur_dir = {
                    "dir": cur_path,
                    "existing_info": (None, None, None),
                    "display_dir": "<b>" + os.path.dirname(cur_path) + os.sep + "</b>" + os.path.basename(cur_path),
                }
                # see if the folder is in KODI already
                dirResults = main_db_con.select("SELECT indexer_id FROM tv_shows WHERE location = ? LIMIT 1", [cur_path])
                if dirResults:
                    cur_dir["added_already"] = True
                else:
                    cur_dir["added_already"] = False
                dir_list.append(cur_dir)
                # Try each metadata provider until one yields complete show info
                indexer_id = show_name = indexer = None
                for cur_provider in settings.metadata_provider_dict.values():
                    if not (indexer_id and show_name):
                        (indexer_id, show_name, indexer) = cur_provider.retrieveShowMetadata(cur_path)
                        if all((indexer_id, show_name, indexer)):
                            break
                if all((indexer_id, show_name, indexer)):
                    cur_dir["existing_info"] = (indexer_id, show_name, indexer)
                if indexer_id and Show.find(settings.showList, indexer_id):
                    cur_dir["added_already"] = True
        return t.render(dirList=dir_list)
def newShow(self, show_to_add=None, other_shows=None, search_string=None):
"""
Display the new show page which collects a tvdb id, folder, and extra options and
posts them to addNewShow
"""
t = PageTemplate(rh=self, filename="addShows_newShow.mako")
indexer, show_dir, indexer_id, show_name = self.split_extra_show(show_to_add)
if indexer_id and indexer and show_name:
use_provided_info = True
else:
use_provided_info = False
# use the given show_dir for the indexer search if available
if not show_dir:
if search_string:
default_show_name = search_string
else:
default_show_name = ""
elif not show_name:
default_show_name = re.sub(r" \(\d{4}\)", "", os.path.basename(os.path.normpath(show_dir)).replace(".", " "))
else:
default_show_name = show_name
# carry a list of other dirs if given
if not other_shows:
other_shows = []
elif not isinstance(other_shows, list):
other_shows = [other_shows]
provided_indexer_id = int(indexer_id or 0)
provided_indexer_name = show_name
provided_indexer = int(indexer or settings.INDEXER_DEFAULT)
return t.render(
enable_anime_options=True,
use_provided_info=use_provided_info,
default_show_name=default_show_name,
other_shows=other_shows,
provided_show_dir=show_dir,
provided_indexer_id=provided_indexer_id,
provided_indexer_name=provided_indexer_name,
provided_indexer=provided_indexer,
whitelist=[],
blacklist=[],
groups=[],
title=_("New Show"),
header=_("New Show"),
topmenu="home",
controller="addShows",
action="newShow",
)
def trendingShows(self, traktList=None):
"""
Display the new show page which collects a tvdb id, folder, and extra options and
posts them to addNewShow
"""
if not traktList:
traktList = ""
traktList = traktList.lower()
if traktList == "trending":
page_title = _("Trending Shows")
elif traktList == "popular":
page_title = _("Popular Shows")
elif traktList == "anticipated":
page_title = _("Most Anticipated Shows")
elif traktList == "collected":
page_title = _("Most Collected Shows")
elif traktList == "watched":
page_title = _("Most Watched Shows")
elif traktList == "played":
page_title = _("Most Played Shows")
elif traktList == "recommended":
page_title = _("Recommended Shows")
elif traktList == "newshow":
page_title = _("New Shows")
elif traktList == "newseason":
page_title = _("Season Premieres")
else:
page_title = _("Most Anticipated Shows")
t = PageTemplate(rh=self, filename="addShows_trendingShows.mako")
return t.render(title=page_title, header=page_title, enable_anime_options=False, traktList=traktList, controller="addShows", action="trendingShows")
def getTrendingShows(self, traktList=None):
"""
Display the new show page which collects a tvdb id, folder, and extra options and posts them to addNewShow
"""
t = PageTemplate(rh=self, filename="trendingShows.mako")
if not traktList:
traktList = ""
traktList = traktList.lower()
if traktList == "trending":
page_url = "shows/trending"
elif traktList == "popular":
page_url = "shows/popular"
elif traktList == "anticipated":
page_url = "shows/anticipated"
elif traktList == "collected":
page_url = "shows/collected"
elif traktList == "watched":
page_url = "shows/watched"
elif traktList == "played":
page_url = "shows/played"
elif traktList == "recommended":
page_url = "recommendations/shows"
elif traktList == "newshow":
page_url = "calendars/all/shows/new/{0}/30".format(datetime.date.today().strftime("%Y-%m-%d"))
elif traktList == "newseason":
page_url = "calendars/all/shows/premieres/{0}/30".format(datetime.date.today().strftime("%Y-%m-%d"))
else:
page_url = "shows/anticipated"
trending_shows = []
black_list = False
try:
trending_shows, black_list = trakt_trending.fetch_trending_shows(traktList, page_url)
except Exception as e:
logger.warning("Could not get trending shows: {0}".format(str(e)))
return t.render(black_list=black_list, trending_shows=trending_shows)
@staticmethod
def getTrendingShowImage(indexerId):
image_url = sickchill.indexer.series_poster_url_by_id(indexerId)
if image_url:
image_path = trakt_trending.get_image_path(trakt_trending.get_image_name(indexerId))
trakt_trending.cache_image(image_url, image_path)
return indexerId
def popularShows(self):
"""
Fetches data from IMDB to show a list of popular shows.
"""
t = PageTemplate(rh=self, filename="addShows_popularShows.mako")
try:
popular_shows = imdb_popular.fetch_popular_shows()
imdb_exception = None
except Exception as error:
logger.warning("Could not get popular shows: {0}".format(str(error)))
logger.debug(traceback.format_exc())
popular_shows = None
imdb_exception = error
return t.render(
title=_("Popular Shows"),
header=_("Popular Shows"),
popular_shows=popular_shows,
imdb_exception=imdb_exception,
imdb_url=imdb_popular.imdb_url,
topmenu="home",
controller="addShows",
action="popularShows",
)
def favoriteShows(self):
"""
Fetches data from IMDB to show a list of popular shows.
"""
t = PageTemplate(rh=self, filename="addShows_favoriteShows.mako")
e = None
if self.get_body_argument("submit", None):
tvdb_user = self.get_body_argument("tvdb_user")
tvdb_user_key = filters.unhide(settings.TVDB_USER_KEY, self.get_body_argument("tvdb_user_key"))
if tvdb_user and tvdb_user_key:
if tvdb_user != settings.TVDB_USER or tvdb_user_key != settings.TVDB_USER_KEY:
favorites.test_user_key(tvdb_user, tvdb_user_key, 1)
try:
favorite_shows = favorites.fetch_indexer_favorites()
except Exception as e:
logger.exception(traceback.format_exc())
logger.warning(_("Could not get favorite shows: {0}").format(str(e)))
favorite_shows = None
return t.render(
title=_("Favorite Shows"),
header=_("Favorite Shows"),
favorite_shows=favorite_shows,
favorites_exception=e,
topmenu="home",
controller="addShows",
action="popularShows",
)
def addShowToBlacklist(self):
# URL parameters
indexer_id = self.get_query_argument("indexer_id")
if not indexer_id:
raise HTTPError(404)
data = {"shows": [{"ids": {"tvdb": indexer_id}}]}
trakt_api = TraktAPI(settings.SSL_VERIFY, settings.TRAKT_TIMEOUT)
trakt_api.traktRequest("users/" + settings.TRAKT_USERNAME + "/lists/" + settings.TRAKT_BLACKLIST_NAME + "/items", data, method="POST")
return self.redirect("/addShows/trendingShows/")
def existingShows(self):
"""
Prints out the page to add existing shows from a root dir
"""
t = PageTemplate(rh=self, filename="addShows_addExistingShow.mako")
return t.render(
enable_anime_options=False, title=_("Existing Show"), header=_("Existing Show"), topmenu="home", controller="addShows", action="addExistingShow"
)
# noinspection PyUnusedLocal
def addShowByID(
self,
indexer_id,
show_name,
indexer="TVDB",
which_series=None,
indexer_lang=None,
root_dir=None,
default_status=None,
quality_preset=None,
any_qualities=None,
best_qualities=None,
season_folders=None,
subtitles=None,
full_show_path=None,
other_shows=None,
skip_show=None,
provided_indexer=None,
anime=None,
scene=None,
blacklist=None,
whitelist=None,
default_status_after=None,
default_season_folders=None,
configure_show_options=None,
):
if indexer != "TVDB":
indexer_id = helpers.tvdbid_from_remote_id(indexer_id, indexer.upper())
if not indexer_id:
logger.info("Unable to to find tvdb ID to add {0}".format(show_name))
ui.notifications.error(
"Unable to add {0}".format(show_name), "Could not add {0}. We were unable to locate the tvdb id at this time.".format(show_name)
)
return
indexer_id = try_int(indexer_id)
if indexer_id <= 0 or Show.find(settings.showList, indexer_id):
return
# Sanitize the parameter anyQualities and bestQualities. As these would normally be passed as lists
any_qualities = any_qualities.split(",") if any_qualities else []
best_qualities = best_qualities.split(",") if best_qualities else []
# If configure_show_options is enabled let's use the provided settings
if config.checkbox_to_value(configure_show_options):
# prepare the inputs for passing along
scene = config.checkbox_to_value(scene)
anime = config.checkbox_to_value(anime)
season_folders = config.checkbox_to_value(season_folders)
subtitles = config.checkbox_to_value(subtitles)
if whitelist:
whitelist = short_group_names(whitelist)
if blacklist:
blacklist = short_group_names(blacklist)
if not any_qualities:
any_qualities = []
if not best_qualities or try_int(quality_preset, None):
best_qualities = []
if not isinstance(any_qualities, list):
any_qualities = [any_qualities]
if not isinstance(best_qualities, list):
best_qualities = [best_qualities]
quality = Quality.combineQualities([int(q) for q in any_qualities], [int(q) for q in best_qualities])
location = root_dir
else:
default_status = settings.STATUS_DEFAULT
quality = settings.QUALITY_DEFAULT
season_folders = settings.SEASON_FOLDERS_DEFAULT
subtitles = settings.SUBTITLES_DEFAULT
anime = settings.ANIME_DEFAULT
scene = settings.SCENE_DEFAULT
default_status_after = settings.STATUS_DEFAULT_AFTER
if settings.ROOT_DIRS:
root_dirs = settings.ROOT_DIRS.split("|")
location = root_dirs[int(root_dirs[0]) + 1]
else:
location = None
if not location:
logger.info("There was an error creating the show, no root directory setting found")
return _("No root directories setup, please go back and add one.")
show_name = sickchill.indexer[1].get_series_by_id(indexer_id, indexer_lang).seriesName
show_dir = None
if not show_name:
ui.notifications.error(_("Unable to add show"))
return self.redirect("/home/")
# add the show
settings.showQueueScheduler.action.add_show(
indexer=1,
indexer_id=indexer_id,
showDir=show_dir,
default_status=default_status,
quality=quality,
season_folders=season_folders,
lang=indexer_lang,
subtitles=subtitles,
subtitles_sr_metadata=None,
anime=anime,
scene=scene,
paused=None,
blacklist=blacklist,
whitelist=whitelist,
default_status_after=default_status_after,
root_dir=location,
)
ui.notifications.message(_("Show added"), _("Adding the specified show {show_name}").format(show_name=show_name))
# done adding show
return self.redirect("/home/")
    def addNewShow(
        self,
        whichSeries=None,
        indexerLang=None,
        rootDir=None,
        defaultStatus=None,
        quality_preset=None,
        anyQualities=None,
        bestQualities=None,
        season_folders=None,
        subtitles=None,
        subtitles_sr_metadata=None,
        fullShowPath=None,
        other_shows=None,
        skipShow=None,
        providedIndexer=None,
        anime=None,
        scene=None,
        blacklist=None,
        whitelist=None,
        defaultStatusAfter=None,
    ):
        """
        Receive tvdb id, dir, and other options and create a show from them. If extra show dirs are
        provided then it forwards back to newShow, if not it goes to /home.

        ``whichSeries`` is either a plain indexer id or a pipe-delimited string
        whose pieces include indexer (index 1), series id (index 3) and
        name (index 4).
        """
        if not indexerLang:
            indexerLang = settings.INDEXER_DEFAULT_LANGUAGE
        # grab our list of other dirs if given
        if not other_shows:
            other_shows = []
        elif not isinstance(other_shows, list):
            other_shows = [other_shows]

        def finishAddShow():
            # if there are no extra shows then go home
            if not other_shows:
                return self.redirect("/home/")
            # peel off the next one
            next_show_dir = other_shows[0]
            rest_of_show_dirs = other_shows[1:]
            # go to add the next show
            return self.newShow(next_show_dir, rest_of_show_dirs)

        # if we're skipping then behave accordingly
        if skipShow:
            return finishAddShow()
        # sanity check on our inputs
        if (not rootDir and not fullShowPath) or not whichSeries:
            return _("Missing params, no Indexer ID or folder: {show_to_add} and {root_dir}/{show_path}").format(
                show_to_add=whichSeries, root_dir=rootDir, show_path=fullShowPath
            )
        # figure out what show we're adding and where
        series_pieces = whichSeries.split("|")
        if (whichSeries and rootDir) or (whichSeries and fullShowPath and len(series_pieces) > 1):
            if len(series_pieces) < 6:
                logger.error("Unable to add show due to show selection. Not enough arguments: {0}".format((repr(series_pieces))))
                ui.notifications.error(_("Unknown error. Unable to add show due to problem with show selection."))
                return self.redirect("/addShows/existingShows/")
            indexer = int(series_pieces[1])
            indexer_id = int(series_pieces[3])
            # Show name was sent in UTF-8 in the form
            show_name = xhtml_unescape(series_pieces[4])
        else:
            # if no indexer was provided use the default indexer set in General settings
            if not providedIndexer:
                providedIndexer = settings.INDEXER_DEFAULT
            indexer = int(providedIndexer)
            indexer_id = int(whichSeries)
            show_name = os.path.basename(os.path.normpath(xhtml_unescape(fullShowPath)))
        # use the whole path if it's given, or else append the show name to the root dir to get the full show path
        if fullShowPath:
            show_dir = os.path.normpath(xhtml_unescape(fullShowPath))
            extra_check_dir = show_dir
        else:
            folder_name = show_name
            s = sickchill.indexer.series_by_id(indexerid=indexer_id, indexer=indexer, language=indexerLang)
            # Optionally append "(YYYY)" to the folder name from the first-aired date
            if settings.ADD_SHOWS_WITH_YEAR and s.firstAired:
                try:
                    year = "({0})".format(dateutil.parser.parse(s.firstAired).year)
                    if year not in folder_name:
                        folder_name = "{0} {1}".format(s.seriesName, year)
                except (TypeError, ValueError):
                    logger.info(_("Could not append the show year folder for the show: {0}").format(folder_name))
            show_dir = os.path.join(rootDir, sanitize_filename(xhtml_unescape(folder_name)))
            extra_check_dir = os.path.join(rootDir, sanitize_filename(xhtml_unescape(show_name)))
        # blanket policy - if the dir exists you should have used "add existing show" numbnuts
        if (os.path.isdir(show_dir) or os.path.isdir(extra_check_dir)) and not fullShowPath:
            ui.notifications.error(_("Unable to add show"), _("Folder {show_dir} exists already").format(show_dir=show_dir))
            return self.redirect("/addShows/existingShows/")
        # don't create show dir if config says not to
        if settings.ADD_SHOWS_WO_DIR:
            logger.info("Skipping initial creation of " + show_dir + " due to config.ini setting")
        else:
            dir_exists = helpers.makeDir(show_dir)
            if not dir_exists:
                logger.exception("Unable to create the folder " + show_dir + ", can't add the show")
                ui.notifications.error(_("Unable to add show"), _("Unable to create the folder {show_dir}, can't add the show").format(show_dir=show_dir))
                # Don't redirect to default page because user wants to see the new show
                return self.redirect("/home/")
            else:
                helpers.chmodAsParent(show_dir)
        # prepare the inputs for passing along
        scene = config.checkbox_to_value(scene)
        anime = config.checkbox_to_value(anime)
        season_folders = config.checkbox_to_value(season_folders)
        subtitles = config.checkbox_to_value(subtitles)
        subtitles_sr_metadata = config.checkbox_to_value(subtitles_sr_metadata)
        if whitelist:
            whitelist = short_group_names(whitelist)
        if blacklist:
            blacklist = short_group_names(blacklist)
        if not anyQualities:
            anyQualities = []
        # A quality preset overrides any explicit "best" qualities
        if not bestQualities or try_int(quality_preset, None):
            bestQualities = []
        if not isinstance(anyQualities, list):
            anyQualities = [anyQualities]
        if not isinstance(bestQualities, list):
            bestQualities = [bestQualities]
        newQuality = Quality.combineQualities([int(q) for q in anyQualities], [int(q) for q in bestQualities])
        # add the show
        settings.showQueueScheduler.action.add_show(
            indexer,
            indexer_id,
            showDir=show_dir,
            default_status=int(defaultStatus),
            quality=newQuality,
            season_folders=season_folders,
            lang=indexerLang,
            subtitles=subtitles,
            subtitles_sr_metadata=subtitles_sr_metadata,
            anime=anime,
            scene=scene,
            paused=None,
            blacklist=blacklist,
            whitelist=whitelist,
            default_status_after=int(defaultStatusAfter),
            root_dir=rootDir,
        )
        ui.notifications.message(_("Show added"), _("Adding the specified show into {show_dir}").format(show_dir=show_dir))
        return finishAddShow()
@staticmethod
def split_extra_show(extra_show):
if not extra_show:
return None, None, None, None
split_vals = extra_show.split("|")
if len(split_vals) < 4:
indexer = split_vals[0]
show_dir = split_vals[1]
return indexer, show_dir, None, None
indexer = split_vals[0]
show_dir = split_vals[1]
indexer_id = split_vals[2]
show_name = "|".join(split_vals[3:])
return indexer, show_dir, indexer_id, show_name
    def addExistingShows(self, shows_to_add, promptForSettings, **kwargs):
        """
        Receives a dir list and add them. Adds the ones with given TVDB IDs first, then forwards
        along to the newShow page.
        """
        # grab a list of other shows to add, if provided
        if not shows_to_add:
            shows_to_add = []
        elif not isinstance(shows_to_add, list):
            shows_to_add = [shows_to_add]
        shows_to_add = [unquote_plus(xhtml_unescape(x)) for x in shows_to_add]
        indexer_id_given = []
        dirs_only = []
        # separate all the ones with Indexer IDs
        for cur_dir in shows_to_add:
            # NOTE(review): an entry containing '|' with fewer than 3 pieces is
            # appended to dirs_only here AND still falls into the else branch
            # below (where split_extra_show returns no indexer_id and the loop
            # continues) — looks intentional but worth confirming.
            if "|" in cur_dir:
                split_vals = cur_dir.split("|")
                if len(split_vals) < 3:
                    dirs_only.append(cur_dir)
            if "|" not in cur_dir:
                dirs_only.append(cur_dir)
            else:
                indexer, show_dir, indexer_id, show_name = self.split_extra_show(cur_dir)
                if not show_dir or not indexer_id or not show_name:
                    continue
                indexer_id_given.append((int(indexer), show_dir, int(indexer_id), show_name))
        # if they want me to prompt for settings then I will just carry on to the newShow page
        if shows_to_add and config.checkbox_to_value(promptForSettings):
            return self.newShow(shows_to_add[0], shows_to_add[1:])
        # if they don't want me to prompt for settings then I can just add all the nfo shows now
        num_added = 0
        for cur_show in indexer_id_given:
            indexer, show_dir, indexer_id, show_name = cur_show
            if indexer is not None and indexer_id is not None:
                # add the show
                settings.showQueueScheduler.action.add_show(
                    indexer,
                    indexer_id,
                    show_dir,
                    default_status=settings.STATUS_DEFAULT,
                    quality=settings.QUALITY_DEFAULT,
                    season_folders=settings.SEASON_FOLDERS_DEFAULT,
                    subtitles=settings.SUBTITLES_DEFAULT,
                    anime=settings.ANIME_DEFAULT,
                    scene=settings.SCENE_DEFAULT,
                    default_status_after=settings.STATUS_DEFAULT_AFTER,
                )
                num_added += 1
        if num_added:
            ui.notifications.message(_("Shows Added"), _("Automatically added {num_shows} from their existing metadata files").format(num_shows=str(num_added)))
        # if we're done then go home
        if not dirs_only:
            return self.redirect("/home/")
        # for the remaining shows we need to prompt for each one, so forward this on to the newShow page
        return self.newShow(dirs_only[0], dirs_only[1:])
|
"""Bot object for Dozer"""
import logging
import re
import sys
import traceback
import discord
import aiohttp
from discord.ext import commands
from . import utils
from .asyncdb.orm import orm
# Dozer's logger and discord.py's logger share a single stdout handler.
dozer_logger = logging.getLogger('dozer')
dozer_logger.level = logging.DEBUG  # NOTE: direct .level assignment (setLevel() is the usual idiom)
discord_logger = logging.getLogger('discord')
discord_logger.level = logging.DEBUG
dozer_log_handler = logging.StreamHandler(stream=sys.stdout)
dozer_log_handler.level = logging.INFO  # may be overridden later via config['log_level'] in Dozer.__init__
dozer_logger.addHandler(dozer_log_handler)
discord_logger.addHandler(dozer_log_handler)
dozer_log_handler.setFormatter(fmt=logging.Formatter('[%(asctime)s] [%(levelname)s] [%(name)s] %(message)s'))
# Fail fast on import if the installed discord.py is too old to support this bot.
if discord.version_info.major < 1:
    dozer_logger.error("Your installed discord.py version is too low "
                       "%d.%d.%d, please upgrade to at least 1.0.0a",
                       discord.version_info.major,
                       discord.version_info.minor,
                       discord.version_info.micro)
    sys.exit(1)
elif not hasattr(commands, "Cog"):
    dozer_logger.error("Your installed discord.py rewrite version is too "
                       "old and lacks discord.ext.commands.Cog, please reinstall it and try again.")
    sys.exit(1)
class InvalidContext(commands.CheckFailure):
    """Raised by the global check when a command context is invalid.

    Examples: the command was invoked by a bot, or the global rate limit was
    exceeded. Messages triggering this failure are silently ignored.
    """
class DozerContext(commands.Context):
    """Context subclass that sanitizes outgoing message content before sending."""

    async def send(self, content=None, **kwargs):  # pylint: disable=arguments-differ
        # Strip mass mentions from the text body before it goes out.
        if content is not None:
            content = utils.clean(self, content, mass=True, member=False, role=False, channel=False)
        # Log (but still send) embeds whose fields have a value with no name.
        embed = kwargs.get("embed")
        if isinstance(embed, discord.Embed):
            for field in embed.fields:
                if not field.name and field.value:
                    dozer_logger.error(f"Invalid embed values {field.name!r}: {field.value!r}")
        return await super().send(content, **kwargs)
class Dozer(commands.Bot):
    """Botty things that are critical to Dozer working"""
    #_global_cooldown = commands.Cooldown(1, 1, commands.BucketType.user) # One command per second per user
    def __init__(self, config):
        """Configure intents, checks, logging and the shared aiohttp session."""
        intents = discord.Intents.default()
        intents.members = True
        intents.presences = True
        super().__init__(command_prefix=config['prefix'], intents=intents, case_insensitive=True)
        self.config = config
        self.logger = dozer_logger
        self._restarting = False
        self.check(self.global_checks)
        # NOTE(review): creating a ClientSession outside a coroutine is deprecated
        # in newer aiohttp; acceptable here as long as it runs on the same loop.
        self.http_session = aiohttp.ClientSession(loop=self.loop)
        if 'log_level' in config:
            dozer_log_handler.setLevel(config['log_level'])

    async def update_status(self):
        """Dynamically update the bot's status."""
        dozer_logger.info('Signed in as {}#{} ({})'.format(self.user.name, self.user.discriminator, self.user.id))
        # Backup instances advertise "do not disturb" so users prefer the primary bot.
        if self.config['is_backup']:
            status = discord.Status.dnd
        else:
            status = discord.Status.online
        game = discord.Game(name=f"{self.config['prefix']}help | {len(self.guilds)} guilds")
        try:
            await self.change_presence(activity=game, status=status)
        except TypeError:
            dozer_logger.warning("You are running an older version of the discord.py rewrite (with breaking changes)! "
                                 "To upgrade, run `pip install -r requirements.txt --upgrade`")

    async def on_ready(self):
        """Things to run when the bot has initialized and signed in"""
        await self.update_status()

    async def on_guild_join(self, guild):  # pylint: disable=unused-argument
        """Update bot status to remain accurate."""
        await self.update_status()

    async def on_guild_remove(self, guild):  # pylint: disable=unused-argument
        """Update bot status to remain accurate."""
        await self.update_status()

    async def get_context(self, message, *, cls=DozerContext):
        """Use the mention-sanitizing DozerContext for every command invocation."""
        return await super().get_context(message, cls=cls)

    async def on_command_error(self, context, exception):
        """Translate command errors into user-friendly replies; log unexpected ones."""
        if isinstance(exception, commands.NoPrivateMessage):
            await context.send('{}, This command cannot be used in DMs.'.format(context.author.mention))
        elif isinstance(exception, commands.UserInputError):
            await context.send('{}, {}'.format(context.author.mention, self.format_error(context, exception)))
        elif isinstance(exception, commands.NotOwner):
            await context.send('{}, {}'.format(context.author.mention, exception.args[0]))
        elif isinstance(exception, commands.MissingPermissions):
            permission_names = [name.replace('guild', 'server').replace('_', ' ').title() for name in exception.missing_perms]
            await context.send('{}, you need {} permissions to run this command!'.format(
                context.author.mention, utils.pretty_concat(permission_names)))
        elif isinstance(exception, commands.BotMissingPermissions):
            permission_names = [name.replace('guild', 'server').replace('_', ' ').title() for name in exception.missing_perms]
            await context.send('{}, I need {} permissions to run this command!'.format(
                context.author.mention, utils.pretty_concat(permission_names)))
        elif isinstance(exception, commands.CommandOnCooldown):
            await context.send('{}, That command is on cooldown! Try again in {:.2f}s!'.format(context.author.mention, exception.retry_after))
        elif isinstance(exception, (commands.CommandNotFound, InvalidContext)):
            pass  # Silent ignore
        else:
            await context.send('```\n%s\n```' % ''.join(traceback.format_exception_only(type(exception), exception)).strip())
            if isinstance(context.channel, discord.TextChannel):
                dozer_logger.error('Error in command <{0}> ({1.name!r}:({1.id}) {2}:({2.id}) {3}:({3.id}) {4})'
                                   ''.format(context.command, context.guild, context.channel, context.author, context.message.content))
            else:
                # Bug fix: was '({1}.id)', which rendered the literal text '.id'
                # instead of the recipient's id; '{1.id}' formats the attribute.
                dozer_logger.error('Error in command <{0}> (DM {1}:({1.id}) {2})'.format(context.command, context.channel.recipient,
                                                                                        context.message.content))
            dozer_logger.error(''.join(traceback.format_exception(type(exception), exception, exception.__traceback__)))

    @staticmethod
    def format_error(ctx, err, *, word_re=re.compile('[A-Z][a-z]+')):
        """Turns an exception into a user-friendly (or -friendlier, at least) error message."""
        type_words = word_re.findall(type(err).__name__)
        type_msg = ' '.join(map(str.lower, type_words))
        if err.args:
            return '%s: %s' % (type_msg, utils.clean(ctx, err.args[0]))
        else:
            return type_msg

    def global_checks(self, ctx):
        """Checks that should be executed before passed to the command"""
        if ctx.author.bot:
            raise InvalidContext('Bots cannot run commands!')
        # Global cooldown is currently disabled (see _global_cooldown above).
        retry_after = False  # self._global_cooldown.update_rate_limit()
        if retry_after and not hasattr(ctx, "is_pseudo"):  # bypass ratelimit for su'ed commands
            raise InvalidContext('Global rate-limit exceeded!')
        return True

    def run(self, *args, **kwargs):
        """Start the bot, removing the token from config to prevent accidental dumps."""
        token = self.config['discord_token']
        del self.config['discord_token']  # Prevent token dumping
        # Bug fix: *args/**kwargs were accepted but silently discarded; forward them.
        super().run(token, *args, **kwargs)

    async def shutdown(self, restart=False):
        """Shuts down the bot"""
        self._restarting = restart
        #await self.logout()
        await self.close()
        await orm.close()
        await self.http_session.close()
        self.loop.stop()
|
from django.shortcuts import render_to_response
from django.http import HttpResponse, HttpResponseServerError
from django.template import Context
from django.views.decorators.csrf import csrf_exempt
from Photos.messages import MessageCode
from Photos.models import *
from Common.component.messages import MessageVO
from Common.component.UploadProgressHandler import *
def start(request, view):
    """Render the requested *view* template with an empty data dict."""
    context = Context(request)
    return render_to_response(view, {}, context_instance=context)
def get_tmp_photos(request):
    """Render the logged-in user's temporal photos into the requested view."""
    owner = User.objects.get(pk=request.session['logged']['id'])
    photos = TmpPhoto.objects.filter(owner=owner)
    return render_to_response(request.GET['view'], {'Photos': photos}, context_instance=Context(request))
def get_bound_photos(request):
    """Render photos inside the coordinate bounds given in the query string.

    Non-GET requests receive a JSON error payload instead.
    """
    if request.method != "GET":
        result = MessageVO(_type=False, msg=MessageCode._001)
        return HttpResponse(result.getJSON(), mimetype='application/json')
    user = User.objects.get(pk=request.session['logged']['id'])
    photos = user.get_photos_by_bound(request.GET['ca_b'],
                                      request.GET['ca_j'],
                                      request.GET['ea_b'],
                                      request.GET['ea_j'])
    return render_to_response(request.GET['view'], {'Photos': photos}, context_instance=Context(request))
@csrf_exempt
def del_tmp_photo(request):
    """Delete the temporal photo whose pk is POSTed as 'id'.

    Bug fix: non-POST requests previously fell off the end of the function and
    returned None, which makes Django raise a ValueError; they now get the same
    JSON error payload the sibling views (e.g. set_photo_by_id) return.
    """
    if request.method == "POST":
        photo = TmpPhoto.objects.get(pk=request.POST['id'])
        photo.delete()
        return HttpResponse()
    result = MessageVO(_type=False, msg=MessageCode._001)
    return HttpResponse(result.getJSON(), mimetype='application/json')
@csrf_exempt
def add_tmp_photo(request):
    """Store each uploaded file as a temporal photo for the logged-in user.

    When an 'id' is POSTed as well, the referenced coordinate photo is moved
    back into the temporal set.
    """
    # request.upload_handlers.insert(0, UploadProgressHandler(request))
    owner_id = request.session['logged']['id']
    for uploaded in request.FILES.getlist('files'):
        TmpPhoto.create(img=uploaded, userId=owner_id)
    # Moving from coordinate photos to temporal ones
    if 'id' in request.POST:
        Photo.objects.get(pk=request.POST['id']).move_to_temp()
    return HttpResponse()
@csrf_exempt
def get_photo_by_id(request):
    """Render a single photo looked up by GET pk 'id'; JSON error otherwise."""
    if request.method == "GET":
        photo = Photo.objects.get(pk=request.GET['id'])
        return render_to_response(request.GET['view'], {'data': photo}, context_instance=Context(request))
    error = MessageVO(_type=False, msg=MessageCode._001)
    return HttpResponse(error.getJSON(), mimetype='application/json')
@csrf_exempt
def set_photo_by_id(request):
    """Update a photo's title and comment from POST data; reply with JSON."""
    if request.method != "POST":
        result = MessageVO(_type=False, msg=MessageCode._001)
    else:
        photo = Photo.objects.get(pk=request.POST['id'])
        photo.comment = request.POST['comment']
        photo.title = request.POST['title']
        photo.save()
        result = MessageVO(_type=True, msg=MessageCode._301)
    return HttpResponse(result.getJSON(), mimetype='application/json')
def upload_progress(request):
    """
    A view to report back on upload progress.
    Return JSON object with information about the progress of an upload.
    Copied from:
    http://djangosnippets.org/snippets/678/
    See upload.py for file upload handler.
    """
    #import ipdb
    #ipdb.set_trace()
    progress_id = ''
    # The progress ID may arrive either as a query parameter or a header.
    # NOTE(review): header values in request.META are normally prefixed
    # (HTTP_X_PROGRESS_ID); confirm the raw 'X-Progress-ID' key is actually set.
    if 'X-Progress-ID' in request.GET:
        progress_id = request.GET['X-Progress-ID']
    elif 'X-Progress-ID' in request.META:
        progress_id = request.META['X-Progress-ID']
    if progress_id:
        # NOTE(review): simplejson was removed from django.utils in Django 1.5;
        # this module targets a legacy Django version.
        from django.utils import simplejson
        # Key matches what the upload handler writes per client address + id.
        cache_key = "%s_%s" % (request.META['REMOTE_ADDR'], progress_id)
        # NOTE(review): 'cache' has no explicit import here; presumably it comes
        # in via one of the star imports above — verify.
        data = cache.get(cache_key)
        return HttpResponse(simplejson.dumps(data))
    else:
        return HttpResponseServerError('Server Error: You must provide X-Progress-ID header or query param.')
|
import os
import unittest
from vsg.rules import entity
from vsg import vhdlFile
from vsg.tests import utils
# Resolve test fixtures relative to this file and parse the rule's input VHDL.
sTestDir = os.path.dirname(__file__)

lFile, eError = vhdlFile.utils.read_vhdlfile(os.path.join(sTestDir, 'rule_002_test_input.vhd'))

# Expected fixed output: a leading blank line followed by the fixed file's lines.
lExpected = ['']
utils.read_file(os.path.join(sTestDir, 'rule_002_test_input.fixed.vhd'), lExpected)
class test_entity_rule(unittest.TestCase):
    """Checks detection and fixing for entity rule 002."""

    def setUp(self):
        """Build a fresh vhdlFile and verify parsing produced no error."""
        self.oFile = vhdlFile.vhdlFile(lFile)
        self.assertIsNone(eError)

    def test_rule_002(self):
        """The rule identifies itself correctly and flags the expected line."""
        oRule = entity.rule_002()
        self.assertTrue(oRule)
        self.assertEqual(oRule.name, 'entity')
        self.assertEqual(oRule.identifier, '002')
        oRule.analyze(self.oFile)
        self.assertEqual([7], utils.extract_violation_lines_from_violation_object(oRule.violations))

    def test_fix_rule_002(self):
        """Fixing produces the expected file and leaves no violations behind."""
        oRule = entity.rule_002()
        oRule.fix(self.oFile)
        self.assertEqual(lExpected, self.oFile.get_lines())
        oRule.analyze(self.oFile)
        self.assertEqual(oRule.violations, [])
|
from textwrap import dedent
from unittest import mock
import pytest
from testtools import TestCase
from testtools.matchers import Contains, Equals
import snapcraft.internal.project_loader._config # noqa: F401
import snapcraft.yaml_utils.errors
from snapcraft.project._schema import Validator
from . import ProjectBaseTest
def get_data():
    """Build a fresh, schema-valid snapcraft.yaml data dict for tests to mutate."""
    part1 = {"plugin": "project", "parse-info": ["test-metadata-file"]}
    return {
        "name": "my-package-1",
        "base": "core18",
        "version": "1.0-snapcraft1~ppa1",
        "summary": "my summary less that 79 chars",
        "description": "description which can be pretty long",
        "adopt-info": "part1",
        "parts": {"part1": part1},
    }
@pytest.fixture
def data():
    """Return snapcraft.yaml data to validate.

    A fresh copy is produced per test, so tests may mutate it freely.
    """
    return get_data()
class ValidationBaseTest(TestCase):
    """Base test case seeding each test with a fresh, valid data dict."""
    def setUp(self):
        super().setUp()
        # Fresh copy per test; subclasses mutate self.data in place.
        self.data = get_data()
class ValidationTest(ValidationBaseTest):
    """Schema-validation cases exercised through testtools assertions."""
    def test_summary_too_long(self):
        """A summary longer than 78 characters is rejected."""
        self.data["summary"] = "a" * 80
        raised = self.assertRaises(
            snapcraft.yaml_utils.errors.YamlValidationError,
            Validator(self.data).validate,
        )
        expected_message = (
            "The 'summary' property does not match the required schema: "
            "'{}' is too long (maximum length is 78)"
        ).format(self.data["summary"])
        self.assertThat(raised.message, Equals(expected_message), message=self.data)
    def test_apps_required_properties(self):
        """An app without a 'command' fails validation."""
        self.data["apps"] = {"service1": {}}
        raised = self.assertRaises(
            snapcraft.yaml_utils.errors.YamlValidationError,
            Validator(self.data).validate,
        )
        expected_message = (
            "The 'apps/service1' property does not match the "
            "required schema: 'command' is a required "
            "property"
        )
        self.assertThat(raised.message, Equals(expected_message), message=self.data)
    def test_schema_file_not_found(self):
        """A missing schema file surfaces as a YamlValidationError at construction."""
        mock_the_open = mock.mock_open()
        mock_the_open.side_effect = FileNotFoundError()
        with mock.patch("snapcraft.project._schema.open", mock_the_open, create=True):
            raised = self.assertRaises(
                snapcraft.yaml_utils.errors.YamlValidationError, Validator, self.data
            )
        expected_message = "snapcraft validation file is missing from installation path"
        self.assertThat(raised.message, Equals(expected_message))
    def test_valid_app_daemons(self):
        """Every documented daemon-related app option combination validates."""
        self.data["apps"] = {
            "service1": {"command": "binary1 start", "daemon": "simple"},
            "service2": {
                "command": "binary2",
                "stop-command": "binary2 --stop",
                "daemon": "simple",
            },
            "service3": {"command": "binary3", "daemon": "forking"},
            "service4": {
                "command": "binary4",
                "daemon": "simple",
                "restart-condition": "always",
            },
            "service5": {"command": "binary5", "daemon": "notify"},
            "service6": {
                "command": "binary6",
                "post-stop-command": "binary6 --post-stop",
                "daemon": "simple",
            },
            "service7": {
                "command": "binary7",
                "reload-command": "binary7 --reload",
                "daemon": "simple",
            },
            "service8": {
                "command": "binary8",
                "daemon": "dbus",
                "bus-name": "org.test.snapcraft",
            },
            "service9": {
                "command": "binary9",
                "daemon": "simple",
                "start-timeout": "1s",
            },
            "service10": {
                "command": "binary10",
                "daemon": "simple",
                "stop-timeout": "1s",
            },
            "service11": {
                "command": "binary11",
                "daemon": "simple",
                "restart-delay": "1s",
            },
            "service12": {
                "command": "binary12",
                "daemon": "simple",
                "watchdog-timeout": "1s",
            },
            "service13": {
                "command": "binary13",
                "daemon": "oneshot",
                "timer": "mon,10:00-12:00",
            },
            "service14": {
                "command": "binary14",
                "daemon": "simple",
                "restart-condition": "on-watchdog",
                "watchdog-timeout": "30s",
            },
            "service15": {
                "command": "binary15",
                "daemon": "simple",
                "install-mode": "enable",
            },
            "service16": {
                "command": "binary16",
                "daemon": "simple",
                "install-mode": "disable",
            },
        }
        Validator(self.data).validate()
    def test_invalid_restart_condition(self):
        """An unknown restart-condition value is rejected with the enum message."""
        self.data["apps"] = {
            "service1": {
                "command": "binary1",
                "daemon": "simple",
                "restart-condition": "on-tuesday",
            }
        }
        raised = self.assertRaises(
            snapcraft.yaml_utils.errors.YamlValidationError,
            Validator(self.data).validate,
        )
        self.assertThat(
            str(raised),
            Contains(
                "The 'apps/service1/restart-condition' property does not match "
                "the required schema: 'on-tuesday' is not one of ['on-success', "
                "'on-failure', 'on-abnormal', 'on-abort', 'on-watchdog', 'always', "
                "'never']"
            ),
        )
    def test_missing_required_property_and_missing_adopt_info(self):
        """Dropping both 'summary' and 'adopt-info' reports the either/or error."""
        del self.data["summary"]
        del self.data["adopt-info"]
        raised = self.assertRaises(
            snapcraft.yaml_utils.errors.YamlValidationError,
            Validator(self.data).validate,
        )
        expected_message = (
            "'adopt-info' is a required property or 'summary' is a required property"
        )
        self.assertThat(raised.message, Equals(expected_message), message=self.data)
    def test_invalid_install_mode(self):
        """An install-mode outside the enum is rejected."""
        self.data["apps"] = {
            "service": {
                "command": "binary",
                "daemon": "simple",
                "install-mode": "invalid",
            }
        }
        raised = self.assertRaises(
            snapcraft.yaml_utils.errors.YamlValidationError,
            Validator(self.data).validate,
        )
        expected_message = (
            "The 'apps/service/install-mode' property does not match the required schema: "
            "'invalid' is not one of ['enable', 'disable']"
        )
        self.assertThat(raised.message, Equals(expected_message), message=self.data)
@pytest.mark.parametrize(
    "option,value",
    [
        ("stop-command", "binary1 --stop"),
        ("post-stop-command", "binary1 --post-stop"),
        ("before", ["service1"]),
        ("after", ["service2"]),
    ],
)
def test_daemon_dependency(data, option, value):
    """App options that only make sense for daemons require 'daemon' to be set."""
    data["apps"] = {"service1": {"command": "binary1", option: value}}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    suffix = (
        "The 'apps/service1' property does not match the required schema: "
        f"'daemon' is a dependency of {option!r}"
    )
    assert str(error.value).endswith(suffix)
@pytest.mark.parametrize("key", ["name", "parts"])
def test_required_properties(data, key):
    """Removing a top-level required property must fail validation."""
    del data[key]
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    message = str(error.value)
    assert f"{key!r} is a required property" in message
class TestInvalidNames:
    """Scenario-driven checks that invalid snap names raise the right error."""
    # Error message fragments, matched against the tail of the raised error.
    e1 = "not a valid snap name. Snap names can only use ASCII lowercase letters, numbers, and hyphens, and must have at least one letter."
    e2 = "not a valid snap name. Snap names cannot start with a hyphen."
    e3 = "not a valid snap name. Snap names cannot end with a hyphen."
    e4 = "not a valid snap name. Snap names cannot have two hyphens in a row."
    e5 = "too long (maximum length is 40)"
    scenarios = [
        # snapcraft's existing unit tests
        ("existing test #1", dict(name="package@awesome", err=e1)),
        ("existing test #2", dict(name="something.another", err=e1)),
        ("existing test #3", dict(name="_hideme", err=e1)),
        ("existing test #4", dict(name="-no", err=e2)),
        ("existing test #5", dict(name="a:a", err=e1)),
        ("existing test #6", dict(name="123", err=e1)),
        # this one manages to fail every validation test except type
        ("order check", dict(name="-----------------------------------------", err=e5)),
        # from snapd's unit tests (except those covered by above)
        ("name cannot be empty", dict(name="", err=e1)),
        (
            "name cannot be too long",
            dict(name="aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", err=e5),
        ),
        ("dashes alone are not a name", dict(name="-", err=e1)),
        ("dashes alone are not a name, take 2", dict(name="--", err=e1)),
        ("double dashes in a name are not ok", dict(name="a--a", err=e4)),
        ("name should not end with a dash", dict(name="a-", err=e3)),
        ("name cannot have any spaces in it, #1", dict(name="a ", err=e1)),
        ("name cannot have any spaces in it, #2", dict(name=" a", err=e1)),
        ("name cannot have any spaces in it, #3", dict(name="a a", err=e1)),
        ("a number alone is not a name", dict(name="0", err=e1)),
        ("just numbers and dashes", dict(name="1-2-3", err=e1)),
        ("plain ASCII #1", dict(name="реасе", err=e1)),
        ("plain ASCII #2", dict(name="日本語", err=e1)),
        ("plain ASCII #3", dict(name="한글", err=e1)),
        ("plain ASCII #4", dict(name="ру́сский язы́к", err=e1)),
        # from review-tools', except as covered
        (
            "stress the regexper",
            dict(name="u-9490371368748654323415773467328453675-", err=e3),
        ),
        ("review-tools bad", dict(name="foo?bar", err=e1)),
        ("review-tools bad1", dict(name="foo/bar", err=e1)),
        ("review-tools bad6", dict(name="foo-Bar", err=e1)),
    ]
    def test(self, data, name, err):
        """Each scenario's name must fail validation with its expected error."""
        data["name"] = name
        with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
            Validator(data).validate()
        assert str(error.value).endswith(
            f"The 'name' property does not match the required schema: {name!r} is {err}"
        )
@pytest.mark.parametrize("snap_type", ["app", "base", "gadget", "kernel", "snapd"])
def test_valid_types(data, snap_type):
    """Every supported snap type validates; base-like types must drop 'base'."""
    data["type"] = snap_type
    if snap_type in {"base", "kernel", "snapd"}:
        data.pop("base")
    Validator(data).validate()
# Error fragment emitted when 'type' and 'base' are combined illegally.
_BASE_TYPE_MSG = (
    "must be one of base: <base> and type: <app|gadget>, "
    "base: bare (with a build-base), "
    "or type: <base|kernel|snapd> (without a base)"
)
# Template for the enum error when 'type' is not a recognized value.
_TYPE_ENUM_TMPL = (
    "The 'type' property does not match the required schema: '{}' is not one of "
    "['app', 'base', 'gadget', 'kernel', 'snapd']"
)
@pytest.mark.parametrize("snap_type", ["apps", "framework", "platform", "oem"])
def test_invalid_types(data, snap_type):
    """Legacy or unknown snap types must be rejected by the schema."""
    data["type"] = snap_type
    validator = Validator(data)
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError):
        validator.validate()
def test_type_base_and_no_base(data):
    """'type: base' is valid when the 'base' key is removed."""
    data.pop("base")
    data["type"] = "base"
    Validator(data).validate()
def test_type_base_and_base(data):
    """'type: base' combined with a 'base' key is rejected."""
    data["type"] = "base"
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    message = str(error.value)
    assert _BASE_TYPE_MSG in message
def test_build_base_and_base(data):
    """'build-base' may be combined with an ordinary 'base'."""
    data["build-base"] = "fake-base"
    Validator(data).validate()
def test_build_base_and_type_base(data):
    """'build-base' may be combined with 'type: base' (no 'base' key)."""
    data.pop("base")
    data["type"] = "base"
    data["build-base"] = "fake-base"
    Validator(data).validate()
def test_build_base_and_base_bare(data):
    """'base: bare' requires (and accepts) an explicit 'build-base'."""
    data["base"] = "bare"
    data["build-base"] = "fake-base"
    Validator(data).validate()
@pytest.mark.parametrize(
    "name",
    [
        "1",
        "a",
        "aa",
        "aaa",
        "aaaa",
        "Aa",
        "aA",
        "1a",
        "a1",
        "1-a",
        "a-1",
        "a-a",
        "aa-a",
        "a-aa",
        "a-b-c",
        "0a-a",
        "a-0a",
    ],
)
def test_valid_app_names(data, name):
    """Names inside the app-name grammar validate cleanly."""
    data["apps"] = {name: {"command": "foo"}}
    validator = Validator(data)
    validator.validate()
@pytest.mark.parametrize(
    "name",
    [
        "qwe#rty",
        "qwe_rty",
        "queue rty",
        "queue  rty",
        "",
        "-",
        "--",
        "a--a",
        "a-",
        "a ",
        " a",
        "a a",
        "日本語",
        "한글",
        "ру́сский язы́к",
        "ໄຂ່ອີສເຕີ້",
        ":a",
        "a:",
        "a:a",
        "_a",
        "a_",
        "a_a",
    ],
)
def test_invalid_app_names(data, name):
    """Names outside the app-name grammar produce a schema error."""
    data["apps"] = {name: {"command": "1"}}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        f"The 'apps' property does not match the required schema: {name!r} is "
        "not a valid app name. App names consist of upper- and lower-case "
        "alphanumeric characters and hyphens. They cannot start or end "
        "with a hyphen."
    )
    assert expected in str(error.value)
@pytest.mark.parametrize(
    "condition",
    [
        "always",
        "on-success",
        "on-failure",
        "on-abnormal",
        "on-abort",
        "on-watchdog",
        "never",
    ],
)
def test_valid_restart_conditions(data, condition):
    """Every documented restart-condition value validates for a daemon app."""
    data["apps"] = {
        "service1": {
            "command": "binary1",
            "daemon": "simple",
            "restart-condition": condition,
        }
    }
    Validator(data).validate()
# App refresh-mode values accepted by the schema.
_REFRESH_MODES = ["endure", "restart"]
@pytest.mark.parametrize("mode", _REFRESH_MODES)
def test_valid_refresh_modes(data, mode):
    """Each refresh-mode validates when the app is a daemon."""
    service = {"command": "binary1", "daemon": "simple", "refresh-mode": mode}
    data["apps"] = {"service1": service}
    Validator(data).validate()
@pytest.mark.parametrize("mode", _REFRESH_MODES)
def test_refresh_mode_daemon_missing_errors(data, mode):
    """refresh-mode without 'daemon' must fail validation."""
    data["apps"] = {"service1": {"command": "binary1", "refresh-mode": mode}}
    validator = Validator(data)
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError):
        validator.validate()
# App stop-mode values accepted by the schema (a signal, optionally "-all").
_STOP_MODES = [
    "sigterm",
    "sigterm-all",
    "sighup",
    "sighup-all",
    "sigusr1",
    "sigusr1-all",
    "sigusr2",
    "sigusr2-all",
]
@pytest.mark.parametrize("mode", _STOP_MODES)
def test_valid_modes(data, mode):
    """Each stop-mode validates when the app is a daemon."""
    service = {"command": "binary1", "daemon": "simple", "stop-mode": mode}
    data["apps"] = {"service1": service}
    Validator(data).validate()
@pytest.mark.parametrize("mode", _STOP_MODES)
def test_daemon_missing_errors(data, mode):
    """stop-mode without 'daemon' must fail validation."""
    data["apps"] = {"service1": {"command": "binary1", "stop-mode": mode}}
    validator = Validator(data)
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError):
        validator.validate()
@pytest.mark.parametrize(
    "name",
    [
        "qwe#rty",
        "qwe_rty",
        "queue rty",
        "queue  rty",
        "Hi",
        "",
        "-",
        "--",
        "a--a",
        "a-",
        "a ",
        " a",
        "a a",
        "日本語",
        "한글",
        "ру́сский язы́к",
        "ໄຂ່ອີສເຕີ້",
        ":a",
        "a:",
        "a:a",
        "_a",
        "a_",
        "a_a",
    ],
)
def test_invalid_hook_names(data, name):
    """Names outside the hook-name grammar produce a schema error."""
    data["hooks"] = {name: {"plugs": ["network"]}}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        f"The 'hooks' property does not match the required schema: {name!r} is "
        "not a valid hook name. Hook names consist of lower-case "
        "alphanumeric characters and hyphens. They cannot start or end "
        "with a hyphen."
    )
    assert expected in str(error.value)
@pytest.mark.parametrize(
    "name", ["plugins", "qwe#rty", "qwe_rty", "queue rty", "queue  rty", "part/sub"]
)
def test_invalid_part_names(data, name):
    """Names outside the part-name grammar (or the reserved 'plugins') fail."""
    data["parts"] = {name: {"plugin": "nil"}}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        f"The 'parts' property does not match the required schema: {name!r} is "
        "not a valid part name. Part names consist of lower-case "
        "alphanumeric characters, hyphens and plus signs. "
        "As a special case, 'plugins' is also not a valid part name."
    )
    assert str(error.value).endswith(expected)
class TestInvalidArchitectures:
    """Scenario-driven checks for invalid 'architectures' configurations."""
    # Each scenario pairs an invalid 'architectures' value with a fragment of
    # the error message it must produce.
    scenarios = [
        (
            "single string",
            {"architectures": "amd64", "message": "'amd64' is not of type 'array'"},
        ),
        (
            "unknown object properties",
            {
                "architectures": [{"builds-on": ["amd64"], "runs-on": ["amd64"]}],
                "message": "'build-on' is a required property and additional "
                "properties are not allowed",
            },
        ),
        (
            "omit build-on",
            {
                "architectures": [{"run-on": ["amd64"]}],
                "message": "'build-on' is a required property",
            },
        ),
        (
            "build on all and others",
            {
                "architectures": [{"build-on": ["amd64", "all"]}],
                "message": "'all' can only be used within 'build-on' by itself, "
                "not with other architectures",
            },
        ),
        (
            "run on all and others",
            {
                "architectures": [{"build-on": ["amd64"], "run-on": ["amd64", "all"]}],
                "message": "'all' can only be used within 'run-on' by itself, "
                "not with other architectures",
            },
        ),
        (
            "run on all and more objects",
            {
                "architectures": [
                    {"build-on": ["amd64"], "run-on": ["all"]},
                    {"build-on": ["i396"], "run-on": ["i386"]},
                ],
                "message": "one of the items has 'all' in 'run-on', but there are "
                "2 items: upon release they will conflict. 'all' "
                "should only be used if there is a single item",
            },
        ),
        (
            "build on all and more objects",
            {
                "architectures": [{"build-on": ["all"]}, {"build-on": ["i396"]}],
                "message": "one of the items has 'all' in 'build-on', but there "
                "are 2 items: snapcraft doesn't know which one to use. "
                "'all' should only be used if there is a single item",
            },
        ),
        (
            "multiple builds run on same arch",
            {
                "architectures": [
                    {"build-on": ["amd64"], "run-on": ["amd64"]},
                    {"build-on": ["i396"], "run-on": ["amd64", "i386"]},
                ],
                "message": "multiple items will build snaps that claim to run on "
                "'amd64'",
            },
        ),
        (
            "multiple builds run on same arch with implicit run-on",
            {
                "architectures": [
                    {"build-on": ["amd64"]},
                    {"build-on": ["i396"], "run-on": ["amd64", "i386"]},
                ],
                "message": "multiple items will build snaps that claim to run on "
                "'amd64'",
            },
        ),
        (
            "mixing forms",
            {
                "architectures": [
                    "amd64",
                    {"build-on": ["i386"], "run-on": ["amd64", "i386"]},
                ],
                "message": "every item must either be a string or an object",
            },
        ),
        (
            "build on all run on specific",
            {
                "architectures": [
                    {"build-on": ["all"], "run-on": ["amd64"]},
                    {"build-on": ["all"], "run-on": ["i386"]},
                ],
                "message": "one of the items has 'all' in 'build-on', but there "
                "are 2 items: snapcraft doesn't know which one to use. "
                "'all' should only be used if there is a single item",
            },
        ),
        (
            "build on overlap",
            {
                "architectures": [
                    {"build-on": ["amd64", "i386"], "run-on": ["i386"]},
                    {"build-on": ["amd64"], "run-on": ["amd64"]},
                ],
                "message": "'amd64' is present in the 'build-on' of multiple "
                "items, which means snapcraft doesn't know which "
                "'run-on' to use when building on that architecture",
            },
        ),
    ]
    def test(self, data, architectures, message):
        """Each scenario must fail validation with its expected message."""
        data["architectures"] = architectures
        with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
            Validator(data).validate()
        assert message in str(error.value)
class IconTest(ProjectBaseTest):
    """Validation of the top-level 'icon' property."""
    def test_invalid_yaml_invalid_icon_extension(self):
        """Icons must be .png or .svg; other extensions are rejected."""
        raised = self.assertValidationRaises(
            dedent(
                """\
            name: test
            base: core18
            version: "1"
            summary: test
            description: test
            icon: icon.foo
            confinement: strict
            grade: stable
            parts:
                part1:
                    plugin: nil
            """
            )
        )
        self.assertThat(
            raised.message, Equals("icon 'icon.foo' must be either a .png or a .svg")
        )
@pytest.mark.parametrize("title", ["a title", "A Title", "T" * 40, "💩" * 40, "’" * 40])
def test_valid_title(data, title):
    """Titles up to 40 characters (any printable text) validate."""
    data["title"] = title
    validator = Validator(data)
    validator.validate()
# Full error-message templates for invalid titles, keyed by failure kind.
_EXPECTED_ERROR_TEMPLATE = {
    "number": (
        "Issues while validating snapcraft.yaml: The 'title' property "
        "does not match the required schema: {} is not of type 'string'"
    ),
    "length": (
        "Issues while validating snapcraft.yaml: The 'title' property "
        "does not match the required schema: {!r} is too long "
        "(maximum length is 40)"
    ),
}
@pytest.mark.parametrize(
    "title,error_template",
    [(1, "number"), ("T" * 41, "length"), ("💩" * 41, "length"), ("’" * 41, "length")],
)
def test_invalid_title(data, title, error_template):
    """Non-string or over-length titles fail with the matching template."""
    data["title"] = title
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = _EXPECTED_ERROR_TEMPLATE[error_template].format(title)
    assert expected in str(error.value)
class OrganizeTest(ProjectBaseTest):
    """Validation of per-part 'organize' mappings."""
    def test_yaml_organize_value_none(self):
        """An organize target of None (empty YAML value) is rejected."""
        raised = self.assertValidationRaises(
            dedent(
                """\
            name: test
            base: core18
            version: "1"
            summary: test
            description: nothing
            confinement: strict
            parts:
                part1:
                    plugin: nil
                    organize:
                        foo:
            """
            )
        )
        self.assertThat(
            str(raised),
            Contains(
                "The 'parts/part1/organize/foo' property does not match the "
                "required schema: None is not of type 'string'"
            ),
        )
    def test_yaml_organize_value_empty(self):
        """An empty-string organize target is rejected (minimum length 1)."""
        raised = self.assertValidationRaises(
            dedent(
                """\
            name: test
            base: core18
            version: "1"
            summary: test
            description: nothing
            confinement: strict
            parts:
                part1:
                    plugin: nil
                    organize:
                        foo: ''
            """
            )
        )
        self.assertThat(
            str(raised),
            Contains(
                "The 'parts/part1/organize/foo' property does not match the "
                "required schema: '' is too short (minimum length is 1)"
            ),
        )
class VersionTest(ProjectBaseTest):
    """Validation of the top-level 'version' property via full YAML input."""
    def test_invalid_yaml_version_too_long(self):
        """Versions longer than 32 characters are rejected."""
        raised = self.assertValidationRaises(
            dedent(
                """\
            name: test
            base: core18
            version: 'abcdefghijklmnopqrstuvwxyz1234567' # Max is 32 in the store
            summary: test
            description: test
            confinement: strict
            grade: stable
            parts:
                part1:
                    plugin: nil
            """
            )
        )  # noqa: E501
        self.assertThat(
            raised.message,
            Equals(
                "The 'version' property does not match the required "
                "schema: 'abcdefghijklmnopqrstuvwxyz1234567' is too long "
                "(maximum length is 32)"
            ),
        )
@pytest.mark.parametrize(
    "version", ["buttered-popcorn", "1.2.3", "v12.4:1:2~", "HeLlo", "v+"]
)
def test_valid_version(data, version):
    """Version strings inside the snap-version grammar validate."""
    data["version"] = version
    validator = Validator(data)
    validator.validate()
@pytest.mark.parametrize(
    "version",
    [
        "'*'",
        "''",
        "':v'",
        "'.v'",
        "'+v'",
        "'~v'",
        "'_v'",
        "'-v'",
        "'v:'",
        "'v.'",
        "'v_'",
        "'v-'",
        "'underscores_are_bad'",
    ],
)
def test_invalid_version(data, version):
    """Version strings outside the snap-version grammar are rejected."""
    data["version"] = version
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        "The 'version' property does not match the required "
        f"schema: {version!r} is not a valid snap version string. Snap versions "
        "consist of upper- and lower-case alphanumeric characters, "
        "as well as periods, colons, plus signs, tildes, and "
        "hyphens. They cannot begin with a period, colon, plus "
        "sign, tilde, or hyphen. They cannot end with a period, "
        "colon, or hyphen."
    )
    assert expected in str(error.value)
def test_invalid_version_type(data):
    """A numeric (non-string) version is rejected with quoting guidance."""
    data["version"] = 0.1
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        "The 'version' property does not match the required "
        "schema: snap versions need to be strings. They must "
        "also be wrapped in quotes when the value will be "
        "interpreted by the YAML parser as a non-string. "
        "Examples: '1', '1.2', '1.2.3', git (will be replaced "
        "by a git describe based version string)."
    )
    assert expected in str(error.value)
def test_invalid_version_length(data):
    """Versions longer than 32 characters are rejected."""
    data["version"] = "this.is.a.really.too.long.version"
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        "The 'version' property does not match the required "
        "schema: 'this.is.a.really.too.long.version' is too long "
        "(maximum length is 32)"
    )
    assert expected in str(error.value)
class EnvironmentTest(ProjectBaseTest):
    """Validation of global, per-app and per-hook 'environment' mappings."""
    def test_valid_environment(self):
        """Environment maps at all three levels pass validation intact."""
        # NOTE: dedent is applied twice here; the inner call already strips
        # the indent, so the outer call is a no-op.
        snapcraft_yaml = self.assertValidationPasses(
            dedent(
                dedent(
                    """\
                name: project-name
                base: core18
                version: "1"
                summary: test
                description: test
                confinement: strict
                environment:
                    GLOBAL: "1"
                    OTHER: valid-value
                apps:
                    app1:
                        command: app1
                        environment:
                            LOCALE: C
                            PLUGIN_PATH: $SNAP_USER_DATA/plugins
                hooks:
                    configure:
                        environment:
                            CONFIGURE_ENV: FOOBAR
                            CONFIGURE_ENV_INT: 1
                parts:
                    main:
                        plugin: nil
                        source: .
                """
                )
            )
        )
        self.assertThat(
            snapcraft_yaml["environment"], Equals(dict(GLOBAL="1", OTHER="valid-value"))
        )
        self.assertThat(
            snapcraft_yaml["apps"]["app1"]["environment"],
            Equals(dict(LOCALE="C", PLUGIN_PATH="$SNAP_USER_DATA/plugins")),
        )
        self.assertThat(
            snapcraft_yaml["hooks"]["configure"]["environment"],
            Equals(dict(CONFIGURE_ENV="FOOBAR", CONFIGURE_ENV_INT=1)),
        )
    def test_invalid_environment(self):
        """A list-valued environment entry is rejected (values must be scalars)."""
        raised = self.assertValidationRaises(
            dedent(
                """\
            name: project-name
            base: core18
            version: "1"
            summary: test
            description: test
            confinement: strict
            environment:
                INVALID:
                    - 1
                    - 2
            parts:
                main:
                    source: .
                    plugin: nil
            """
            )
        )
        self.assertRegex(
            raised.message,
            r"The 'environment/INVALID' property does not match the required "
            r"schema: \[1, 2\].*",
        )
@pytest.mark.parametrize("compression", ["lzo", "xz"])
def test_valid_compression(data, compression):
    """Both supported compression algorithms validate."""
    data["compression"] = compression
    validator = Validator(data)
    validator.validate()
@pytest.mark.parametrize("compression", ["lzma", "gz", "rar"])
def test_invalid_compression(data, compression):
    """Compression values outside the enum are rejected."""
    data["compression"] = compression
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        "The 'compression' property does not match the required "
        f"schema: {compression!r} is not one of ['lzo', 'xz']"
    )
    assert expected in str(error.value)
@pytest.mark.parametrize("confinement", ["strict", "devmode", "classic"])
def test_valid_confinement(data, confinement):
    """Every supported confinement value validates."""
    data["confinement"] = confinement
    validator = Validator(data)
    validator.validate()
@pytest.mark.parametrize("confinement", ["foo", "strict-", "_devmode"])
def test_invalid_confinement(data, confinement):
    """Confinement values outside the enum are rejected."""
    data["confinement"] = confinement
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        "The 'confinement' property does not match the required "
        f"schema: {confinement!r} is not one of ['classic', 'devmode', "
        "'strict']"
    )
    assert expected in str(error.value)
@pytest.mark.parametrize("desc", ["test", "multi\nline\n"])
def test_valid_description(data, desc):
    """Non-empty (including multi-line) descriptions validate."""
    data["description"] = desc
    validator = Validator(data)
    validator.validate()
@pytest.mark.parametrize("desc", [""])
def test_invalid_description(data, desc):
    """An empty description is rejected."""
    data["description"] = desc
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        "The 'description' property does not match the required "
        f"schema: {desc!r} is not a valid description string"
    )
    assert expected in str(error.value)
@pytest.mark.parametrize("grade", ["stable", "devel"])
def test_valid_grade(data, grade):
    """Both supported grade values validate."""
    data["grade"] = grade
    validator = Validator(data)
    validator.validate()
@pytest.mark.parametrize("grade", ["foo", "strict-", "_devmode"])
def test_invalid_grade(data, grade):
    """Grade values outside the enum are rejected."""
    data["grade"] = grade
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
        Validator(data).validate()
    expected = (
        "The 'grade' property does not match the required "
        f"schema: {grade!r} is not one of ['stable', 'devel']"
    )
    assert expected in str(error.value)
@pytest.mark.parametrize("epoch", [0, "0", "1*", 1, "1", "400*", "1234"])
def test_valid_epoch(data, epoch):
    """Positive integers, optionally with a trailing asterisk, are valid epochs."""
    data["epoch"] = epoch
    Validator(data).validate()
@pytest.mark.parametrize(
    "epoch",
    [
        "0*",
        "_",
        "1-",
        "1+",
        "-1",
        "-1*",
        "a",
        "1a",
        "1**",
        '"01"',
        "1.2",
        '"1.2"',
        "[1]",
    ],
)
def test_invalid_epoch(data, epoch):
    """Malformed epochs are rejected with the custom epoch format error."""
    data["epoch"] = epoch

    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as exc_info:
        Validator(data).validate()

    expected = (
        f"{epoch!r} is not a 'epoch' (epochs are positive integers "
        "followed by an optional asterisk)"
    )
    assert expected in str(exc_info.value)
def test_valid_license(data):
    """A SPDX-style string license validates."""
    data["license"] = "MIT-0"
    Validator(data).validate()
def test_invalid_license(data):
    """A non-string license value is rejected with a type error."""
    data["license"] = 1234

    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as exc_info:
        Validator(data).validate()

    expected = (
        "The 'license' property does not match the required schema: "
        "1234 is not of type 'string'"
    )
    assert expected in str(exc_info.value)
@pytest.mark.parametrize("adapter", ["none", "legacy", "full"])
def test_valid_adapter(data, adapter):
    """All lowercase adapter values pass schema validation."""
    data["apps"] = {"foo": {"command": "foo", "adapter": adapter}}
    Validator(data).validate()
@pytest.mark.parametrize("adapter", ["NONE", "F", "Full"])
def test_invalid_adapter(data, adapter):
    """Adapter values are case-sensitive; wrong casing is rejected."""
    data["apps"] = {"foo": {"command": "foo", "adapter": adapter}}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as exc_info:
        Validator(data).validate()
    assert "The 'apps/foo/adapter' property does not match" in str(exc_info.value)
@pytest.mark.parametrize(
    "build_environment",
    ["a string", ["a string"], [{"k1": "v1", "k2": "v2"}], [{"k1": 5}]],
)
def test_invalid_part_build_environment_key_type(data, build_environment):
    """build-environment must be a list of single-key string-to-string mappings."""
    data["parts"]["part1"]["build-environment"] = build_environment
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError):
        Validator(data).validate()
@pytest.mark.parametrize(
    "command_chain", ["a string", [1], ["test chain"], ["test'chain"]]
)
def test_invalid_command_chain(data, command_chain):
    """command-chain must be a list of clean (shell-safe) strings."""
    data["apps"] = {"foo": {"command": "foo", "command-chain": command_chain}}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as exc_info:
        Validator(data).validate()
    assert "The 'apps/foo/command-chain" in str(exc_info.value)
@pytest.mark.parametrize("username", ["snap_daemon", "snap_microk8s"])
def test_yaml_valid_system_usernames_long(data, username):
    """Long-form system-usernames (mapping with explicit scope) validate."""
    data["system-usernames"] = {username: {"scope": "shared"}}
    Validator(data).validate()
@pytest.mark.parametrize("username", ["snap_daemon", "snap_microk8s"])
def test_yaml_valid_system_usernames_short(data, username):
    """Short-form system-usernames (scope given directly as a string) validate."""
    data["system-usernames"] = {username: "shared"}
    Validator(data).validate()
def test_invalid_yaml_invalid_username(data):
    """Only whitelisted system usernames are accepted."""
    data["system-usernames"] = {"snap_user": "shared"}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as exc_info:
        Validator(data).validate()
    expected = "The 'system-usernames' property does not match the required schema: 'snap_user' is not a valid system-username."
    assert expected in str(exc_info.value)
def test_invalid_yaml_invalid_short_scope(data):
    """An unknown scope in the short form is rejected."""
    data["system-usernames"] = {"snap_daemon": "invalid-scope"}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as exc_info:
        Validator(data).validate()
    expected = "The 'system-usernames/snap_daemon' property does not match the required schema: 'invalid-scope' is not valid under any of the given schemas"
    assert expected in str(exc_info.value)
def test_invalid_yaml_invalid_long_scope(data):
    """An unknown scope in the long (mapping) form is rejected."""
    data["system-usernames"] = {"snap_daemon": {"scope": "invalid-scope"}}
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as exc_info:
        Validator(data).validate()
    expected = "The 'system-usernames/snap_daemon' property does not match the required schema: {'scope': 'invalid-scope'} is not valid under any of the given schemas"
    assert expected in str(exc_info.value)
class PackageManagement(ProjectBaseTest):
    """Schema tests for the `package-repositories` key."""

    def test_yaml_valid_apt_repositories(self):
        """A mix of valid PPA and deb repository definitions validates."""
        self.assertValidationPasses(
            dedent(
                """\
                name: test
                base: core18
                version: "1"
                summary: test
                description: nothing
                license: MIT
                parts:
                  part1:
                    plugin: nil
                package-repositories:
                  - type: apt
                    ppa: user/test-ppa
                  - type: apt
                    architectures: [amd64, i386]
                    components: [main, multiverse]
                    formats: [deb, deb-src]
                    key-id: test-key-id
                    key-server: keyserver.ubuntu.com
                    url: http://archive.ubuntu.com/ubuntu
                    suites: [test, test-updates, test-security]
                  - type: apt
                    key-id: test-key-id
                    url: http://archive.ubuntu.com/ubuntu
                    suites: [bionic, bionic-updates]
                  - type: apt
                    key-id: test-key-id
                    path: foo
                    url: http://archive.ubuntu.com/ubuntu
                """
            )
        )
class TestInvalidAptConfigurations:
    """Table-driven schema failures for `package-repositories` entries.

    Each scenario supplies an invalid repository list (`packages`) plus a
    substring (`message_contains`) expected in the validation error text.
    """

    scenarios = [
        (
            "ppa extra invalid field",
            dict(
                packages=[{"type": "apt", "ppa": "test/ppa", "invalid": "invalid"}],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "deb extra invalid field",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "components": ["main"],
                        "key-id": "test-key-id",
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": ["test", "test-updates", "test-security"],
                        "invalid": "invalid",
                    }
                ],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "deb missing field: key-id",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "components": ["main"],
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": ["test", "test-updates", "test-security"],
                    }
                ],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "deb missing field: url",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "components": ["main"],
                        "key-id": "test-key-id",
                        "suites": ["test", "test-updates", "test-security"],
                    }
                ],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "deb invalid deb type",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "format": ["invalid"],
                        "components": ["main"],
                        "key-id": "test-key-id",
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": ["test", "test-updates", "test-security"],
                    }
                ],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "deb invalid key-id",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "components": ["main"],
                        "key-id": "\\*\\*",
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": ["test", "test-updates", "test-security"],
                    }
                ],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "deb empty architectures",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "components": ["main"],
                        "key-id": "test-key-id",
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": ["test", "test-updates", "test-security"],
                        "architectures": [],
                    }
                ],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "deb empty suites",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "components": ["main"],
                        "key-id": "test-key-id",
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": [],
                    }
                ],
                message_contains="The 'package-repositories[0]' property does not match the required schema:",
            ),
        ),
        (
            "ppa duplicate",
            dict(
                packages=[
                    {"type": "apt", "ppa": "dupe/check"},
                    {"type": "apt", "ppa": "dupe/check"},
                ],
                message_contains="has non-unique elements",
            ),
        ),
        (
            "deb duplicate",
            dict(
                packages=[
                    {
                        "type": "apt",
                        "components": ["main"],
                        "key-id": "test-key-id",
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": ["test", "test-updates", "test-security"],
                    },
                    {
                        "type": "apt",
                        "components": ["main"],
                        "key-id": "test-key-id",
                        "url": "http://archive.ubuntu.com/ubuntu",
                        "suites": ["test", "test-updates", "test-security"],
                    },
                ],
                message_contains="has non-unique elements",
            ),
        ),
    ]

    def test_invalid(self, data, packages, message_contains):
        """Each scenario's repository list is rejected with the expected text."""
        data["package-repositories"] = packages
        with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError) as error:
            Validator(data).validate()
        assert message_contains in str(error.value)
@pytest.mark.parametrize(
    "contact",
    ("mailto:project@acme.com", ["mailto:project@acme.com", "team@acme.com"], None),
)
@pytest.mark.parametrize(
    "donation",
    (
        "https://paypal.com",
        ["https://paypal.com", "https://cafecito.app", "https://ko-fi.com"],
        None,
    ),
)
@pytest.mark.parametrize(
    "issues",
    (
        "https://github.com/acme/project/issues",
        [
            "https://github.com/acme/project/issues",
            "https://bugs.launchpad.net/project/+filebug",
        ],
        None,
    ),
)
@pytest.mark.parametrize("source_code", ("https://github.com/acme/project.git", None))
@pytest.mark.parametrize("website", ("https://project.acme.org", None))
def test_valid_metadata_links(data, contact, donation, issues, source_code, website):
    """Any combination of well-formed metadata links validates."""
    links = {
        "contact": contact,
        "donation": donation,
        "issues": issues,
        "source-code": source_code,
        "website": website,
    }
    if all(value is None for value in links.values()):
        pytest.skip("All metadata links are unset")
    for key, value in links.items():
        if value is not None:
            data[key] = value
    Validator(data).validate()
@pytest.mark.parametrize(
    "contact", (1, {"mailto:project@acme.com", "team@acme.com"}, None),
)
@pytest.mark.parametrize(
    "donation",
    (1, {"https://paypal.com", "https://cafecito.app", "https://ko-fi.com"}, None,),
)
@pytest.mark.parametrize(
    "issues",
    (
        1,
        {
            "https://github.com/acme/project/issues",
            "https://bugs.launchpad.net/project/+filebug",
        },
        None,
    ),
)
@pytest.mark.parametrize(
    "source_code", (1, ["https://github.com/acme/project.git"], None)
)
@pytest.mark.parametrize("website", (1, ["https://project.acme.org"], None))
def test_invalid_metadata_links(data, contact, donation, issues, source_code, website):
    """Wrongly-typed metadata links (ints, sets, unexpected lists) fail."""
    data.update(
        {
            "contact": contact,
            "donation": donation,
            "issues": issues,
            "source-code": source_code,
            "website": website,
        }
    )
    with pytest.raises(snapcraft.yaml_utils.errors.YamlValidationError):
        Validator(data).validate()
|
import math

# Compute and display the square root of 81 (prints 9.0).
root = math.sqrt(81)
print(root)
|
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.uix.label import Label
from kivy.uix.button import Button
from kivy.uix.togglebutton import ToggleButton
from kivy.uix.scrollview import ScrollView
from kivy.uix.dropdown import DropDown
from kivy.uix.screenmanager import Screen, ScreenManager
from kivy.uix.spinner import Spinner
from kivy.uix.filechooser import FileChooserListView
from kivy.config import Config
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import StringProperty
from kivy.uix.listview import ListView
from kivy.uix.textinput import TextInput
from kivy.core.window import Window
import pdf_config
import enter_section_name
import tab_creation
from functools import partial
import logic
import add_courier
import win32timezone
# Lock the window size: the layout is designed for a fixed resolution.
Config.set("graphics", "resizable", "0")
'''Manager holds all screens. Through the manager you are able to access any aspect of the program if you choose'''
class Manager(ScreenManager):
    def __init__(self, **kwargs):
        """Create the app's screens and their content widgets.

        Each content widget receives a reference back to this manager
        (``manager=self``) so it can drive screen transitions itself.
        """
        super(Manager, self).__init__(**kwargs)
        # Main tab-creation screen.
        self.first_screen = Screen(name="main")
        self.first_screen_content = tab_creation.BoxLayoutTabCreation(manager=self)
        self.first_screen.add_widget(self.first_screen_content)
        # PDF configuration screen.
        self.second_screen = Screen(name="pdf_config")
        self.second_screen_content = pdf_config.BoxLayoutPDFConfig(manager=self)
        self.second_screen.add_widget(self.second_screen_content)
        # Section-name entry screen.
        self.third_screen = Screen(name="enter_section_name")
        self.third_screen_content = enter_section_name.BoxLayoutEnterSectionName(manager=self)
        self.third_screen.add_widget(self.third_screen_content)
        # NOTE(review): only third_screen is registered with the manager;
        # first_screen and second_screen are built but never passed to
        # self.add_widget() — confirm whether they are attached elsewhere
        # or whether this is a bug.
        self.add_widget(self.third_screen)
        # self.fourth_screen = Screen(name="FileChooser")
        # self.fourth_screen_content = BoxLayoutFileBrowser(manager=self)
        # self.fourth_screen.add_widget(self.fourth_screen_content)
        # self.fifth_screen = Screen(name="NamePDF")
class TestApp(App):
    def build(self):
        """Configure window chrome (title, colour, icon) and return the root widget."""
        self.title = "EloTab"
        # Dark teal background colour (RGBA).
        Window.clearcolor = (0.04705, 0.1019, .10588, 1)
        self.icon = 'EloTabLogo16.png'
        return Manager()
# Launch the Kivy application when executed as a script.
if __name__ == "__main__":
    TestApp().run()
|
print('Let\'s practice everything.')
print('You\'d need to konw \'bout escapes with \\ that do \n newlines and \t tabs.')
# A poem mixing literal tab/newline escapes into a triple-quoted string.
poem = """
\tThe lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
print("--------------")
print(poem)
print("--------------")
# 10 - 2 + 3 - 6 evaluates to 5.
five = 10 - 2 + 3 - 6
print('This should be five: %s' % five)
def secret_formula(started):
    """Scale a starting count into (beans, jars, crates) totals."""
    total_beans = started * 500
    total_jars = total_beans / 1000
    total_crates = total_jars / 100
    return total_beans, total_jars, total_crates
start_point = 10000
results = secret_formula(start_point)
beans, jars, crates = results
print('With a starting point of: %d' % start_point)
print('We\'d have %d beans, %d jars, and %d crates.' % results)
start_point = start_point / 10
print('We can also do that this way:')
print('We\'d have %d beans, %d jars, and %d crates.' % secret_formula(start_point))
|
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities

# Connect to a remote Selenium hub, load example.com and save a screenshot.
driver = webdriver.Remote(
    command_executor='http://127.0.0.1:4444/wd/hub',
    desired_capabilities=DesiredCapabilities.FIREFOX,
)
driver.get('http://example.com')
if driver.get_screenshot_as_file('./screen.png'):
    # Fixed: the original used the Python 2 `print` statement, a syntax
    # error on Python 3.
    print("Screenshot saved to ./screen.png")
# Fixed: `driver.quit` without parentheses was a no-op attribute access,
# leaking the remote browser session; quit() must be called.
driver.quit()
|
"""Class definition of Elements."""
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC # noqa
# Maps the keyword-argument names accepted by Element/Elements to the
# selenium `By` locator strategies they stand for.
LOCATOR_MAP = {'css': By.CSS_SELECTOR,
               'id_': By.ID,
               'name': By.NAME,
               'xpath': By.XPATH,
               'link_text': By.LINK_TEXT,
               'partial_link_text': By.PARTIAL_LINK_TEXT,
               'tag_name': By.TAG_NAME,
               'class_name': By.CLASS_NAME,
               }
class Component:
    """Base class for page components that share a browser instance."""

    def __init__(self, browser):
        """Store the selenium browser/webdriver handle for later use."""
        self.browser = browser
class Element:
    """Class definition of a selenium element object.

    Descriptor: reading the attribute waits for the element to become
    visible and returns it; assigning to the attribute types the value
    into the element.
    """
    def __init__(self, **kwargs):
        """Populate values.

        Expects exactly one keyword argument whose name is a LOCATOR_MAP
        key (e.g. ``css=...``, ``xpath=...``) and whose value is the
        locator string.
        """
        key, value = next(iter(kwargs.items()))
        self.locating_key = LOCATOR_MAP[key]
        self.locating_value = value
    def __get__(self, obj, objtype):
        """Descriptor for retrieving element.

        Waits up to 20 seconds for visibility before returning it.
        """
        element = WebDriverWait(obj.browser, 20).until(
            EC.visibility_of_element_located(
                (self.locating_key, self.locating_value)
            )
        )
        return element
    def __set__(self, obj, value):
        """Descriptor for setting a value.

        Waits for visibility, clears the field (best effort), then sends
        `value` as keystrokes.
        """
        element = WebDriverWait(obj.browser, 20).until(
            EC.visibility_of_element_located(
                (self.locating_key, self.locating_value)
            )
        )
        # element.send_keys()
        # TODO: this is a workaround for firefox
        # clear() is deliberately best-effort: some drivers raise here.
        try:
            element.clear()
        except: # noqa
            pass
        element.send_keys(value)
        # Short pause to let the UI react to the input.
        time.sleep(.3)
class Elements:
    """Class definition of multiple selenium element objects.

    Descriptor: reading the attribute waits for the presence of every
    matching element and returns them as a list. Assignment is not
    supported for collections.
    """
    def __init__(self, **kwargs):
        """Populate values.

        Expects exactly one keyword argument whose name is a LOCATOR_MAP
        key and whose value is the locator string.
        """
        key, value = next(iter(kwargs.items()))
        self.locating_key = LOCATOR_MAP[key]
        self.locating_value = value
    def __get__(self, obj, objtype):
        """Descriptor for retrieving element.

        Waits up to 20 seconds for all matching elements to be present.
        """
        elements = WebDriverWait(obj.browser, 20).until(
            EC.presence_of_all_elements_located(
                (self.locating_key, self.locating_value)
            )
        )
        return elements
    def __set__(self, obj, value):
        """Descriptor for setting a value.

        :raises TypeError: always; a collection cannot be assigned to.
        """
        # Fixed: the original did ``raise('...')`` — raising a plain str,
        # which itself fails with an unrelated TypeError message. Raise a
        # proper exception carrying the intended text instead.
        raise TypeError('Setting values not supported for this object.')
|
""" SlackAction
This action posts the status change to a Slack channel
It uses Slack Webhooks.
Also compatible with Mattermost, which is an open source alternative to Slack.
To create a webhook URL refer the following :
* Slack : https://api.slack.com/incoming-webhooks
* Mattermost : https://docs.mattermost.com/developer/webhooks-incoming.html
Add the webhook URL to dirac.cfg at Operations/[]/ResourceStatus/Config/Slack
example:
Operations/
Defaults/
ResourceStatus/
Config/
Slack = https://hooks.slack.com/services/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
Mattermost = https://mattermost.web.cern.ch/hooks/axy94k3m1pg5xeyaw3qqb3x8bo
Even if using Mattermost,the URL is still to be placed at
Operations/[]/ResourceStatus/Config/Slack and not Operations/[]/ResourceStatus/Config/Mattermost
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = '$Id$'
import json
import requests
import six
from DIRAC import S_ERROR, S_OK
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.ResourceStatusSystem.PolicySystem.Actions.BaseAction import BaseAction
from DIRAC.Core.Utilities.SiteSEMapping import getSitesForSE
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getCESiteMapping
class SlackAction(BaseAction):
  '''
  Action that sends a brief Slack (or Mattermost) message describing a
  resource/site status change, via an incoming-webhook URL.
  '''

  def __init__(self, name, decisionParams, enforcementResult, singlePolicyResults,
               clients=None, url=None):
    """
    :param url: optional webhook URL; when None it is read from the CS
                option Operations/[]/ResourceStatus/Config/Slack
    """
    super(SlackAction, self).__init__(name, decisionParams, enforcementResult,
                                      singlePolicyResults, clients)
    if url is not None:
      self.url = url
    else:
      self.url = Operations().getValue('ResourceStatus/Config/Slack')

  def run(self):
    '''
    Checks it has the parameters it needs and posts the status change
    to the configured webhook.

    :return: S_OK on success, S_ERROR when a required parameter is missing
    '''
    if self.url is None:
      return S_ERROR('Slack URL not set')

    # Minor security checks
    element = self.decisionParams['element']
    if element is None:
      return S_ERROR('element should not be None')
    name = self.decisionParams['name']
    if name is None:
      return S_ERROR('name should not be None')
    statusType = self.decisionParams['statusType']
    if statusType is None:
      return S_ERROR('statusType should not be None')
    previousStatus = self.decisionParams['status']
    if previousStatus is None:
      return S_ERROR('status should not be None')
    status = self.enforcementResult['Status']
    if status is None:
      return S_ERROR('status should not be None')
    reason = self.enforcementResult['Reason']
    if reason is None:
      return S_ERROR('reason should not be None')

    # Resolve the site the element belongs to. NOTE: siteName is only used
    # for error logging below; the message itself is built from
    # name/statusType/status/reason.
    if self.decisionParams['element'] == 'Site':
      siteName = self.decisionParams['name']
    else:
      elementType = self.decisionParams['elementType']
      if elementType == 'StorageElement':
        siteName = getSitesForSE(name)
      elif elementType == 'ComputingElement':
        res = getCESiteMapping(name)
        if not res['OK']:
          self.log.error("Failure getting Site2CE mapping", res['Message'])
          # Bug fix: the original assigned the plain string 'ERROR' here,
          # which crashed with a TypeError at the siteName['OK'] check
          # below ('ERROR'['OK'] is not valid). Keep a result-shaped dict.
          siteName = {'OK': False, 'Message': res['Message']}
        else:
          # NOTE(review): res['Value'] is presumably a CE->site mapping;
          # the string/[0] handling below may not fit a dict — confirm.
          siteName = res
      else:
        siteName = {'OK': True, 'Value': 'Unassigned'}
      if not siteName['OK']:
        self.log.error('Resource %s does not exist at any site: %s' % (name, siteName['Message']))
        siteName = "Unassigned Resources"
      elif not siteName['Value']:
        siteName = "Unassigned Resources"
      else:
        siteName = siteName['Value'] if isinstance(siteName['Value'], six.string_types) else siteName['Value'][0]

    message = "*{name}* _{statusType}_ --> _{status}_ \n{reason}".format(name=name,
                                                                         statusType=statusType,
                                                                         status=status,
                                                                         reason=reason)
    return self.sendSlackMessage(message)

  def sendSlackMessage(self, message):
    """
    Sends a slack message to self.url

    :param str message: text message to send
    :return: S_OK (raises requests.HTTPError on a non-2xx response)
    """
    payload = {'text': message}
    response = requests.post(self.url, data=json.dumps(payload), headers={'Content-Type': 'application/json'})
    response.raise_for_status()
    return S_OK()
|
"""
.. module:: fatbotslim.irc.bot
.. moduleauthor:: Mathieu D. (MatToufoutu)
This module contains IRC protocol related stuff.
"""
import re
from random import choice
from gevent import spawn, joinall, killall
from gevent.pool import Group
from fatbotslim.irc import u
from fatbotslim.irc.codes import *
from fatbotslim.irc.tcp import TCP, SSL
from fatbotslim.handlers import CTCPHandler, PingHandler, UnknownCodeHandler, RightsHandler
from fatbotslim.log import create_logger
# CTCP payloads are wrapped in \x01 delimiter bytes.
ctcp_re = re.compile(ur'\x01(.*?)\x01')
log = create_logger(__name__)
class NullMessage(Exception):
    """Raised when an empty line is received from the server."""
class Message(object):
    """
    Holds informations about a line received from the server.
    """
    def __init__(self, data):
        """
        :param data: line received from the server.
        :type data: unicode
        """
        self._raw = data
        # True when the line could not be parsed at all.
        self.erroneous = False
        # Handlers may flip this to False to stop further propagation.
        self.propagate = True
        try:
            self.src, self.dst, self.command, self.args = Message.parse(data)
        except IndexError:
            self.src, self.dst, self.command, self.args = [None] * 4
            self.erroneous = True
    def __str__(self):
        return u"<Message(src='{0}', dst='{1}', command='{2}', args={3})>".format(
            self.src.name, self.dst, self.command, self.args
        )
    @classmethod
    def parse(cls, data):
        """
        Extracts message informations from `data`.
        :param data: received line.
        :type data: unicode
        :return: extracted informations (source, destination, command, args).
        :rtype: tuple(Source, str, str, list)
        :raise: :class:`fatbotslim.irc.NullMessage` if `data` is empty.
        """
        # NOTE(review): an empty `data` actually raises IndexError at
        # data[0] (caught by __init__), not NullMessage — confirm docstring.
        src = u''
        dst = None
        # A leading ':' introduces the message prefix (its source).
        if data[0] == u':':
            src, data = data[1:].split(u' ', 1)
        # ' :' separates the argument list from the trailing free-form part.
        if u' :' in data:
            data, trailing = data.split(u' :', 1)
            args = data.split()
            args.extend(trailing.split())
        else:
            args = data.split()
        command = args.pop(0)
        if command in (PRIVMSG, NOTICE):
            dst = args.pop(0)
            # CTCP payloads are wrapped in \x01; rewrite as CTCP_<command>.
            if ctcp_re.match(args[0]):
                args = args[0].strip(u'\x01').split()
                command = u'CTCP_' + args.pop(0)
        return Source(src), dst, command, args
class Source(object):
    """
    Holds informations about a message sender.
    """
    def __init__(self, prefix):
        """
        :param prefix: prefix with format ``<servername>|<nick>['!'<user>]['@'<host>]``.
        :type prefix: unicode
        """
        self._raw = prefix
        self.name, self.mode, self.user, self.host = Source.parse(prefix)
    def __str__(self):
        return u"<Source(nick='{0}', mode='{1}', user='{2}', host='{3}')>".format(
            self.name, self.mode, self.user, self.host
        )
    @classmethod
    def parse(cls, prefix):
        """
        Extracts informations from `prefix`.
        :param prefix: prefix with format ``<servername>|<nick>['!'<user>]['@'<host>]``.
        :type prefix: unicode
        :return: extracted informations (nickname or host, mode, username, host).
        :rtype: tuple(str, str, str, str)
        """
        # No single '!' means a bare server name: everything else is None.
        bang_parts = prefix.split(u'!')
        if len(bang_parts) != 2:
            return prefix, None, None, None
        nick, rest = bang_parts
        # An optional mode may precede the user, separated by exactly one '='.
        eq_parts = rest.split(u'=')
        if len(eq_parts) == 2:
            mode, rest = eq_parts
        else:
            mode = None
        # Split user from host on a single '@'; otherwise treat the
        # remainder as the user with no host.
        at_parts = rest.split(u'@')
        if len(at_parts) != 2:
            return nick, mode, rest, None
        user, host = at_parts
        return nick, mode, user, host
class IRC(object):
    """
    The main IRC bot class.
    """
    # Message sent with QUIT when the bot disconnects.
    quit_msg = u"I'll be back!"
    # Handlers every bot instance registers at construction time.
    default_handlers = [
        CTCPHandler,
        PingHandler,
        UnknownCodeHandler,
        RightsHandler
    ]
    def __init__(self, settings):
        """
        The only expected argument is the bot's configuration,
        it should be a :class:`dict` with at least the following keys defined:
        * server: the ircd's host (:class:`str`)
        * port: the ircd's port (:class:`int`)
        * ssl: connect to the server using SSL (:class:`bool`)
        * channels: the channels to join upon connection (:class:`list`)
        * nick: the bot's nickname (:class:`str`)
        * realname: the bot's real name (:class:`str`)
        :param settings: bot configuration.
        :type settings: dict
        """
        self.server = settings['server']
        self.port = settings['port']
        self.ssl = settings['ssl']
        self.channels = map(u, settings['channels'])
        self.nick = u(settings['nick'])
        self.realname = u(settings['realname'])
        self.handlers = []
        self._pool = Group()
        # Set by add_handler() when a RightsHandler instance is registered.
        self.rights = None
        log.setLevel(settings.get('loglevel', 'INFO'))
        for handler in self.default_handlers:
            self.add_handler(handler)
    def _create_connection(self):
        """
        Creates a transport channel.
        :return: transport channel instance
        :rtype: :class:`fatbotslim.irc.tcp.TCP` or :class:`fatbotslim.irc.tcp.SSL`
        """
        transport = SSL if self.ssl else TCP
        return transport(self.server, self.port)
    def _connect(self):
        """
        Connects the bot to the server and identifies itself.
        """
        self.conn = self._create_connection()
        # The transport's connect loop runs in its own greenlet.
        spawn(self.conn.connect)
        self.set_nick(self.nick)
        self.cmd(u'USER', u'{0} 3 * {1}'.format(self.nick, self.realname))
    def _send(self, command):
        """
        Sends a raw line to the server.
        :param command: line to send.
        :type command: unicode
        """
        # Lines go over the wire UTF-8 encoded, via the transport's out-queue.
        command = command.encode('utf-8')
        log.debug('>> ' + command)
        self.conn.oqueue.put(command)
    def _event_loop(self):
        """
        The main event loop.
        Data from the server is parsed here using :func:`_parse_msg`.
        Parsed events are put in the object's event queue (`self.events`).
        """
        while True:
            orig_line = self.conn.iqueue.get()
            log.debug('<< ' + orig_line)
            line = u(orig_line, errors='replace').strip()
            err_msg = False
            try:
                message = Message(line)
            except ValueError:
                err_msg = True
            if err_msg or message.erroneous:
                log.error("Received a line that can't be parsed: \"%s\"" % orig_line)
                continue
            # Nick collision: retry with a randomized suffix.
            if message.command == ERR_NICKNAMEINUSE:
                self.set_nick(IRC.randomize_nick(self.nick))
            # Fully connected: join the configured channels.
            elif message.command == RPL_CONNECTED:
                for channel in self.channels:
                    self.join(channel)
            self._handle(message)
    def _handle(self, msg):
        """
        Pass a received message to the registered handlers.

        Handlers run one after another, each in its own greenlet; the next
        handler is only spawned (via the link callback) while the message's
        `propagate` flag remains True.
        :param msg: received message
        :type msg: :class:`fatbotslim.irc.Message`
        """
        def handler_yielder():
            for handler in self.handlers:
                yield handler
        def handler_callback(_):
            if msg.propagate:
                try:
                    # NOTE: generator .next() is Python 2 syntax
                    # (next(hyielder) on Python 3).
                    h = hyielder.next()
                    g = self._pool.spawn(handler_runner, h)
                    g.link(handler_callback)
                except StopIteration:
                    pass
        def handler_runner(h):
            # Dispatch to the handler method registered for this command.
            for command in h.commands:
                if command == msg.command:
                    method = getattr(h, h.commands[command])
                    method(msg)
        hyielder = handler_yielder()
        try:
            next_handler = hyielder.next()
            g = self._pool.spawn(handler_runner, next_handler)
            g.link(handler_callback)
        except StopIteration:
            pass
    @classmethod
    def randomize_nick(cls, base, suffix_length=3):
        """
        Generates a pseudo-random nickname.
        :param base: prefix to use for the generated nickname.
        :type base: unicode
        :param suffix_length: amount of digits to append to `base`
        :type suffix_length: int
        :return: generated nickname.
        :rtype: unicode
        """
        suffix = u''.join(choice(u'0123456789') for _ in range(suffix_length))
        return u'{0}{1}'.format(base, suffix)
    def enable_rights(self):
        """
        Enables rights management provided by :class:`fatbotslim.handlers.RightsHandler`.
        """
        if self.rights is None:
            handler_instance = RightsHandler(self)
            # NOTE(review): self.rights is not updated here, so a later call
            # would insert another RightsHandler — confirm intent.
            self.handlers.insert(len(self.default_handlers), handler_instance)
    def disable_rights(self):
        """
        Disables rights management provided by :class:`fatbotslim.handlers.RightsHandler`.
        """
        for handler in self.handlers:
            if isinstance(handler, RightsHandler):
                self.handlers.remove(handler)
                break
        self.rights = None
    def add_handler(self, handler, args=None, kwargs=None):
        """
        Registers a new handler.
        :param handler: handler to register.
        :type handler: :class:`fatbotslim.handlers.BaseHandler`
        :param args: positional arguments to pass to the handler's constructor.
        :type args: list
        :param kwargs: keyword arguments to pass to the handler's constructor.
        :type kwargs: dict
        """
        args = [] if args is None else args
        kwargs = {} if kwargs is None else kwargs
        handler_instance = handler(self, *args, **kwargs)
        # Keep a shortcut to the rights handler for rights management.
        if isinstance(handler_instance, RightsHandler):
            self.rights = handler_instance
        if handler_instance not in self.handlers:
            self.handlers.append(handler_instance)
    def cmd(self, command, args, prefix=None):
        """
        Sends a command to the server.
        :param command: IRC code to send.
        :type command: unicode
        :param args: arguments to pass with the command.
        :type args: basestring
        :param prefix: optional prefix to prepend to the command.
        :type prefix: str or None
        """
        if prefix is None:
            prefix = u''
        raw_cmd = u'{0} {1} {2}'.format(prefix, command, args).strip()
        self._send(raw_cmd)
    def ctcp_reply(self, command, dst, message=None):
        """
        Sends a reply to a CTCP request.
        :param command: CTCP command to use.
        :type command: str
        :param dst: sender of the initial request.
        :type dst: str
        :param message: data to attach to the reply.
        :type message: str
        """
        if message is None:
            raw_cmd = u'\x01{0}\x01'.format(command)
        else:
            raw_cmd = u'\x01{0} {1}\x01'.format(command, message)
        self.notice(dst, raw_cmd)
    def msg(self, target, msg):
        """
        Sends a message to an user or channel.
        :param target: user or channel to send to.
        :type target: str
        :param msg: message to send.
        :type msg: str
        """
        self.cmd(u'PRIVMSG', u'{0} :{1}'.format(target, msg))
    def notice(self, target, msg):
        """
        Sends a NOTICE to an user or channel.
        :param target: user or channel to send to.
        :type target: str
        :param msg: message to send.
        :type msg: basestring
        """
        self.cmd(u'NOTICE', u'{0} :{1}'.format(target, msg))
    def join(self, channel):
        """
        Make the bot join a channel.
        :param channel: new channel to join.
        :type channel: str
        """
        self.cmd(u'JOIN', channel)
    def set_nick(self, nick):
        """
        Changes the bot's nickname.
        :param nick: new nickname to use
        :type nick: unicode
        """
        self.cmd(u'NICK', nick)
    def disconnect(self):
        """
        Disconnects the bot from the server.
        """
        self.cmd(u'QUIT', u':{0}'.format(self.quit_msg))
    def run(self):
        """
        Connects the bot and starts the event loop.
        """
        self._connect()
        self._event_loop()
def run_bots(bots):
    """
    Run many bots in parallel.
    :param bots: IRC bots to run.
    :type bots: list
    """
    greenlets = [spawn(bot.run) for bot in bots]
    try:
        joinall(greenlets)
    except KeyboardInterrupt:
        # Ctrl-C: ask every bot to QUIT cleanly before killing its greenlet.
        for bot in bots:
            bot.disconnect()
    finally:
        killall(greenlets)
|
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_InvalidRedditObjectDialog(object):
    """Layout for the invalid-reddit-objects dialog.

    NOTE(review): this looks like pyuic5-generated code (setObjectName /
    retranslateUi pattern); prefer regenerating from the Designer .ui file
    over hand-editing.
    """
    def setupUi(self, InvalidRedditObjectDialog):
        """Build the widget tree and wire up auto-connected slots."""
        InvalidRedditObjectDialog.setObjectName("InvalidRedditObjectDialog")
        InvalidRedditObjectDialog.resize(596, 651)
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(InvalidRedditObjectDialog)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.verticalLayout = QtWidgets.QVBoxLayout()
        self.verticalLayout.setSpacing(20)
        self.verticalLayout.setObjectName("verticalLayout")
        # Explanatory label at the top of the dialog.
        self.label = QtWidgets.QLabel(InvalidRedditObjectDialog)
        self.label.setWordWrap(True)
        self.label.setObjectName("label")
        self.verticalLayout.addWidget(self.label)
        # "Select all" checkbox plus the per-object decision list.
        self.decision_box = QtWidgets.QVBoxLayout()
        self.decision_box.setObjectName("decision_box")
        self.select_all_checkbox = QtWidgets.QCheckBox(InvalidRedditObjectDialog)
        font = QtGui.QFont()
        font.setBold(True)
        font.setWeight(75)
        self.select_all_checkbox.setFont(font)
        self.select_all_checkbox.setObjectName("select_all_checkbox")
        self.decision_box.addWidget(self.select_all_checkbox)
        self.decision_list = QtWidgets.QListWidget(InvalidRedditObjectDialog)
        self.decision_list.setObjectName("decision_list")
        self.decision_box.addWidget(self.decision_list)
        self.line = QtWidgets.QFrame(InvalidRedditObjectDialog)
        self.line.setFrameShape(QtWidgets.QFrame.HLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.decision_box.addWidget(self.line)
        self.verticalLayout.addLayout(self.decision_box)
        self.verticalLayout_2.addLayout(self.verticalLayout)
        # OK-only button box at the bottom.
        self.button_box = QtWidgets.QDialogButtonBox(InvalidRedditObjectDialog)
        self.button_box.setStandardButtons(QtWidgets.QDialogButtonBox.Ok)
        self.button_box.setObjectName("button_box")
        self.verticalLayout_2.addWidget(self.button_box)
        self.retranslateUi(InvalidRedditObjectDialog)
        QtCore.QMetaObject.connectSlotsByName(InvalidRedditObjectDialog)
    def retranslateUi(self, InvalidRedditObjectDialog):
        """Apply translatable window title and widget texts."""
        _translate = QtCore.QCoreApplication.translate
        InvalidRedditObjectDialog.setWindowTitle(_translate("InvalidRedditObjectDialog", "Invalid Reddit Objects"))
        self.label.setText(_translate("InvalidRedditObjectDialog", "<html><head/><body><p>The following users/subreddits are not valid. They have either been deleted, or banned/suspended by reddit. </p><p>How would you like to handle these users/subreddits?</p></body></html>"))
        self.select_all_checkbox.setText(_translate("InvalidRedditObjectDialog", "Select All"))
|
import os


def getpart(src, dest, start, length, bufsize=1024 * 1024):
    """Copy `length` bytes from `src`, beginning at offset `start`, into `dest`.

    Data is copied in chunks of at most `bufsize` bytes; copying stops early
    if the source file ends before `length` bytes were available.
    """
    # Fixed: use context managers so both files are closed even on error
    # (the original leaked the handles if read/write raised).
    with open(src, 'rb') as f_in, open(dest, 'wb') as f_out:
        f_in.seek(start)
        while length:
            chunk = min(bufsize, length)
            data = f_in.read(chunk)
            if not data:
                # EOF before `length` bytes were available.
                break
            f_out.write(data)
            length -= chunk


def split(f, splitsize=1024 * 1024):
    """Split file `f` into consecutive parts named `<f>.split.<i>`.

    Each part holds at most `splitsize` bytes; the final part holds the
    remainder.
    """
    size = os.path.getsize(f)
    # Fixed: `print` as a function and `//` floor division — the original
    # Python 2 print statements are syntax errors on Python 3, and `/`
    # would make range() fail on a float. Unused `nbblocks` removed.
    print("File size is %s" % size)
    for i in range(0, size // splitsize + 1):
        mypos = i * splitsize
        print("Reading {0} block at position {1}".format(i, mypos))
        getpart(f, f + ".split.%s" % i, mypos, splitsize)


if __name__ == '__main__':
    myfile = "/home/remi/tmp/binarytest.ori"
    split(myfile)
|
"""PyDbLite.py
BSD licence
Author : Pierre Quentel (pierre.quentel@gmail.com)
In-memory database management, with selection by list comprehension
or generator expression
Fields are untyped : they can store anything that can be pickled.
Selected records are returned as dictionaries. Each record is
identified by a unique id and has a version number incremented
at every record update, to detect concurrent access
Syntax :
from PyDbLite import Base
db = Base('dummy')
# create new base with field names
db.create('name','age','size')
# existing base
db.open()
# insert new record
db.insert(name='homer',age=23,size=1.84)
# records are dictionaries with a unique integer key __id__
# simple selection by field value
records = db(name="homer")
# complex selection by list comprehension
res = [ r for r in db if 30 > r['age'] >= 18 and r['size'] < 2 ]
# or generator expression
for r in (r for r in db if r['name'] in ('homer','marge') ):
# delete a record or a list of records
db.delete(one_record)
db.delete(list_of_records)
# delete a record by its id
del db[rec_id]
# direct access by id
record = db[rec_id] # the record such that record['__id__'] == rec_id
# create an index on a field
db.create_index('age')
# update
db.update(record,age=24)
# add and drop fields
db.add_field('new_field',default=0)
db.drop_field('name')
# save changes on disk
db.commit()
version 2.2 : add __contains__
version 2.3 : introduce syntax (db('name')>'f') & (db('age') == 30)
version 2.4 :
- add BSD Licence
- raise exception if unknown fields in insert
version 2.5 :
- test is now in folder test
version 2.6
- Modified for boxee by bartsidee
- db saved using boxee api
"""
version = "2.6"

import os
# cPickle is the C-accelerated pickler on Python 2; on Python 3 it was
# merged into pickle, so alias it under the old name.
try: import cPickle
except: import pickle as cPickle
import bisect
import mc
import binascii
import bz2

# `set` became a builtin in Python 2.4; on older interpreters fall back to
# the sets module shim under the same name.
try:
    set([])
except NameError:
    from sets import Set as set
class Index:
    """Read-only accessor for one indexed field of a Base instance.

    The actual mapping (field value -> sorted list of record ids) lives in
    ``db.indices``; the Base attaches an Index to itself as ``_<field>``.
    """

    def __init__(self, db, field):
        self.db = db          # owning Base instance
        self.field = field    # name of the indexed field

    def __iter__(self):
        # iterate over the distinct values taken by the field
        return iter(self.db.indices[self.field])

    def keys(self):
        """Return the distinct values of the indexed field."""
        return self.db.indices[self.field].keys()

    def __getitem__(self, key):
        """Return the list of records whose field equals `key`
        (an empty list when no record matches)."""
        matching_ids = self.db.indices[self.field].get(key, [])
        return [self.db.records[rec_id] for rec_id in matching_ids]
class Tester:
    """Lazily filtered record set built by ``db('field') <op> value``.

    Each comparison narrows ``self.records`` in place and returns self so
    comparisons can be combined; ``&`` and ``|`` merge two Testers by
    record identity.
    """

    def __init__(self, db, key):
        self.db = db
        self.key = key
        # start from the full record set; comparisons narrow it down
        self.records = db.records.values()

    def __eq__(self, other):
        if len(self.records) == len(self.db.records):
            # nothing filtered yet: delegate to db() so indices are used
            self.records = eval("self.db(%s=other)" % self.key)
        else:
            self.records = [rec for rec in self.records
                            if rec[self.key] == other]
        return self

    def __ne__(self, other):
        self.records = [rec for rec in self.records if rec[self.key] != other]
        return self

    def __lt__(self, other):
        self.records = [rec for rec in self.records if rec[self.key] < other]
        return self

    def __le__(self, other):
        self.records = [rec for rec in self.records if rec[self.key] <= other]
        return self

    def __gt__(self, other):
        self.records = [rec for rec in self.records if rec[self.key] > other]
        return self

    def __ge__(self, other):
        self.records = [rec for rec in self.records if rec[self.key] >= other]
        return self

    def __and__(self, other_tester):
        # intersect by object identity of the records
        mine = dict([(id(rec), rec) for rec in self.records])
        theirs = dict([(id(rec), rec) for rec in other_tester.records])
        common = set(mine.keys()) & set(theirs.keys())
        result = Tester(self.db, self.key)
        result.records = [mine[rec_id] for rec_id in common]
        return result

    def __or__(self, other_tester):
        # union by object identity of the records
        merged = dict([(id(rec), rec) for rec in self.records])
        merged.update(dict([(id(rec), rec) for rec in other_tester.records]))
        result = Tester(self.db, self.key)
        result.records = merged.values()
        return result

    def extract(self, *fields):
        """Return [[r[f1], r[f2], ...], ...] for the selected records."""
        return [[rec[f] for f in fields] for rec in self.records]

    def __len__(self):
        return len(self.records)

    def __iter__(self):
        return iter(self.records)
class Base:
    """In-memory database of records (dicts keyed by ``__id__``).

    Persistence is delegated to the boxee API (module ``mc``): the whole
    base is pickled, bz2-compressed and hex-encoded into a local config
    value named after the base file name. NOTE(review): the code uses
    Python-2-only constructs (``raise E, msg``, ``iteritems``,
    ``itervalues``) and therefore requires Python 2.
    """

    def __init__(self,basename,protocol=cPickle.HIGHEST_PROTOCOL):
        """protocol as defined in pickle / cPickle
        Defaults to the highest protocol available
        For maximum compatibility use protocol = 0"""
        self.name = os.path.basename(basename)
        self.protocol = protocol

    def create(self,*fields,**kw):
        """Create a new base with specified field names
        A keyword argument mode can be specified ; it is used if a file
        with the base name already exists
        - if mode = 'open' : open the existing base, ignore the fields
        - if mode = 'override' : erase the existing base and create a
        new one with the specified fields"""
        self.mode = mode = kw.get("mode",None)
        if mode == "open":
            # NOTE(review): `bool` shadows the builtin; kept as-is.
            bool, self = self.open()
            if bool: return self
            # open() failed: fall through and create a fresh base
        elif mode == "override":
            pass
        self.fields = list(fields)
        self.records = {}
        self.next_id = 0
        self.indices = {}
        self.commit()
        return self

    def create_index(self,*fields):
        """Create an index on the specified field names
        An index on a field is a mapping between the values taken by the field
        and the sorted list of the ids of the records whose field is equal to
        this value
        For each indexed field, an attribute of self is created, an instance
        of the class Index (see above). Its name is the field name, with the
        prefix _ to avoid name conflicts
        """
        reset = False
        for f in fields:
            if not f in self.fields:
                raise NameError,"%s is not a field name %s" %(f,self.fields)
            # initialize the indices
            if self.mode == "open" and f in self.indices:
                continue
            reset = True
            self.indices[f] = {}
            for _id,record in self.records.iteritems():
                # use bisect to quickly insert the id in the list
                bisect.insort(self.indices[f].setdefault(record[f],[]),
                              _id)
            # create a new attribute of self, used to find the records
            # by this index
            setattr(self,'_'+f,Index(self,f))
        if reset:
            self.commit()

    def delete_index(self,*fields):
        """Delete the index on the specified fields"""
        # validate all fields first so either every index is removed or none
        for f in fields:
            if not f in self.indices:
                raise ValueError,"No index on field %s" %f
        for f in fields:
            del self.indices[f]
        self.commit()

    def open(self):
        """Open an existing database and load its content into memory.

        Returns (True, self) on success, (False, self) on any failure.
        """
        # guess protocol
        # the persisted payload is hex-encoded, bz2-compressed pickle data
        # stored in the boxee local config under the base name
        data= bz2.decompress(binascii.unhexlify(mc.GetApp().GetLocalConfig().GetValue(self.name)))
        try:
            self.fields, self.next_id, self.records, self.indices = cPickle.loads(data)
            for f in self.indices.keys():
                setattr(self,'_'+f,Index(self,f))
            self.mode = "open"
            return True, self
        except: return False, self

    def commit(self):
        """Write the database to a file"""
        # (fields, next_id, records, indices) is the full persistent state
        tupple = (self.fields,self.next_id,self.records,self.indices)
        data = cPickle.dumps(tupple, self.protocol)
        mc.GetApp().GetLocalConfig().SetValue(self.name, binascii.hexlify(bz2.compress(data)))

    def insert(self,*args,**kw):
        """Insert a record in the database
        Parameters can be positional or keyword arguments. If positional
        they must be in the same order as in the create() method
        If some of the fields are missing the value is set to None
        Returns the record identifier
        """
        if args:
            kw = dict([(f,arg) for f,arg in zip(self.fields,args)])
        # initialize all fields to None
        record = dict([(f,None) for f in self.fields])
        # raise exception if unknown field
        for key in kw:
            if not key in self.fields:
                raise NameError,"Invalid field name : %s" %key
        # set keys and values
        for (k,v) in kw.iteritems():
            record[k]=v
        # add the key __id__ : record identifier
        record['__id__'] = self.next_id
        # add the key __version__ : version number
        record['__version__'] = 0
        # create an entry in the dictionary self.records, indexed by __id__
        self.records[self.next_id] = record
        # update index
        for ix in self.indices.keys():
            bisect.insort(self.indices[ix].setdefault(record[ix],[]),
                          self.next_id)
        # increment the next __id__
        self.next_id += 1
        return record['__id__']

    def delete(self,removed):
        """Remove a single record, or the records in an iterable
        Before starting deletion, test if all records are in the base
        and don't have twice the same __id__
        Return the number of deleted items
        """
        if isinstance(removed,dict):
            # remove a single record
            removed = [removed]
        else:
            # convert iterable into a list (to be able to sort it)
            removed = [ r for r in removed ]
        if not removed:
            return 0
        _ids = [ r['__id__'] for r in removed ]
        _ids.sort()
        keys = set(self.records.keys())
        # check if the records are in the base
        if not set(_ids).issubset(keys):
            missing = list(set(_ids).difference(keys))
            raise IndexError,'Delete aborted. Records with these ids' \
                ' not found in the base : %s' %str(missing)
        # raise exception if duplicate ids
        for i in range(len(_ids)-1):
            if _ids[i] == _ids[i+1]:
                raise IndexError,"Delete aborted. Duplicate id : %s" %_ids[i]
        deleted = len(removed)
        while removed:
            r = removed.pop()
            _id = r['__id__']
            # remove id from indices
            for indx in self.indices.keys():
                # the id list is sorted, so bisect locates the id in O(log n)
                pos = bisect.bisect(self.indices[indx][r[indx]],_id)-1
                del self.indices[indx][r[indx]][pos]
                if not self.indices[indx][r[indx]]:
                    # last id for this value: drop the index entry entirely
                    del self.indices[indx][r[indx]]
            # remove record from self.records
            del self.records[_id]
        return deleted

    def update(self,records,**kw):
        """Update one record of a list of records
        with new keys and values and update indices"""
        # ignore unknown fields
        kw = dict([(k,v) for (k,v) in kw.iteritems() if k in self.fields])
        if isinstance(records,dict):
            records = [ records ]
        # update indices
        for indx in set(self.indices.keys()) & set (kw.keys()):
            for record in records:
                if record[indx] == kw[indx]:
                    continue
                _id = record["__id__"]
                # remove id for the old value
                old_pos = bisect.bisect(self.indices[indx][record[indx]],_id)-1
                del self.indices[indx][record[indx]][old_pos]
                if not self.indices[indx][record[indx]]:
                    del self.indices[indx][record[indx]]
                # insert new value
                bisect.insort(self.indices[indx].setdefault(kw[indx],[]),_id)
        for record in records:
            # update record values
            record.update(kw)
            # increment version number
            record["__version__"] += 1

    def add_field(self,field,default=None):
        """Add a new field to every record, initialised to `default`."""
        if field in self.fields + ["__id__","__version__"]:
            raise ValueError,"Field %s already defined" %field
        for r in self:
            r[field] = default
        self.fields.append(field)
        self.commit()

    def drop_field(self,field):
        """Remove a field (and its index, if any) from every record."""
        if field in ["__id__","__version__"]:
            raise ValueError,"Can't delete field %s" %field
        self.fields.remove(field)
        for r in self:
            del r[field]
        if field in self.indices:
            del self.indices[field]
        self.commit()

    def __call__(self,*args,**kw):
        """Selection by field values
        db(key=value) returns the list of records where r[key] = value"""
        if args and kw:
            raise SyntaxError,"Can't specify positional AND keyword arguments"
        if args:
            # db('field') returns a Tester for operator-style selection
            if len(args)>1:
                raise SyntaxError,"Only one field can be specified"
            elif args[0] not in self.fields:
                raise ValueError,"%s is not a field" %args[0]
            else:
                return Tester(self,args[0])
        if not kw:
            return self.records.values() # db() returns all the values
        # indices and non-indices
        keys = kw.keys()
        ixs = set(keys) & set(self.indices.keys())
        no_ix = set(keys) - ixs
        if ixs:
            # fast selection on indices
            ix = ixs.pop()
            res = set(self.indices[ix].get(kw[ix],[]))
            if not res:
                return []
            while ixs:
                ix = ixs.pop()
                res = res & set(self.indices[ix].get(kw[ix],[]))
        else:
            # if no index, initialize result with test on first field
            field = no_ix.pop()
            res = set([r["__id__"] for r in self if r[field] == kw[field] ])
        # selection on non-index fields
        for field in no_ix:
            res = res & set([ _id for _id in res
                              if self.records[_id][field] == kw[field] ])
        return [ self[_id] for _id in res ]

    def __getitem__(self,key):
        # direct access by record id
        return self.records[key]

    def __len__(self):
        return len(self.records)

    def __delitem__(self,record_id):
        """Delete by record id"""
        self.delete(self[record_id])

    def __contains__(self,record_id):
        return record_id in self.records

    def __iter__(self):
        """Iteration on the records"""
        return self.records.itervalues()
if __name__ == '__main__':
    # Run the regression tests shipped in the test/ folder.
    # NOTE(review): execfile is Python 2 only (removed in Python 3).
    os.chdir(os.path.join(os.getcwd(),'test'))
    execfile('PyDbLite_test.py')
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make Footprint.footprint and Footprint.image nullable FKs to api.File."""

    dependencies = [
        ('api', '0028_auto_20170720_1024'),
    ]

    operations = [
        # DO_NOTHING: deleting the referenced File intentionally performs no
        # cascade at the database level; NULL is allowed when no file is set.
        migrations.AlterField(
            model_name='footprint',
            name='footprint',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='footprint', to='api.File'),
        ),
        migrations.AlterField(
            model_name='footprint',
            name='image',
            field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='image', to='api.File'),
        ),
    ]
|
__author__ = "Harish Narayanan"
__copyright__ = "Copyright (C) 2009 Simula Research Laboratory and %s" % __author__
__license__ = "GNU GPL Version 3 or any later version"
from dolfin import *
from cbc.twist.nonlinear_solver import *
from cbc.common import *
from cbc.common.utils import *
from cbc.twist.kinematics import Grad, DeformationGradient, Jacobian
from sys import exit
from numpy import array, loadtxt, linalg
class CG1MomentumBalanceSolver(CBCSolver):
    """Solves the dynamic balance of linear momentum using a CG1
    time-stepping scheme.

    The unknowns (displacement u, velocity v) live in a mixed space;
    each step solves the nonlinear midpoint system with a Newton solver.
    """

    def __init__(self, problem, parameters):
        """Initialise the momentum balance solver"""
        CBCSolver.__init__(self)

        # Get problem parameters
        mesh = problem.mesh()
        dt, t_range = timestep_range_cfl(problem, mesh)
        end_time = problem.end_time()
        info("Using time step dt = %g" % dt)

        # Define function spaces
        element_degree = parameters["element_degree"]
        #scalar = FiniteElement("CG", mesh.ufl_cell(), element_degree)
        vector = VectorElement("CG", mesh.ufl_cell(), element_degree)
        vector_space = FunctionSpace(mesh, vector)
        # mixed space holds (displacement, velocity)
        mixed_space = FunctionSpace(mesh, vector*vector)
        V = TestFunction(mixed_space)
        dU = TrialFunction(mixed_space)
        U = Function(mixed_space)   # current solution (u, v)
        U0 = Function(mixed_space)  # solution at the previous time step

        # Get initial conditions
        u0, v0 = problem.initial_conditions()

        # If no initial conditions are specified, assume they are 0
        if not u0:
            u0 = Constant((0,)*vector_space.mesh().geometry().dim())
        if not v0:
            v0 = Constant((0,)*vector_space.mesh().geometry().dim())

        # If either are text strings, assume those are file names and
        # load conditions from those files
        if isinstance(u0, str):
            info("Loading initial displacement from file.")
            file_name = u0
            u0 = Function(vector_space, file_name)
        if isinstance(v0, str):
            info("Loading initial velocity from file.")
            file_name = v0
            v0 = Function(vector_space, file_name)

        # Create boundary conditions (on the displacement sub-space)
        dirichlet_values = problem.dirichlet_values()
        bcu = create_dirichlet_conditions(dirichlet_values,
                                          problem.dirichlet_boundaries(),
                                          mixed_space.sub(0))

        # Functions
        xi, eta = split(V)
        u, v = split(U)
        u_plot = Function(vector_space)

        # Project u0 and v0 into U0
        a_proj = inner(dU, V)*dx
        L_proj = inner(u0, xi)*dx + inner(v0, eta)*dx
        solve(a_proj == L_proj, U0)
        u0, v0 = split(U0)

        # Driving forces
        B = problem.body_force()
        # fall back to a displacement-dependent body force at the midpoint
        if not B: B = problem.body_force_u(0.5 * (u0 + u))

        # If no body forces are specified, assume it is 0
        if not B:
            B = Constant((0,)*vector_space.mesh().geometry().dim())

        # Evaluate displacements and velocities at mid points
        u_mid = 0.5*(u0 + u)
        v_mid = 0.5*(v0 + v)

        # Get reference density
        rho0 = problem.reference_density()

        # If no reference density is specified, assume it is 1.0
        if not rho0:
            rho0 = Constant(1.0)

        density_type = str(rho0.__class__)
        if not ("dolfin" in density_type):
            info("Converting given density to a DOLFIN Constant.")
            rho0 = Constant(rho0)

        # Piola-Kirchhoff stress tensor based on the material model
        P = problem.first_pk_stress(u_mid)

        # Convert time step to a DOLFIN constant
        k = Constant(dt)

        # The variational form corresponding to hyperelasticity
        # (midpoint rule in time: residual of momentum balance plus the
        # kinematic relation u' = v)
        L = rho0*inner(v - v0, xi)*dx + k*inner(P, grad(xi))*dx \
            - k*inner(B, xi)*dx + inner(u - u0, eta)*dx \
            - k*inner(v_mid, eta)*dx

        # Add contributions to the form from the Neumann boundary
        # conditions

        # Get Neumann boundary conditions on the stress
        neumann_conditions = problem.neumann_conditions()
        neumann_boundaries = problem.neumann_boundaries()

        boundary = MeshFunction("size_t", mesh, mesh.topology().dim() - 1)
        boundary.set_all(len(neumann_boundaries) + 1)

        # NOTE(review): ds[boundary] is the old-style measure subscript;
        # newer dolfin uses ds(subdomain_data=boundary).
        dsb = ds[boundary]
        for (i, neumann_boundary) in enumerate(neumann_boundaries):
            info("Applying Neumann boundary condition.")
            info(str(neumann_boundary))
            compiled_boundary = CompiledSubDomain(neumann_boundary)
            compiled_boundary.mark(boundary, i)
            L = L - k*inner(neumann_conditions[i], xi)*dsb(i)

        # Jacobian of the residual for the Newton solver
        a = derivative(L, U, dU)

        # Store variables needed for time-stepping
        self.dt = dt
        self.k = k
        self.t_range = t_range
        self.end_time = end_time
        self.a = a
        self.L = L
        self.bcu = bcu
        self.U0 = U0
        self.U = U
        self.B = B
        self.dirichlet_values = dirichlet_values
        self.neumann_conditions = neumann_conditions

        # FIXME: Figure out why I am needed
        self.mesh = mesh

        # Kristoffer's fix in order to sync the F and S solvers dt...
        self.t = dt

        # Empty file handlers / time series
        self.displacement_file = None
        self.velocity_file = None
        self.displacement_velocity_series = None
        #self.u_plot = u_plot
        self.uplot = plot(u,mode="displacement",title="Displacement")

        # Store parameters
        self.parameters = parameters

    def solve(self):
        """Solve the mechanics problem and return the computed
        displacement field"""
        # Time loop
        for t in self.t_range:
            info("Solving the problem at time t = " + str(self.t))
            self.step(self.dt)
            self.update()

        #if self.parameters["plot_solution"]:
        #    interactive()

        return self.U.split(True)[0]

    def step(self, dt):
        """Setup and solve the problem at the current time step"""
        # Update time step
        self.dt = dt
        self.k.assign(dt)

        #problem = NonlinearVariationalProblem(self.L, self.U, self.bcu, self.a)
        #solver = NonlinearVariationalSolver(problem)
        solver = AugmentedNewtonSolver(self.L, self.U, self.a, \
                                       self.bcu)
        #solver.parameters["newton_solver"]["absolute_tolerance"] = 1e-12
        #solver.parameters["newton_solver"]["relative_tolerance"] = 1e-12
        #solver.parameters["newton_solver"]["maximum_iterations"] = 100
        solver.solve()
        return self.U.split(True)

    def update(self):
        """Update problem at time t"""
        u, v = self.U.split()

        # Propagate the displacements and velocities
        self.U0.assign(self.U)

        # Plot solution
        #if self.parameters["plot_solution"]:
        #    # Copy to a fixed function to trick Viper into not opening
        #    # up multiple windows
        #    "THIS ASSIGN DOES NOT WORK FOR SOME REASON!" #self.u_plot.assign(u)
        #    #plot(u, title="Displacement", mode="displacement", rescale=True)
        #    "This is a new ploting"
        #    self.uplot.plot(u)

        # Store solution (for plotting)
        if self.parameters["save_solution"]:
            if self.displacement_file is None:
                self.displacement_file = XDMFFile("displacement.xdmf")
            if self.velocity_file is None:
                self.velocity_file = XDMFFile("velocity.xdmf")
            u.rename('u', "displacement")
            v.rename('v', "velocity")
            self.displacement_file.write(u, self.t)
            self.velocity_file.write(v, self.t)
            #self.displacement_file << u
            #self.velocity_file << v

        # Store solution data
        if self.parameters["store_solution_data"]:
            if self.displacement_velocity_series is None:
                self.displacement_velocity_series = TimeSeries("displacement_velocity")
            self.displacement_velocity_series.store(self.U.vector(), self.t)

        # Move to next time step
        self.t = self.t + self.dt

        # Inform time-dependent functions of new time
        for bc in self.dirichlet_values:
            if isinstance(bc, Expression):
                bc.t = self.t
        for bc in self.neumann_conditions:
            bc.t = self.t
        self.B.t = self.t

    def solution(self):
        "Return current solution values"
        return self.U.split(True)
class LinearPoroElasticitySolver(CBCSolver):
    """Solves linearised poroelasticity with a first-order time-stepping
    scheme on a mixed P1/P1/DG0 space.

    Unknowns: displacement u (P1 vector), fluid flux z (P1 vector) and
    pressure p (DG0 scalar), all gathered in one mixed function ``w``.
    """

    def __init__(self, problem, parameters):
        """Initialise the poroelasticity solver"""
        CBCSolver.__init__(self)

        # Get problem parameters
        mesh = problem.mesh()
        dt, t_range = timestep_range_cfl(problem, mesh)
        end_time = problem.end_time()
        info("Using time step dt = %g" % dt)

        # Define function spaces
        P_0 = FiniteElement("DG", mesh.ufl_cell(), 0)
        P_1 = VectorElement("CG", mesh.ufl_cell(), 1)
        mixed_elem = MixedElement([P_1, P_1, P_0])
        vector_space = FunctionSpace(mesh, P_1)
        scalar_space = FunctionSpace(mesh, P_0)
        mixed_space = FunctionSpace(mesh, mixed_elem)
        u, z, p = TrialFunctions(mixed_space)
        v_0, v_1, q = TestFunctions(mixed_space)
        w = Function(mixed_space)

        # Get initial conditions
        u0, z0, p0 = problem.initial_conditions()

        # If no initial conditions are specified, assume they are 0
        if u0 == []:
            u0 = Constant((0.0,)*vector_space.mesh().geometry().dim())
        if z0 == []:
            z0 = Constant((0.0,)*vector_space.mesh().geometry().dim())
        if p0 == []:
            p0 = Constant(0.0)

        # If any are text strings, assume those are file names and load
        # the conditions from those files
        if isinstance(u0, str):
            info("Loading initial displacement from file.")
            u0 = Function(vector_space, u0)
        if isinstance(z0, str):
            info("Loading initial velocity from file.")
            # BUG FIX: the loaded Function was assigned to the stale name
            # v0, leaving z0 holding the file-name string.
            z0 = Function(vector_space, z0)
        if isinstance(p0, str):
            info("Loading initial pressure from file.")
            # BUG FIX: the pressure lives in the scalar space, not the
            # vector space.
            p0 = Function(scalar_space, p0)

        # Previous-step displacement and pressure.
        # BUG FIX: these projections ran before the file-loading branches
        # above, so a file-name string would have been projected.
        u_n = project(u0, vector_space)
        p_n = project(p0, scalar_space)

        # Create boundary conditions, one set per unknown.
        # BUG FIX: the original referenced the undefined names
        # dirichlet_values_u/z/p (NameError); fetch them from the problem
        # the same way update() does.
        bcu = create_dirichlet_conditions(problem.dirichlet_values_u(),
                                          problem.dirichlet_boundaries_u(),
                                          mixed_space.sub(0))
        bcz = create_dirichlet_conditions(problem.dirichlet_values_z(),
                                          problem.dirichlet_boundaries_z(),
                                          mixed_space.sub(1))
        bcp = create_dirichlet_conditions(problem.dirichlet_values_p(),
                                          problem.dirichlet_boundaries_p(),
                                          mixed_space.sub(2))
        # create_dirichlet_conditions returns a list of DirichletBC, so
        # concatenate the lists instead of nesting them — dolfin's solve()
        # expects a flat list of boundary conditions.
        bcs = bcu + bcz + bcp

        # Driving forces
        f_rhs = problem.body_force()
        # If no body forces are specified, assume it is 0
        if f_rhs is None:
            f_rhs = Constant((0.0,)*vector_space.mesh().geometry().dim())
        b_rhs = problem.velocity_source()
        if b_rhs is None:
            b_rhs = Constant((0.0,)*vector_space.mesh().geometry().dim())
        g_rhs = problem.fluid_source()
        if g_rhs is None:
            g_rhs = Constant(0.0)

        # Convert the time step to a DOLFIN Constant for the forms while
        # keeping the float value for time bookkeeping.
        k = Constant(dt)

        # The variational forms of the poroelasticity system
        def e(u):
            """Symmetric gradient (small-strain tensor)."""
            return 0.5 * (grad(u) + grad(u).T)

        def a(u, v):
            """Elasticity bilinear form."""
            # NOTE(review): mu is fetched but unused in the returned form;
            # the leading factor 2 looks like it should be 2*mu — confirm
            # against the intended material model.
            mu = problem.mu
            lmbda = problem.lmbda
            return 2*inner(e(u), e(v))*dx + lmbda*div(u)*div(v)*dx

        def b(q, v):
            """Pressure/volumetric coupling form."""
            return q*div(v)*dx

        def J(p, q):
            """Pressure-jump stabilisation for the DG0 pressure."""
            delta = 0.1
            h = MaxFacetEdgeLength(mesh)
            return delta*h*jump(p)*jump(q)*dS

        LHS_u = a(u, v_0) - b(p, v_0)
        RHS_u = inner(f_rhs, v_0)*dx
        LHS_z = k*inner(problem.k_inv()*z, v_1)*dx - k*b(p, v_1)
        RHS_z = k*inner(b_rhs, v_1)*dx
        LHS_p = -b(q, u) - k*q*div(z)*dx - J(p, q)
        RHS_p = k*g_rhs*q*dx - b(q, u_n) - J(p_n, q)
        L = (LHS_u + LHS_z + LHS_p) - (RHS_u + RHS_z + RHS_p)

        # Neumann boundary handling is not implemented for this solver yet;
        # the facet function is kept so it can be wired up later.
        neumann_conditions = problem.neumann_conditions()
        neumann_boundaries = problem.neumann_boundaries()
        boundary = MeshFunction("size_t", mesh, mesh.topology().dim() - 1)
        boundary.set_all(len(neumann_boundaries) + 1)

        # Store variables needed for time-stepping.
        # BUG FIX: the original stored an undefined name `k`, and the
        # statements `self.w`, `self.u_n`, `self.p_n` were no-op attribute
        # reads instead of assignments, so solve()/update() crashed.
        self.dt = dt
        self.k = k
        self.t_range = t_range
        self.end_time = end_time
        self.w = w
        self.u_n = u_n
        self.p_n = p_n
        self.L = L
        self.f_rhs = f_rhs
        self.b_rhs = b_rhs
        self.g_rhs = g_rhs
        self.bcs = bcs
        self.problem = problem
        self.mesh = mesh

        # BUG FIX: self.t was never initialised but is used by solve() and
        # update(); mirror the momentum solver and start at dt.
        self.t = dt

        # Empty file handlers / time series
        self.displacement_file = None
        self.velocity_file = None
        self.displacement_velocity_series = None
        # Plot the previous-step displacement (a Function) — plotting the
        # TrialFunction `u`, as the original did, is not meaningful.
        self.uplot = plot(u_n, mode="displacement", title="Displacement")

        # Store parameters
        self.parameters = parameters

    def solve(self):
        """Solve the poroelasticity problem over the whole time range and
        return the final mixed solution (u, z, p)."""
        # Time loop
        for t in self.t_range:
            info("Solving the problem at time t = " + str(self.t))
            self.step(self.dt)
            self.update()
        return self.w

    def step(self, dt):
        """Setup and solve the problem at the current time step"""
        # Update the time step used inside the variational forms
        self.dt = dt
        self.k.assign(dt)
        solve(self.L == 0, self.w, self.bcs)
        return self.w

    def update(self):
        """Propagate the solution to the next time step"""
        u_n, _, p_n = self.w.split()
        self.u_n.assign(u_n)
        self.p_n.assign(p_n)

        # Store solution (for plotting)
        if self.parameters["save_solution"]:
            if self.displacement_file is None:
                self.displacement_file = XDMFFile("displacement.xdmf")
            if self.velocity_file is None:
                self.velocity_file = XDMFFile("velocity.xdmf")
            # time-stamp each write so the series is preserved
            self.displacement_file.write(u_n, self.t)

        # Move to next time step
        self.t = self.t + self.dt

        # Inform time-dependent functions of new time
        for bc_u in self.problem.dirichlet_values_u():
            if isinstance(bc_u, Expression):
                bc_u.t = self.t
        for bc_z in self.problem.dirichlet_values_z():
            if isinstance(bc_z, Expression):
                bc_z.t = self.t
        for bc_p in self.problem.dirichlet_values_p():
            if isinstance(bc_p, Expression):
                bc_p.t = self.t
        self.f_rhs.t = self.t
        self.b_rhs.t = self.t
        self.g_rhs.t = self.t

    def solution(self):
        "Return current solution values"
        return self.w
|
import MySQLdb
import string
import sys
# Python 2 hack: re-expose setdefaultencoding (hidden by site.py) so the
# process-wide default encoding becomes utf8. Not available in Python 3.
reload(sys)
sys.setdefaultencoding('utf8')
import ConfigParser
def get_item(data_dict, item):
    """Return ``data_dict[item]``, or None when the key is missing or the
    container is not subscriptable.

    BUG FIX: the original used a bare ``except: pass`` which also hid
    programming errors such as KeyboardInterrupt; catch only lookup
    failures.
    """
    try:
        return data_dict[item]
    except (KeyError, IndexError, TypeError):
        return None
def get_parameters(conn):
    """Return {name: value} for every row of v$parameter.

    On any failure the error is printed and whatever was collected so far
    (possibly an empty dict) is returned.

    BUG FIX: `curs` and `parameters` were unbound in the finally/return
    path when conn.cursor() itself failed; py2-only `except Exception,e`
    replaced with the portable `as e` form.
    """
    parameters = {}
    curs = None
    try:
        curs = conn.cursor()
        curs.execute('select name,value from v$parameter')
        for name, value in curs.fetchall():
            parameters[name] = value
    except Exception as e:
        print(e)
    finally:
        if curs is not None:
            curs.close()
    return parameters
def get_sysstat(conn):
    """Return {name: value} for every row of v$sysstat.

    On any failure the error is printed and whatever was collected so far
    (possibly an empty dict) is returned.

    BUG FIX: `curs` and `sysstat` were unbound in the finally/return path
    when conn.cursor() itself failed; py2-only `except Exception,e`
    replaced with the portable `as e` form.
    """
    sysstat = {}
    curs = None
    try:
        curs = conn.cursor()
        curs.execute('select name,value value from v$sysstat')
        for name, value in curs.fetchall():
            sysstat[name] = value
    except Exception as e:
        print(e)
    finally:
        if curs is not None:
            curs.close()
    return sysstat
def get_instance(conn, field):
    """Return a single column `field` from v$instance ('' on error).

    SECURITY NOTE: `field` is interpolated directly into the SQL text —
    only pass trusted, hard-coded column names.

    BUG FIX: `curs` was unbound in the finally clause when conn.cursor()
    itself failed; py2-only `except Exception,e` replaced with `as e`.
    """
    result = ''
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("select %s from v$instance" % (field))
        result = curs.fetchone()[0]
    except Exception as e:
        print(e)
    finally:
        if curs is not None:
            curs.close()
    return result
def get_database(conn, field):
    """Return a single column `field` from v$database ('' on error).

    SECURITY NOTE: `field` is interpolated directly into the SQL text —
    only pass trusted, hard-coded column names.

    BUG FIX: `curs` was unbound in the finally clause when conn.cursor()
    itself failed; py2-only `except Exception,e` replaced with `as e`.
    """
    result = ''
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("select %s from v$database" % (field))
        result = curs.fetchone()[0]
    except Exception as e:
        print(e)
    finally:
        if curs is not None:
            curs.close()
    return result
def get_version(conn):
    """Return the Oracle Database version string (None on error).

    BUG FIX: on failure the original hit a NameError because `result` was
    never bound before `return result`; it is now initialised to None.
    Also replaced py2-only `except Exception,e` with `as e` and guarded
    the cursor close.
    """
    result = None
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("select product,version from product_component_version where product like '%Database%'")
        result = curs.fetchone()[1]
    except Exception as e:
        print(e)
    finally:
        if curs is not None:
            curs.close()
    return result
def get_sessions(conn):
    """Return the total session count from v$session (None on error).

    BUG FIX: the failure path returned the undefined name `null`
    (NameError) and had an unreachable `print` after the return; also
    guarded the cursor close and used the portable `except ... as e`.
    """
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("select count(*) from v$session")
        return curs.fetchone()[0]
    except Exception as e:
        print(e)
        return None
    finally:
        if curs is not None:
            curs.close()
def get_actives(conn):
    """Return the count of active non-system sessions (None on error).

    BUG FIX: the failure path returned the undefined name `null`
    (NameError) and had an unreachable `print` after the return; also
    guarded the cursor close and used the portable `except ... as e`.
    """
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("select count(*) from v$session where username not in('SYS','SYSTEM') and username is not null and STATUS='ACTIVE'")
        return curs.fetchone()[0]
    except Exception as e:
        print(e)
        return None
    finally:
        if curs is not None:
            curs.close()
def get_waits(conn):
    """Return the count of sessions blocked on contention-related wait
    events (library/cursor/latch/enqueue/log-file waits); None on error.

    BUG FIX: the failure path returned the undefined name `null`
    (NameError) and had an unreachable `print` after the return; also
    guarded the cursor close and used the portable `except ... as e`.
    """
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("select count(*) from v$session where event like 'library%' or event like 'cursor%' or event like 'latch%' or event like 'enq%' or event like 'log file%'")
        return curs.fetchone()[0]
    except Exception as e:
        print(e)
        return None
    finally:
        if curs is not None:
            curs.close()
def get_dg_stats(conn):
    """Return the status of archive destination LOG_ARCHIVE_DEST_1
    (None on error).

    BUG FIX: removed the dead first qSql assignment (an 'apply lag' query
    that was immediately overwritten), fixed the failure path returning
    the undefined name `null`, stopped shadowing the builtin `list`, and
    guarded the cursor close.
    """
    curs = None
    try:
        curs = conn.cursor()
        qSql = "SELECT status FROM v$archive_dest where dest_name='LOG_ARCHIVE_DEST_1'"
        curs.execute(qSql)
        row = curs.fetchone()
        return row[0]
    except Exception as e:
        print(e)
        return None
    finally:
        if curs is not None:
            curs.close()
def get_dg_delay(conn):
    """Return the Data Guard apply lag in seconds, or '---' when
    v$dataguard_stats has no 'apply lag' row (None on error).

    BUG FIX: the failure path returned the undefined name `null`
    (NameError); the local shadowing the builtin `list` was renamed and
    the cursor close guarded.
    """
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("SELECT substr((SUBSTR(VALUE,5)),0,2)*3600 + substr((SUBSTR(VALUE,5)),4,2)*60 + substr((SUBSTR(VALUE,5)),7,2) AS seconds,VALUE FROM v$dataguard_stats a WHERE NAME ='apply lag'")
        row = curs.fetchone()
        if row:
            return row[0]
        return '---'
    except Exception as e:
        print(e)
        return None
    finally:
        if curs is not None:
            curs.close()
def get_tablespace(conn):
    """Return (tablespace_name, total_MB, used_MB, avail_MB, used_ratio)
    rows for all non-UNDO tablespaces (None on error).

    BUG FIX: the failure path returned the undefined name `null`
    (NameError); the local shadowing the builtin `list` was renamed and
    the cursor close guarded.
    """
    curs = None
    try:
        curs = conn.cursor()
        curs.execute("select df.tablespace_name ,totalspace total_size, (totalspace-freespace) used_size,freespace avail_size ,round((1-freespace/totalspace)*100) || '%' as used_ratio from (select tablespace_name,round(sum(bytes)/1024/1024) totalspace from dba_data_files group by tablespace_name) df,(select tablespace_name,round(sum(bytes)/1024/1024) freespace from dba_free_space group by tablespace_name) fs where df.tablespace_name=fs.tablespace_name and df.tablespace_name not like 'UNDOTBS%'")
        return curs.fetchall()
    except Exception as e:
        print(e)
        return None
    finally:
        if curs is not None:
            curs.close()
|
from wampy.peers.clients import Client
def test_send_really_long_string(router, echo_service):
    """The echo RPC must round-trip a large string payload unchanged."""
    payload = "a" * 1000
    client = Client(url=router.url)
    with client:
        result = client.rpc.echo(message=payload)
        assert result['message'] == payload
|
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Community.verification_threshold_follower (default 1): the
    follower count at which verification starts."""

    dependencies = [
        ('community', '0025_auto_20200415_0517'),
    ]

    operations = [
        migrations.AddField(
            model_name='community',
            name='verification_threshold_follower',
            field=models.PositiveIntegerField(default=1, help_text='Start verification if follower count superior or equal to'),
        ),
    ]
|
"""Week3 warm up task 12 importing decimal and fractions."""
import decimal
import fractions

# Constants demonstrating the different numeric types available:
INTVAL = 1                            # plain integer
FLOATVAL = 0.1                        # binary float (inexact: nearest double to 0.1)
DECVAL = decimal.Decimal('0.1')       # exact decimal representation of 0.1
FRACVAL = fractions.Fraction(1, 10)   # exact rational 1/10
|
from django.conf import settings
import jinja2
from django.template.loader import render_to_string
from django_jinja import library
from bedrock.firefox.firefox_details import firefox_desktop, firefox_android, firefox_ios
from bedrock.base.urlresolvers import reverse
from lib.l10n_utils import get_locale
def desktop_builds(channel, builds=None, locale=None, force_direct=False,
                   force_full_installer=False, force_funnelcake=False,
                   funnelcake_id=False, locale_in_transition=False, classified=False):
    """Append one download entry per desktop platform for `channel`/`locale`.

    Falls back to en-US when the requested locale has no builds for the
    channel. Each appended entry is a dict with the keys ``os``,
    ``os_pretty``, ``download_link`` and ``download_link_direct``.
    Returns the (possibly freshly created) `builds` list.
    """
    builds = builds or []

    l_version = firefox_desktop.latest_builds(locale, channel)
    # Developer Edition is now based on the Beta channel, so the build list
    # should be generated from the Beta locales.
    if channel == 'alpha':
        l_version = firefox_desktop.latest_builds(locale, 'beta')

    if l_version:
        version, platforms = l_version
    else:
        # no builds for this locale: fall back to en-US
        locale = 'en-US'
        version, platforms = firefox_desktop.latest_builds('en-US', channel)

    for plat_os, plat_os_pretty in firefox_desktop.platforms(channel, classified):
        os_pretty = plat_os_pretty
        # Firefox Nightly: The Windows stub installer is now universal,
        # automatically detecting a 32-bit and 64-bit desktop, so the
        # win64-specific entry can be skipped.
        if channel == 'nightly':
            if plat_os == 'win':
                continue
            if plat_os == 'win64':
                plat_os = 'win'
                os_pretty = 'Windows 32/64-bit'

        # And generate all the info
        download_link = firefox_desktop.get_download_url(
            channel, version, plat_os, locale,
            force_direct=force_direct,
            force_full_installer=force_full_installer,
            force_funnelcake=force_funnelcake,
            funnelcake_id=funnelcake_id,
            locale_in_transition=locale_in_transition,
        )

        # If download_link_direct is False the data-direct-link attr
        # will not be output, and the JS won't attempt the IE popup.
        if force_direct:
            # no need to run get_download_url again with the same args
            download_link_direct = False
        else:
            download_link_direct = firefox_desktop.get_download_url(
                channel, version, plat_os, locale,
                force_direct=True,
                force_full_installer=force_full_installer,
                force_funnelcake=force_funnelcake,
                funnelcake_id=funnelcake_id,
            )
            # collapse identical links so the template emits only one
            if download_link_direct == download_link:
                download_link_direct = False

        builds.append({'os': plat_os,
                       'os_pretty': os_pretty,
                       'download_link': download_link,
                       'download_link_direct': download_link_direct})

    return builds
def android_builds(channel, builds=None):
    """Append the single Android build entry for *channel* to *builds*."""
    builds = builds or []
    download_link = firefox_android.get_download_url(channel.lower())
    builds.append({
        'os': 'android',
        'os_pretty': 'Android',
        'download_link': download_link,
    })
    return builds
def ios_builds(channel, builds=None):
    """Append the single iOS build entry for *channel* to *builds*."""
    builds = builds or []
    download_link = firefox_ios.get_download_url(channel)
    builds.append({
        'os': 'ios',
        'os_pretty': 'iOS',
        'download_link': download_link,
    })
    return builds
@library.global_function
@jinja2.contextfunction
def download_firefox(ctx, channel='release', platform='all',
                     dom_id=None, locale=None, force_direct=False,
                     force_full_installer=False, force_funnelcake=False,
                     alt_copy=None, button_color='button-green',
                     locale_in_transition=False, download_location=None):
    """ Output a "download firefox" button.

    :param ctx: context from calling template.
    :param channel: name of channel: 'release', 'beta', 'alpha', or 'nightly'.
    :param platform: Target platform: 'desktop', 'android', 'ios', or 'all'.
    :param dom_id: Use this string as the id attr on the element.
    :param locale: The locale of the download. Default to locale of request.
    :param force_direct: Force the download URL to be direct.
    :param force_full_installer: Force the installer download to not be
            the stub installer (for aurora).
    :param force_funnelcake: Force the download version for en-US Windows to be
            'latest', which bouncer will translate to the funnelcake build.
    :param alt_copy: Specifies alternate copy to use for download buttons.
    :param button_color: Color of download button. Default to 'button-green'.
    :param locale_in_transition: Include the page locale in transitional download link.
    :param download_location: Specify the location of download button for
            GA reporting: 'primary cta', 'nav', 'sub nav', or 'other'.
    """
    show_desktop = platform in ['all', 'desktop']
    show_android = platform in ['all', 'android']
    show_ios = platform in ['all', 'ios']
    alt_channel = '' if channel == 'release' else channel
    locale = locale or get_locale(ctx['request'])
    funnelcake_id = ctx.get('funnelcake_id', False)
    dom_id = dom_id or 'download-button-%s-%s' % (
        'desktop' if platform == 'all' else platform, channel)

    # Gather data about the build for each platform
    builds = []
    if show_desktop:
        version = firefox_desktop.latest_version(channel)
        builds = desktop_builds(channel, builds, locale, force_direct,
                                force_full_installer, force_funnelcake,
                                funnelcake_id, locale_in_transition)
    if show_android:
        version = firefox_android.latest_version(channel)
        builds = android_builds(channel, builds)
    if show_ios:
        version = firefox_ios.latest_version(channel)
        # Use the shared helper (consistent with android_builds above) so the
        # channel is forwarded to get_download_url; the previous inline append
        # called get_download_url() with no arguments and therefore ignored
        # the requested channel.
        builds = ios_builds(channel, builds)

    # Get the native name for current locale
    langs = firefox_desktop.languages
    locale_name = langs[locale]['native'] if locale in langs else locale

    data = {
        'locale_name': locale_name,
        'version': version,
        'product': 'firefox-%s' % platform,
        'builds': builds,
        'id': dom_id,
        'channel': alt_channel,
        'show_desktop': show_desktop,
        'show_android': show_android,
        'show_ios': show_ios,
        'alt_copy': alt_copy,
        'button_color': button_color,
        'download_location': download_location
    }

    html = render_to_string('firefox/includes/download-button.html', data,
                            request=ctx['request'])
    return jinja2.Markup(html)
@library.global_function
@jinja2.contextfunction
def download_firefox_desktop_list(ctx, channel='release', dom_id=None, locale=None,
                                  force_full_installer=False):
    """
    Return a HTML list of platform download links for Firefox desktop

    :param ctx: context from calling template.
    :param channel: name of channel: 'release', 'beta', 'alpha' or 'nightly'.
    :param dom_id: Use this string as the id attr on the element.
    :param locale: The locale of the download. Default to locale of request.
    :param force_full_installer: Force the installer download to not be
            the stub installer (for aurora).
    """
    dom_id = dom_id or 'download-platform-list-%s' % (channel)
    locale = locale or get_locale(ctx['request'])

    # Make sure funnelcake_id is not passed as builds are often Windows only.
    # Positional args: force_direct=True, force_funnelcake=False,
    # funnelcake_id=False, locale_in_transition=False, classified=True.
    builds = desktop_builds(channel, None, locale, True, force_full_installer,
                            False, False, False, True)

    recommended_builds = []
    traditional_builds = []

    for plat in builds:
        # Add 32-bit label for Windows and Linux builds.
        if channel != 'nightly':
            if plat['os'] == 'win':
                plat['os_pretty'] = 'Windows 32-bit'
            if plat['os'] == 'linux':
                plat['os_pretty'] = 'Linux 32-bit'
        # `and` binds tighter than `or`, so nightly's single Windows entry is
        # always treated as recommended regardless of the classification table.
        if (plat['os'] in firefox_desktop.platform_classification['recommended'] or
                channel == 'nightly' and plat['os'] == 'win'):
            recommended_builds.append(plat)
        else:
            traditional_builds.append(plat)

    data = {
        'id': dom_id,
        'builds': {
            'recommended': recommended_builds,
            'traditional': traditional_builds,
        },
    }

    html = render_to_string('firefox/includes/download-list.html', data,
                            request=ctx['request'])
    return jinja2.Markup(html)
@library.global_function
def firefox_url(platform, page, channel=None):
    """
    Return a product-related URL like /firefox/all/ or /mobile/beta/notes/.

    Examples
    ========

    In Template
    -----------

        {{ firefox_url('desktop', 'all', 'organizations') }}
        {{ firefox_url('desktop', 'sysreq', channel) }}
        {{ firefox_url('android', 'notes') }}
    """
    kwargs = {}
    anchor = None

    # Tweak the channel name for the naming URL pattern in urls.py.
    # The branches are mutually exclusive, so an elif chain is equivalent
    # to the original sequential ifs.
    if channel == 'release':
        channel = None
    elif channel == 'alpha':
        if platform == 'desktop':
            channel = 'developer'
        if platform == 'android':
            channel = 'aurora'
    elif channel == 'esr':
        channel = 'organizations'

    # There is now only one /all page URL - issue 8096
    if page == 'all':
        if platform == 'desktop':
            anchor = {
                'beta': 'product-desktop-beta',
                'developer': 'product-desktop-developer',
                'nightly': 'product-desktop-nightly',
                'organizations': 'product-desktop-esr',
            }.get(channel, 'product-desktop-release')
        elif platform == 'android':
            anchor = {
                'beta': 'product-android-beta',
                'nightly': 'product-android-nightly',
            }.get(channel, 'product-android-release')
    else:
        if channel:
            kwargs['channel'] = channel
        if platform != 'desktop':
            kwargs['platform'] = platform

    # Firefox for Android and iOS have the system requirements page on SUMO
    if platform in ['android', 'ios'] and page == 'sysreq':
        return settings.FIREFOX_MOBILE_SYSREQ_URL

    suffix = '#' + anchor if anchor else ''
    return reverse(f'firefox.{page}', kwargs=kwargs) + suffix
|
import logging
from socorro.external import MissingArgumentError, BadArgumentError
from socorro.external.postgresql.base import PostgreSQLBase
from socorro.lib import external_common
logger = logging.getLogger("webapi")
class Bugs(PostgreSQLBase):
    """Implement the /bugs service with PostgreSQL. """

    filters = [
        ("signatures", None, ["list", "str"]),
        ("bug_ids", None, ["list", "str"]),
    ]

    def get(self, **kwargs):
        """Deprecated GET entry point; delegates to post()."""
        import warnings
        warnings.warn("You should use the POST method to access bugs")
        return self.post(**kwargs)

    def post(self, **kwargs):
        """Return a list of signatures-to-bug_ids or bug_ids-to-signatures
        associations.

        Exactly one of `signatures` / `bug_ids` must be provided; raises
        MissingArgumentError or BadArgumentError otherwise.
        """
        params = external_common.parse_arguments(self.filters, kwargs)

        if not params['signatures'] and not params['bug_ids']:
            raise MissingArgumentError('specify one of signatures or bug_ids')
        elif params['signatures'] and params['bug_ids']:
            raise BadArgumentError('specify only one of signatures or bug_ids')

        sql_params = []
        if params['signatures']:
            # Use subscript access consistently; the previous mix of
            # params.signatures and params['signatures'] relied on the
            # params object being a DotDict.
            sql_params.append(tuple(params['signatures']))
            sql = """/* socorro.external.postgresql.bugs.Bugs.get */
                SELECT ba.signature, bugs.id
                FROM bugs
                JOIN bug_associations AS ba ON bugs.id = ba.bug_id
                WHERE EXISTS(
                    SELECT 1 FROM bug_associations
                    WHERE bug_associations.bug_id = bugs.id
                    AND signature IN %s
                )
            """
        elif params['bug_ids']:
            sql_params.append(tuple(params['bug_ids']))
            sql = """/* socorro.external.postgresql.bugs.Bugs.get */
                SELECT ba.signature, bugs.id
                FROM bugs
                JOIN bug_associations AS ba ON bugs.id = ba.bug_id
                WHERE bugs.id IN %s
            """

        error_message = "Failed to retrieve bug associations from PostgreSQL"
        results = self.query(sql, sql_params, error_message=error_message)

        bugs = []
        for row in results:
            bug = dict(zip(("signature", "id"), row))
            bugs.append(bug)

        return {
            "hits": bugs,
            "total": len(bugs)
        }
|
from typing import cast
from abc import ABCMeta, abstractmethod
class Result:
    """A parse result: the parsed *value* plus the position of the next
    unconsumed token (*pos*)."""

    def __init__(self, value, pos):
        self.value = value
        self.pos = pos

    def __repr__(self):
        return f'Result({self.value}, {self.pos:d})'
class Parser(metaclass=ABCMeta):
    """Abstract base combinator; subclasses implement __call__(tokens, pos).

    Operator overloads build composite parsers:
      ``+`` -> Concat, ``*`` -> Exp, ``|`` -> Alternate, ``^`` -> Process
    """

    def __add__(self, other):
        return Concat(self, other)

    def __mul__(self, other):
        return Exp(self, other)

    def __or__(self, other):
        return Alternate(self, other)

    def __xor__(self, function):
        return Process(self, function)

    @abstractmethod
    def __call__(self, tokens, pos):
        pass
class Tag(Parser):
    """Match any token carrying the given tag; the result is the token text.

    NOTE: tags are compared with ``is`` (identity), preserved from the
    original — assumes tags are shared constants, confirm ``==`` is not
    required.
    """

    def __init__(self, tag):
        self.tag = tag

    def __call__(self, tokens, pos):
        if pos >= len(tokens):
            return None
        text, token_tag = tokens[pos]
        if token_tag is self.tag:
            return Result(text, pos + 1)
        return None
class Reserved(Parser):
    """Match one exact token text with a specific tag (keywords, operators)."""

    def __init__(self, value, tag):
        self.value = value
        self.tag = tag

    def __call__(self, tokens, pos):
        if pos >= len(tokens):
            return None
        text, token_tag = tokens[pos]
        if text == self.value and token_tag is self.tag:
            return Result(text, pos + 1)
        return None
class Concat(Parser):
    """Run *left* then *right*; succeed only if both match in sequence.

    The combined value is the pair (left_value, right_value).
    """

    def __init__(self, left, right):
        self.left = left
        # Store the right parser itself. The previous `right.right`
        # (marked # ERROR) discarded the parser and broke every `a + b`
        # composition (AttributeError for most parser types).
        self.right = right

    def __call__(self, tokens, pos):
        left_result = self.left(tokens, pos)
        if left_result:
            right_result = self.right(tokens, left_result.pos)
            if right_result:
                combined_value = (left_result.value, right_result.value)
                return Result(combined_value, right_result.pos)
        return None
class Exp(Parser):
    """Parse a left-associative expression list: ``parser (separator parser)*``.

    ``separator`` must yield a two-argument function; each iteration folds the
    accumulated value with the next ``parser`` value through that function.
    """
    def __init__(self, parser, separator):
        self.parser = parser
        self.separator = separator

    def __call__(self, tokens, pos):
        result = self.parser(tokens, pos)

        # process_next closes over `result`, which is rebound on every loop
        # iteration below, so it always folds against the latest accumulator.
        def process_next(parsed):
            (sepfunc, right) = parsed
            return sepfunc(result.value, right)
        # separator + parser, with the pair post-processed by process_next.
        next_parser = self.separator + self.parser ^ process_next

        next_result = result
        # Keep consuming (separator, operand) pairs until one fails to match.
        while next_result:
            next_result = next_parser(tokens, result.pos)
            if next_result:
                result = next_result
        return result
class Alternate(Parser):
    """Ordered choice: try *left*; if it fails, return *right*'s result."""

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def __call__(self, tokens, pos):
        # `or` short-circuits exactly like the original if/else: the right
        # parser runs only when the left result is falsy (i.e. None).
        return self.left(tokens, pos) or self.right(tokens, pos)
class Opt(Parser):
    """Optional parser: on failure, succeed with value None and consume nothing."""

    def __init__(self, parser):
        self.parser = parser

    def __call__(self, tokens, pos):
        return self.parser(tokens, pos) or Result(None, pos)
class Rep(Parser):
    """Apply *parser* zero or more times; collect all values in a list."""

    def __init__(self, parser):
        self.parser = parser

    def __call__(self, tokens, pos):
        values = []
        while True:
            result = self.parser(tokens, pos)
            if not result:
                break
            values.append(result.value)
            pos = result.pos
        # Always succeeds, possibly with an empty list.
        return Result(values, pos)
class Process(Parser):
    """Run *parser* and, on success, transform its value with *function*."""

    def __init__(self, parser, function):
        self.parser = parser
        self.function = function

    def __call__(self, tokens, pos):
        result = self.parser(tokens, pos)
        if result:
            # The previous `cast(None, result.value)` was a runtime no-op
            # that also told type checkers the value was None; apply the
            # function to the value directly.
            result.value = self.function(result.value)
        return result
class Lazy(Parser):
    """Defer construction of a parser until first use.

    Needed for recursive grammars where the parser refers to itself.
    """

    def __init__(self, parser_func):
        self.parser = None
        self.parser_func = parser_func

    def __call__(self, tokens, pos):
        if self.parser is None:
            # Build and memoize the real parser on first invocation.
            self.parser = self.parser_func()
        return self.parser(tokens, pos)
class Phrase(Parser):
    """Succeed only when *parser* consumes the entire token stream."""

    def __init__(self, parser):
        self.parser = parser

    def __call__(self, tokens, pos):
        result = self.parser(tokens, pos)
        if result and result.pos == len(tokens):
            return result
        return None
|
from __future__ import absolute_import, division, unicode_literals
from jx_base.expressions._utils import builtin_ops
from jx_base.expressions.expression import Expression
from jx_base.expressions.false_op import FALSE
from jx_base.expressions.literal import Literal
from jx_base.expressions.null_op import NULL
from jx_base.language import is_op
from mo_json import NUMBER
class BasicMultiOp(Expression):
    """
    PLACEHOLDER FOR BASIC OPERATOR (CAN NOT DEAL WITH NULLS)
    """

    data_type = NUMBER
    op = None  # subclasses set the operator key used to index builtin_ops

    def __init__(self, terms):
        Expression.__init__(self, terms)
        self.terms = terms

    def vars(self):
        """Union of the variables referenced by all terms."""
        output = set()
        for t in self.terms:
            output.update(t.vars())
        return output

    def map(self, map):
        return self.__class__([t.map(map) for t in self.terms])

    def __data__(self):
        return {self.op: [t.__data__() for t in self.terms]}

    def missing(self, lang):
        # Basic ops assume no nulls, so the expression is never "missing".
        return FALSE

    def partial_eval(self, lang):
        """Constant-fold literal terms and drop NULL terms."""
        acc = None
        terms = []
        for t in self.terms:
            simple = t.partial_eval(lang)
            if simple is NULL:
                pass
            elif is_op(simple, Literal):
                if acc is None:
                    acc = simple.value
                else:
                    acc = builtin_ops[self.op](acc, simple.value)
            else:
                terms.append(simple)

        if len(terms) == 0:
            # Was `acc == None`; identity comparison matches the
            # `acc is not None` test below and avoids any custom __eq__.
            if acc is None:
                # NOTE(review): self.default is not defined on this class;
                # presumably supplied by subclasses — confirm.
                return self.default.partial_eval(lang)
            else:
                return Literal(acc)
        else:
            if acc is not None:
                terms.append(Literal(acc))
            return self.__class__(terms)
|
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ("services", "0079_update_notification"),
    ]

    # The same nullable CharField is added to both models, once for the
    # base (untranslated) column and once per translated variant, in the
    # same order as the original hand-written list.
    operations = [
        migrations.AddField(
            model_name=model_name,
            name="external_url_title" + suffix,
            field=models.CharField(
                blank=True, max_length=100, null=True, verbose_name="External URL title"
            ),
        )
        for model_name in ("announcement", "errormessage")
        for suffix in ("", "_en", "_fi", "_sv")
    ]
|
"""Utility mixins that simplify tests for map reduce jobs."""
import json
import datetime
import luigi
class MapperTestMixin(object):
    """
    Base class for map function tests.

    Assumes that self.task_class is defined in a derived class.
    """
    DEFAULT_USER_ID = 10
    DEFAULT_TIMESTAMP = "2013-12-17T15:38:32.805444"
    DEFAULT_DATE = "2013-12-17"

    # This dictionary stores the default values for arguments to various task constructors; if not told otherwise,
    # the task constructor will pull needed values from this dictionary.
    DEFAULT_ARGS = {
        'interval': DEFAULT_DATE,
        'output_root': '/fake/output',
        'end_date': datetime.datetime.strptime('2014-04-01', '%Y-%m-%d').date(),
        'import_date': datetime.datetime.strptime('2014-04-01', '%Y-%m-%d').date(),
        'geolocation_data': 'test://data/data.file',
        'mapreduce_engine': 'local',
        'user_country_output': 'test://output/',
        'name': 'test',
        'src': ['test://input/'],
        'dest': 'test://output/'
    }

    task_class = None

    def setUp(self):
        self.event_templates = {}
        self.default_event_template = ''
        if hasattr(self, 'interval') and self.interval is not None:
            self.create_task(interval=self.interval)
        else:
            self.create_task()

    def create_task(self, **kwargs):
        """Allow arguments to be passed to the task constructor."""
        new_kwargs = {}
        for attr in self.DEFAULT_ARGS:
            # Only forward arguments the task class actually declares.
            if not hasattr(self.task_class, attr):
                continue
            value = kwargs.get(attr, self.DEFAULT_ARGS.get(attr))
            if attr == 'interval':
                new_kwargs[attr] = luigi.DateIntervalParameter().parse(value)
            else:
                new_kwargs[attr] = value

        self.task = self.task_class(**new_kwargs)  # pylint: disable=not-callable
        self.task.init_local()

    def create_event_log_line(self, **kwargs):
        """Create an event log with test values, as a JSON string."""
        return json.dumps(self.create_event_dict(**kwargs))

    def create_event_dict(self, **kwargs):
        """Create an event log with test values, as a dict."""
        # Pop bookkeeping kwargs so they don't leak into the event payload;
        # `template_name` was previously read with get() and therefore ended
        # up as a spurious key in the returned event dict.
        template_name = kwargs.pop('template_name', self.default_event_template)
        event_dict = kwargs.pop('template', self.event_templates[template_name]).copy()
        event_dict.update(**kwargs)
        return event_dict

    def assert_single_map_output(self, line, expected_key, expected_value):
        """Assert that an input line generates exactly one output record with the expected key and value"""
        # assertEqual replaces the deprecated assertEquals alias throughout.
        mapper_output = tuple(self.task.mapper(line))
        self.assertEqual(len(mapper_output), 1)
        row = mapper_output[0]
        self.assertEqual(len(row), 2)
        actual_key, actual_value = row
        self.assertEqual(expected_key, actual_key)
        self.assertEqual(expected_value, actual_value)

    def assert_single_map_output_load_jsons(self, line, expected_key, expected_value):
        """
        Checks if two tuples are equal, but loading jsons and comparing dictionaries rather than comparing JSON strings
        directly to avoid potential ordering issues.

        args:
            line is a tuple, possibly including json strings.
            expected_key is the expected key of the result of the mapping.
            expected_value is a tuple, possibly including dictionaries to be compared with the json strings in values.
        """
        mapper_output = tuple(self.task.mapper(line))
        self.assertEqual(len(mapper_output), 1)
        row = mapper_output[0]
        self.assertEqual(len(row), 2)
        actual_key, actual_value = row
        self.assertEqual(actual_key, expected_key)
        read_list = []
        expected_list = list(expected_value)
        for element in actual_value:
            # Load the json if we can, otherwise, just put in the element.
            try:
                read_list.append(json.loads(element))
            except ValueError:
                read_list.append(element)
        self.assertEqual(read_list, expected_list)

    def assert_no_map_output_for(self, line):
        """Assert that an input line generates no output."""
        self.assertEqual(
            tuple(self.task.mapper(line)),
            tuple()
        )
class ReducerTestMixin(object):
    """
    Base class for reduce function tests.

    Assumes that self.task_class is defined in a derived class.
    """
    DATE = '2013-12-17'
    COURSE_ID = 'foo/bar/baz'
    USERNAME = 'test_user'

    # This dictionary stores the default values for arguments to various task constructors; if not told otherwise,
    # the task constructor will pull needed values from this dictionary.
    DEFAULT_ARGS = {
        'interval': DATE,
        'output_root': '/fake/output',
        'end_date': datetime.datetime.strptime('2014-04-01', '%Y-%m-%d').date(),
        'import_date': datetime.datetime.strptime('2014-04-01', '%Y-%m-%d').date(),
        'geolocation_data': 'test://data/data.file',
        'mapreduce_engine': 'local',
        'user_country_output': 'test://output/',
        'name': 'test',
        'src': ['test://input/'],
        'dest': 'test://output/'
    }

    reduce_key = tuple()
    task_class = None

    def setUp(self):
        new_kwargs = {}
        for attr in self.DEFAULT_ARGS:
            # Only forward arguments the task class actually declares.
            if not hasattr(self.task_class, attr):
                continue
            value = getattr(self, attr, self.DEFAULT_ARGS.get(attr))
            if attr == 'interval':
                new_kwargs[attr] = luigi.DateIntervalParameter().parse(value)
            else:
                new_kwargs[attr] = value

        self.task = self.task_class(**new_kwargs)  # pylint: disable=not-callable
        self.task.init_local()

        self.reduce_key = tuple()

    def assert_no_output(self, input_value):
        """Asserts that the given input produces no output."""
        output = self._get_reducer_output(input_value)
        self.assertEqual(len(output), 0)

    def _get_reducer_output(self, inputs):
        """Runs the reducer and return the output."""
        return tuple(self.task.reducer(self.reduce_key, inputs))

    def _check_output_complete_tuple(self, inputs, expected):
        """Compare generated with expected output, comparing the entire tuples.

        args:
            inputs is a valid input to the subclass's reducer.
            expected is a tuple containing the expected output of the reducer.
        """
        self.assertEqual(self._get_reducer_output(inputs), expected)

    def _check_output_by_key(self, inputs, column_values):
        """
        Compare generated with expected output, but only checking specified columns

        args:
            inputs is a valid input to the subclass's reducer.
            column_values is a list of dictionaries, where the (key, value) pairs in the dictionary correspond to (column_num, expected_value)
                pairs in the expected reducer output.
        """
        output = self._get_reducer_output(inputs)
        if not isinstance(column_values, list):
            column_values = [column_values]
        self.assertEqual(len(output), len(column_values), '{0} != {1}'.format(output, column_values))
        for output_tuple, expected_columns in zip(output, column_values):
            # items() replaces the Python-2-only iteritems(), which raises
            # AttributeError on Python 3.
            for column_num, expected_value in expected_columns.items():
                self.assertEqual(output_tuple[column_num], expected_value)

    def _check_output_tuple_with_key(self, inputs, expected):
        """
        Compare generated with expected output, checking the whole tuple and including keys in the expected
        (in case the reduce_key changes midway through).

        args:
            inputs is a valid input to the subclass's reducer.
            expected is an iterable of (key, value) pairs corresponding to expected output.
        """
        expected_with_key = tuple([(key, self.reduce_key + value) for key, value in expected])
        self.assertEqual(self._get_reducer_output(inputs), expected_with_key)
|
from openerp.osv import orm, fields
class sale_order_confirm(orm.TransientModel):
    """Extend the sale-order confirmation wizard with the customer's
    requested date."""
    _inherit = "sale.order.confirm"

    # Wizard-level field exposed on the confirmation dialog.
    _columns = {
        'requested_date': fields.date(string="Requested Date", help="Date requested by the customer for the sale."),
    }

    def get_sale_order_confirm_line_vals(self, cr, uid, sale_order_line, context=None):
        # Prefer the line-level requested date; fall back to the order-level one.
        res = super(sale_order_confirm, self).get_sale_order_confirm_line_vals(cr, uid, sale_order_line, context)
        if sale_order_line.requested_date:
            res.update({'requested_date': sale_order_line.requested_date})
        elif sale_order_line.order_id.requested_date:
            res.update({
                'requested_date': sale_order_line.order_id.requested_date,
            })
            # NOTE(review): side effect — the order's requested date is also
            # written back onto the sale order line; confirm this is intended.
            sale_order_line.write({'requested_date': sale_order_line.order_id.requested_date,})
        return res

    def get_sale_order_line_vals(self, cr, uid, order_id, sale_order_line_data, context):
        # Pure pass-through; presumably kept as an extension point — confirm.
        res = super(sale_order_confirm, self).get_sale_order_line_vals(cr, uid, order_id, sale_order_line_data, context)
        return res
|
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from gasistafelice.base.utils import get_ctype_from_model_label
from permissions.models import Permission, Role
from gasistafelice.base.models import Resource
class ParamRole(Resource, Role):
    """
    A custom role model class inheriting from `django-permissions`'s`Role` model.

    This way, we are able to augment the base `Role` model
    (carrying only a `name` field attribute) with additional information
    needed to describe those 'parametric' roles arising in this application domain.

    A parametric role (`ParamRole`) can be tied to:

    1) a given GAS (e.g. GAS_REFERRER_CASH, GAS_REFERRER_TECH),
    2) a given Supplier (e.g. SUPPLIER_REFERRER, GAS_REFERRER_SUPPLIER),
    3) a given Delivery appointment (e.g. GAS_REFERRER_DELIVERY)
    4) a given Withdrawal appointment (e.g. GAS_REFERRER_WITHDRAWAL)
    5) a given GASSupplierOrder (e.g. GAS_REFERRER_ORDER)
    6) a given "Retina" (TODO)
    """
    # link to the base model class (`BaseRole`)
    role = models.OneToOneField(Role, parent_link=True)

    ## Generic ForeignKey for the first (optional) Role parameter
    content_type_1 = models.ForeignKey(ContentType, related_name="param_role_primary_set")
    obj_id_1 = models.PositiveIntegerField()
    param1 = generic.GenericForeignKey(ct_field="content_type_1", fk_field="obj_id_1")

    ## Generic ForeignKey for the second (optional) Role parameter
    content_type_2 = models.ForeignKey(ContentType, null=True, blank=True, related_name="param_role_secondary_set")
    obj_id_2 = models.PositiveIntegerField(null=True, blank=True)
    param2 = generic.GenericForeignKey(ct_field="content_type_2", fk_field="obj_id_2")

    class Meta:
        # forbid duplicated ParamRole entries in the DB
        unique_together = ("role", "content_type_1", "obj_id_1", "content_type_2", "obj_id_2")

    def _param_instance(self, model_label):
        """Return the role parameter of the given model type, or None.

        Checks both generic-FK slots for a matching content type and looks
        the object up by the pk stored in the *matching* slot. (The old
        `gas` property's first branch wrongly used obj_id_2 with
        content_type_1, so GAS parameters stored in the first slot were
        looked up with the wrong pk.)
        """
        ct = get_ctype_from_model_label(model_label)
        model = ct.model_class()
        if self.content_type_1 == ct:
            pk = self.obj_id_1
        elif self.content_type_2 == ct:
            pk = self.obj_id_2
        else:
            return None
        try:
            return model.objects.get(pk=pk)
        except model.DoesNotExist:
            return None

    @property
    def gas(self):
        """The GAS this role is tied to, if any."""
        return self._param_instance('gas.GAS')

    @property
    def supplier(self):
        """The Supplier this role is tied to, if any."""
        return self._param_instance('supplier.Supplier')

    @property
    def order(self):
        """The GASSupplierOrder this role is tied to, if any."""
        return self._param_instance('gas.GASSupplierOrder')
class GlobalPermission(models.Model):
    # Appears to associate a permission with a role for a whole content type
    # (model-wide rather than per-object) — confirm against usage.
    permission = models.ForeignKey(Permission)
    role = models.ForeignKey(Role)
    content_type = models.ForeignKey(ContentType)

    class Meta:
        # forbid duplicated GlobalPermission entries in the DB
        unique_together = ("permission", "role", "content_type")
|
"""Adobe PDF core font set"""
import os
from . import FONTS_PATH
from ..font import TypeFace, TypeFamily
from ..font.type1 import Type1Font
from ..font.style import REGULAR, MEDIUM, BOLD, OBLIQUE, ITALIC, CONDENSED
def path(name):
    """Return the filesystem path of core font *name* in the adobe14 set."""
    adobe14_dir = os.path.join(FONTS_PATH, 'adobe14')
    return os.path.join(adobe14_dir, name)
# The Adobe/PDF core Type 1 faces; `core=True` presumably marks fonts a PDF
# viewer must supply so the font program need not be embedded — confirm
# against the Type1Font implementation.
courier = TypeFace('Courier',
                   Type1Font(path('Courier'), core=True),
                   Type1Font(path('Courier-Oblique'), slant=OBLIQUE, core=True),
                   Type1Font(path('Courier-Bold'), weight=BOLD, core=True),
                   Type1Font(path('Courier-BoldOblique'), weight=BOLD,
                             slant=OBLIQUE, core=True))
helvetica = TypeFace('Helvetica',
                     Type1Font(path('Helvetica'), core=True),
                     Type1Font(path('Helvetica-Oblique'), slant=OBLIQUE,
                               core=True),
                     Type1Font(path('Helvetica-Bold'), weight=BOLD, core=True),
                     Type1Font(path('Helvetica-BoldOblique'), weight=BOLD,
                               slant=OBLIQUE, core=True))
symbol = TypeFace('Symbol', Type1Font(path('Symbol'), core=True))
times = TypeFace('Times',
                 Type1Font(path('Times-Roman'), weight=REGULAR, core=True),
                 Type1Font(path('Times-Italic'), slant=ITALIC, core=True),
                 Type1Font(path('Times-Bold'), weight=BOLD, core=True),
                 Type1Font(path('Times-BoldItalic'), weight=BOLD, slant=ITALIC,
                           core=True))
zapfdingbats = TypeFace('ITC ZapfDingbats', Type1Font(path('ZapfDingbats'),
                                                      core=True))

# Default family mapping used for PDF output: serif/sans/mono plus the two
# special-purpose faces.
pdf_family = TypeFamily(serif=times, sans=helvetica, mono=courier,
                        symbol=symbol, dingbats=zapfdingbats)
|
from twisted.internet.defer import inlineCallbacks
from juju.control import legacy
from juju.control.utils import expand_constraints, get_environment
def configure_subparser(subparsers):
    """Configure bootstrap subcommand"""
    # The subcommand's help text is the docstring of the `command` coroutine.
    parser = subparsers.add_parser("bootstrap", help=command.__doc__)
    parser.add_argument(
        "--environment", "-e",
        help="juju environment to operate in.")
    # Constraints are parsed straight into the canonical expanded form.
    parser.add_argument(
        "--constraints",
        help="default hardware constraints for this environment.",
        default=[],
        type=expand_constraints)
    return parser
@inlineCallbacks
def command(options):
    """
    Bootstrap machine providers in the specified environment.
    """
    environment = get_environment(options)
    provider = environment.get_machine_provider()
    # Report deprecated/legacy configuration keys before bootstrapping;
    # legacy.error presumably aborts the command — confirm.
    legacy_keys = provider.get_legacy_config_keys()
    if legacy_keys:
        legacy.error(legacy_keys)

    # Parse the CLI constraints and pin them to the environment's series.
    constraint_set = yield provider.get_constraint_set()
    constraints = constraint_set.parse(options.constraints)
    constraints = constraints.with_series(environment.default_series)

    options.log.info(
        "Bootstrapping environment %r (origin: %s type: %s)..." % (
            environment.name, environment.origin, environment.type))
    yield provider.bootstrap(constraints)
|
"""
Provide tests for sysadmin dashboard feature in sysadmin.py
"""
import glob
import os
import re
import shutil
import unittest
from uuid import uuid4
from mock import patch
from pymongo.errors import PyMongoError
from util.date_utils import get_time_display, DEFAULT_DATE_TIME_FORMAT
from nose.plugins.attrib import attr
from django.conf import settings
from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test.client import Client
from django.test.utils import override_settings
from django.utils.timezone import utc as UTC
from django.utils.translation import ugettext as _
import mongoengine
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xmodule.modulestore.tests.django_utils import TEST_DATA_XML_MODULESTORE
from dashboard.models import CourseImportLog
from dashboard.sysadmin import Users
from dashboard.git_import import GitImportError
from datetime import datetime
from external_auth.models import ExternalAuthMap
from student.roles import CourseStaffRole, GlobalStaff
from student.tests.factories import UserFactory
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from instructor_task.tests.factories import InstructorTaskFactory
# Connection settings for the MongoDB instance backing course-import logs
# in these tests (credentials intentionally blank for the local test server).
TEST_MONGODB_LOG = {
    'host': MONGO_HOST,
    'port': MONGO_PORT_NUM,
    'user': '',
    'password': '',
    'db': 'test_xlog',
}

# Copy of the feature flags with SSL (certificate) auth enabled, for use
# with override_settings in the SSL-auth test cases.
FEATURES_WITH_SSL_AUTH = settings.FEATURES.copy()
FEATURES_WITH_SSL_AUTH['AUTH_USE_CERTIFICATES'] = True
class SysadminBaseTestCase(ModuleStoreTestCase):
    """
    Base class with common methods used in XML and Mongo tests
    """
    # Public sample-course repo and branch used by the git-import tests.
    TEST_REPO = 'https://github.com/mitocw/edx4edx_lite.git'
    TEST_BRANCH = 'testing_do_not_delete'
    TEST_BRANCH_COURSE = SlashSeparatedCourseKey('MITx', 'edx4edx_branch', 'edx4edx')

    def setUp(self):
        """Setup test case by adding primary user."""
        super(SysadminBaseTestCase, self).setUp(create_user=False)
        self.user = UserFactory.create(username='test_user',
                                       email='test_user+sysadmin@edx.org',
                                       password='foo')
        self.client = Client()

    def _setsuperuser_login(self):
        """Makes the test user a superuser and logs them in"""
        self.user.is_superuser = True
        self.user.save()
        self.client.login(username=self.user.username, password='foo')

    def _setstaff_login(self):
        """Makes the test user staff and logs them in"""
        GlobalStaff().add_users(self.user)
        self.client.login(username=self.user.username, password='foo')

    def _add_edx4edx(self, branch=None):
        """Adds the edx4edx sample course via the sysadmin courses view"""
        post_dict = {'repo_location': self.TEST_REPO, 'action': 'add_course', }
        if branch:
            post_dict['repo_branch'] = branch
        return self.client.post(reverse('sysadmin_courses'), post_dict)

    def _rm_edx4edx(self):
        """Deletes the sample course from the XML store"""
        def_ms = modulestore()
        course_path = '{0}/edx4edx_lite'.format(
            os.path.abspath(settings.DATA_DIR))
        try:
            # using XML store
            course = def_ms.courses.get(course_path, None)
        except AttributeError:
            # Using mongo store
            course = def_ms.get_course(SlashSeparatedCourseKey('MITx', 'edx4edx', 'edx4edx'))

        # Delete git loaded course
        response = self.client.post(
            reverse('sysadmin_courses'),
            {
                'course_id': course.id.to_deprecated_string(),
                'action': 'del_course',
            }
        )
        # Deleted course dirs are renamed with a _deleted_* suffix on disk;
        # clean those up after the test as well.
        self.addCleanup(self._rm_glob, '{0}_deleted_*'.format(course_path))

        return response

    def _rm_glob(self, path):
        """
        Create a shell expansion of passed in parameter and iteratively
        remove them.  Must only expand to directories.
        """
        for path in glob.glob(path):
            shutil.rmtree(path)

    def _mkdir(self, path):
        """
        Create directory and add the cleanup for it.
        """
        os.mkdir(path)
        self.addCleanup(shutil.rmtree, path)
@attr('shard_1')
@unittest.skipUnless(settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'),
                     "ENABLE_SYSADMIN_DASHBOARD not set")
@override_settings(GIT_IMPORT_WITH_XMLMODULESTORE=True)
class TestSysadmin(SysadminBaseTestCase):
    """
    Test sysadmin dashboard features using XMLModuleStore.

    Covers access control, user creation/deletion/renaming, CSV downloads,
    external-auth map repair, XML course import/deletion via git, and the
    instructor-task kill action.
    """

    MODULESTORE = TEST_DATA_XML_MODULESTORE

    def test_staff_access(self):
        """Test access controls."""
        test_views = ['sysadmin', 'sysadmin_courses', 'sysadmin_staffing', ]
        # Not logged in: every dashboard view redirects to login.
        for view in test_views:
            response = self.client.get(reverse(view))
            self.assertEqual(response.status_code, 302)

        self.user.is_staff = False
        self.user.save()
        logged_in = self.client.login(username=self.user.username,
                                      password='foo')
        self.assertTrue(logged_in)

        # Logged in but not staff: the dashboard 404s.
        for view in test_views:
            response = self.client.get(reverse(view))
            self.assertEqual(response.status_code, 404)
        response = self.client.get(reverse('gitlogs'))
        self.assertEqual(response.status_code, 404)

        self.user.is_staff = True
        self.user.save()

        self.client.logout()
        self.client.login(username=self.user.username, password='foo')

        # Staff user: all views serve OK.
        for view in test_views:
            response = self.client.get(reverse(view))
            # Fixed: ``assertTrue(status_code, 200)`` always passed because
            # any non-zero status code is truthy (200 became the failure
            # message); equality is what was intended.
            self.assertEqual(response.status_code, 200)
        response = self.client.get(reverse('gitlogs'))
        self.assertEqual(response.status_code, 200)

    def test_rename_user(self):
        """
        Tests the rename user feature
        """
        self._setstaff_login()
        self.client.login(username=self.user.username, password='foo')

        user1 = UserFactory.create(
            username='test_rename_user',
            email='test_rename_user@edx.org',
            password='foo',
        )
        user2 = UserFactory.create(
            username=user1.username + '_second',
            email=user1.username + '_second@edx.org',
            password='foo',
        )
        username_new = 'targetName'
        username_nonexistent = 'notFoundName'

        # ensures that the test database doesn't have a user with username `notFoundName`
        self.assertEqual(0, len(User.objects.filter(
            username=username_nonexistent,
        )))

        # tests response when one field is blank
        response = self.client.post(reverse('sysadmin'), {
            'action': 'rename_user',
            'username_old': '',
            'username_new': username_new,
        })
        self.assertIn("Usernames cannot be blank", response.content.decode('utf-8'))

        # tests response when user is not found
        response = self.client.post(reverse('sysadmin'), {
            'action': 'rename_user',
            'username_old': username_nonexistent,
            'username_new': username_new,
        })
        self.assertIn("User '{user}' does not exist".format(
            user=username_nonexistent,
        ), response.content.decode('utf-8'))

        # tests response when rename fails due to integrity error
        response = self.client.post(reverse('sysadmin'), {
            'action': 'rename_user',
            'username_old': user1.username,
            'username_new': user2.username,
        })
        self.assertIn("User '{user}' already exists".format(
            user=user2.username,
        ), response.content.decode('utf-8'))

        # tests response when rename is successful
        response = self.client.post(reverse('sysadmin'), {
            'action': 'rename_user',
            'username_old': user1.username,
            'username_new': username_new,
        })
        self.assertIn("Changed username of user '{user}'".format(
            user=user1.username,
        ), response.content.decode('utf-8'))

        # tests response when PyMongoError is raised
        with patch('dashboard.sysadmin.rename_user_util') as mock_rename_user_util:
            mock_rename_user_util.side_effect = PyMongoError()
            response = self.client.post(reverse('sysadmin'), {
                'action': 'rename_user',
                'username_old': user1.username,
                'username_new': user2.username,
            })
        self.assertIn("Failed to modify username for user '{user}'".format(
            user=user1.username,
        ), response.content.decode('utf-8'))

        # cleanup users
        user1.delete()
        user2.delete()

    def test_user_mod(self):
        """Create and delete a user"""
        self._setstaff_login()
        self.client.login(username=self.user.username, password='foo')

        # Create user tests

        # No uname
        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'create_user',
                                     'student_fullname': 'blah',
                                     'student_password': 'foozor', })
        self.assertIn('Must provide username', response.content.decode('utf-8'))
        # no full name
        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'create_user',
                                     'student_uname': 'test_cuser+sysadmin@edx.org',
                                     'student_password': 'foozor', })
        self.assertIn('Must provide full name', response.content.decode('utf-8'))

        # Test create valid user
        self.client.post(reverse('sysadmin'),
                         {'action': 'create_user',
                          'student_uname': 'test_cuser+sysadmin@edx.org',
                          'student_fullname': 'test cuser',
                          'student_password': 'foozor', })

        self.assertIsNotNone(
            User.objects.get(username='test_cuser+sysadmin@edx.org',
                             email='test_cuser+sysadmin@edx.org'))

        # login as new user to confirm
        self.assertTrue(self.client.login(
            username='test_cuser+sysadmin@edx.org', password='foozor'))

        self.client.logout()
        self.client.login(username=self.user.username, password='foo')

        # Delete user tests

        # Try no username
        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'del_user', })
        self.assertIn('Must provide username', response.content.decode('utf-8'))

        # Try bad usernames
        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'del_user',
                                     'student_uname': 'flabbergast@example.com',
                                     'student_fullname': 'enigma jones', })
        self.assertIn('Cannot find user with email address', response.content.decode('utf-8'))

        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'del_user',
                                     'student_uname': 'flabbergast',
                                     'student_fullname': 'enigma jones', })
        self.assertIn('Cannot find user with username', response.content.decode('utf-8'))

        self.client.post(reverse('sysadmin'),
                         {'action': 'del_user',
                          'student_uname': 'test_cuser+sysadmin@edx.org',
                          'student_fullname': 'test cuser', })

        self.assertEqual(0, len(User.objects.filter(
            username='test_cuser+sysadmin@edx.org',
            email='test_cuser+sysadmin@edx.org')))

        self.assertEqual(1, len(User.objects.all()))

    def test_user_csv(self):
        """Download and validate user CSV"""
        num_test_users = 100
        self._setstaff_login()

        # Stuff full of users to test streaming
        for user_num in xrange(num_test_users):
            Users().create_user('testingman_with_long_name{}'.format(user_num),
                                'test test')

        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'download_users', })

        self.assertIn('attachment', response['Content-Disposition'])
        self.assertEqual('text/csv', response['Content-Type'])
        self.assertIn('test_user', response.content)
        # Fixed: this was ``assertTrue(num_test_users + 2, ...)``, which
        # always passed (102 is truthy). Expect header row + the base
        # test_user + the 100 users created above.
        self.assertEqual(num_test_users + 2, len(response.content.splitlines()))

        # Clean up
        User.objects.filter(
            username__startswith='testingman_with_long_name').delete()

    @override_settings(FEATURES=FEATURES_WITH_SSL_AUTH)
    def test_authmap_repair(self):
        """Run authmap check and repair"""
        self._setstaff_login()
        Users().create_user('test0', 'test test')
        # Will raise exception, so no assert needed
        eamap = ExternalAuthMap.objects.get(external_name='test test')
        mitu = User.objects.get(username='test0')

        self.assertTrue(check_password(eamap.internal_password, mitu.password))
        mitu.set_password('not autogenerated')
        mitu.save()

        self.assertFalse(check_password(eamap.internal_password, mitu.password))

        # Create really non user AuthMap
        ExternalAuthMap(external_id='ll',
                        external_domain='ll',
                        external_credentials='{}',
                        external_email='a@b.c',
                        external_name='c',
                        internal_password='').save()

        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'repair_eamap', })
        self.assertIn('{0} test0'.format('Failed in authenticating'),
                      response.content)
        self.assertIn('fixed password', response.content.decode('utf-8'))

        self.assertTrue(self.client.login(username='test0',
                                          password=eamap.internal_password))

        # Check for all OK
        self._setstaff_login()
        response = self.client.post(reverse('sysadmin'),
                                    {'action': 'repair_eamap', })
        self.assertIn('All ok!', response.content.decode('utf-8'))

    def test_xml_course_add_delete(self):
        """add and delete course from xml module store"""
        self._setstaff_login()

        # Try bad git repo
        response = self.client.post(reverse('sysadmin_courses'), {
            'repo_location': 'github.com/mitocw/edx4edx_lite',
            'action': 'add_course', })
        self.assertIn(_("The git repo location should end with '.git', "
                        "and be a valid url"), response.content.decode('utf-8'))

        response = self.client.post(reverse('sysadmin_courses'), {
            'repo_location': 'http://example.com/not_real.git',
            'action': 'add_course', })
        self.assertIn('Unable to clone or pull repository',
                      response.content.decode('utf-8'))
        # Create git loaded course
        response = self._add_edx4edx()
        def_ms = modulestore()
        self.assertEqual('xml', def_ms.get_modulestore_type(None))
        course = def_ms.courses.get('{0}/edx4edx_lite'.format(
            os.path.abspath(settings.DATA_DIR)), None)
        self.assertIsNotNone(course)

        # Delete a course
        self._rm_edx4edx()
        course = def_ms.courses.get('{0}/edx4edx_lite'.format(
            os.path.abspath(settings.DATA_DIR)), None)
        self.assertIsNone(course)

        # Load a bad git branch
        response = self._add_edx4edx('asdfasdfasdf')
        self.assertIn(GitImportError.REMOTE_BRANCH_MISSING,
                      response.content.decode('utf-8'))

        # Load a course from a git branch
        self._add_edx4edx(self.TEST_BRANCH)
        course = def_ms.courses.get('{0}/edx4edx_lite'.format(
            os.path.abspath(settings.DATA_DIR)), None)
        self.assertIsNotNone(course)
        self.assertEqual(self.TEST_BRANCH_COURSE, course.id)
        self._rm_edx4edx()

        # Try and delete a non-existent course
        response = self.client.post(reverse('sysadmin_courses'),
                                    {'course_id': 'foobar/foo/blah',
                                     'action': 'del_course', })
        self.assertIn('Error - cannot get course with ID',
                      response.content.decode('utf-8'))

    @override_settings(GIT_IMPORT_WITH_XMLMODULESTORE=False)
    def test_xml_safety_flag(self):
        """Make sure the settings flag to disable xml imports is working"""
        self._setstaff_login()
        response = self._add_edx4edx()
        self.assertIn('GIT_IMPORT_WITH_XMLMODULESTORE', response.content)
        def_ms = modulestore()
        course = def_ms.courses.get('{0}/edx4edx_lite'.format(
            os.path.abspath(settings.DATA_DIR)), None)
        self.assertIsNone(course)

    def test_git_pull(self):
        """Make sure we can pull"""
        self._setstaff_login()

        response = self._add_edx4edx()
        # Importing the same repo a second time should re-pull, not fail.
        response = self._add_edx4edx()
        self.assertIn(_("The course {0} already exists in the data directory! "
                        "(reloading anyway)").format('edx4edx_lite'),
                      response.content.decode('utf-8'))
        self._rm_edx4edx()

    def test_staff_csv(self):
        """Download and validate staff CSV"""
        self._setstaff_login()
        self._add_edx4edx()
        def_ms = modulestore()
        course = def_ms.get_course(SlashSeparatedCourseKey('MITx', 'edx4edx', 'edx4edx'))
        CourseStaffRole(course.id).add_users(self.user)

        response = self.client.post(reverse('sysadmin_staffing'),
                                    {'action': 'get_staff_csv', })
        self.assertIn('attachment', response['Content-Disposition'])
        self.assertEqual('text/csv', response['Content-Type'])
        columns = ['course_id', 'role', 'username',
                   'email', 'full_name', ]
        self.assertIn(','.join('"' + c + '"' for c in columns),
                      response.content)

        self._rm_edx4edx()

    def test_enrollment_page(self):
        """
        Adds a course and makes sure that it shows up on the staffing and
        enrollment page
        """
        self._setstaff_login()
        self._add_edx4edx()
        response = self.client.get(reverse('sysadmin_staffing'))
        self.assertIn('edx4edx', response.content)
        self._rm_edx4edx()

    def test_task_queue(self):
        """Kill an InstructorTask"""
        self._setstaff_login()
        self.client.login(username=self.user.username, password='foo')

        # Missing ID.
        response = self.client.post(
            reverse(
                'sysadmin_task_queue'
            ),
            {
                'action': 'kill_task',
            }
        )
        self.assertIn('Must provide an ID', response.content.decode('utf-8'))

        # ID not an integer.
        response = self.client.post(
            reverse(
                'sysadmin_task_queue'
            ),
            {
                'action': 'kill_task',
                'row_id': 'abc',
            }
        )
        self.assertIn('ID must be an integer', response.content.decode('utf-8'))

        # InstructorTask with this ID doesn't exist.
        response = self.client.post(
            reverse(
                'sysadmin_task_queue'
            ),
            {
                'action': 'kill_task',
                'row_id': '123',
            }
        )
        self.assertIn('Cannot find task with ID 123 and task_state QUEUING - InstructorTask matching query does not exist.', response.content.decode('utf-8'))

        # Create InstructorTask with incorrect task_state.
        instructor_task = InstructorTaskFactory.create(
            task_key='dummy value',
            task_id=str(uuid4()),
            task_state='SUCCESS',
        )
        response = self.client.post(
            reverse(
                'sysadmin_task_queue'
            ),
            {
                'action': 'kill_task',
                'row_id': instructor_task.id,
            }
        )
        self.assertIn(
            'Cannot find task with ID {instructor_task_id} and task_state QUEUING - InstructorTask matching query does not exist.'.format(
                instructor_task_id=instructor_task.id,
            ),
            response.content.decode(
                'utf-8',
            )
        )

        # Create InstructorTask with correct task_state
        instructor_task = InstructorTaskFactory.create(
            task_key='dummy value',
            task_id=str(uuid4()),
            task_state='QUEUING',
        )
        response = self.client.post(
            reverse(
                'sysadmin_task_queue'
            ),
            {
                'action': 'kill_task',
                'row_id': instructor_task.id,
            }
        )
        self.assertIn(
            'Task with id {instructor_task_id} was successfully killed!'.format(
                instructor_task_id=instructor_task.id,
            ),
            response.content.decode(
                'utf-8',
            )
        )
@attr('shard_1')
@override_settings(MONGODB_LOG=TEST_MONGODB_LOG)
@unittest.skipUnless(settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'),
                     "ENABLE_SYSADMIN_DASHBOARD not set")
class TestSysAdminMongoCourseImport(SysadminBaseTestCase):
    """
    Check that importing into the mongo module store works
    """

    @classmethod
    def tearDownClass(cls):
        """Delete mongo log entries after test."""
        super(TestSysAdminMongoCourseImport, cls).tearDownClass()
        try:
            # Best-effort cleanup; swallow connection failures so a missing
            # mongo instance doesn't mask the real test results.
            mongoengine.connect(TEST_MONGODB_LOG['db'])
            CourseImportLog.objects.all().delete()
        except mongoengine.connection.ConnectionError:
            pass

    def _setstaff_login(self):
        """
        Makes the test user staff and logs them in
        """
        # NOTE: unlike the base-class helper (which uses GlobalStaff), this
        # override flips the Django ``is_staff`` flag directly.
        self.user.is_staff = True
        self.user.save()

        self.client.login(username=self.user.username, password='foo')

    def test_missing_repo_dir(self):
        """
        Ensure that we handle a missing repo dir
        """
        self._setstaff_login()

        if os.path.isdir(getattr(settings, 'GIT_REPO_DIR')):
            shutil.rmtree(getattr(settings, 'GIT_REPO_DIR'))

        # Create git loaded course
        response = self._add_edx4edx()
        self.assertIn(GitImportError.NO_DIR,
                      response.content.decode('UTF-8'))

    def test_mongo_course_add_delete(self):
        """
        This is the same as TestSysadmin.test_xml_course_add_delete,
        but it uses a mongo store
        """
        self._setstaff_login()
        self._mkdir(getattr(settings, 'GIT_REPO_DIR'))

        def_ms = modulestore()
        self.assertFalse('xml' == def_ms.get_modulestore_type(None))

        self._add_edx4edx()
        course = def_ms.get_course(SlashSeparatedCourseKey('MITx', 'edx4edx', 'edx4edx'))
        self.assertIsNotNone(course)

        self._rm_edx4edx()
        course = def_ms.get_course(SlashSeparatedCourseKey('MITx', 'edx4edx', 'edx4edx'))
        self.assertIsNone(course)

    def test_course_info(self):
        """
        Check to make sure we are getting git info for courses
        """
        # Regex of first 3 columns of course information table row for
        # test course loaded from git. Would not have sha1 if
        # git_info_for_course failed.
        table_re = re.compile(r"""
            <tr>\s+
            <td>edX\sAuthor\sCourse</td>\s+  # expected test git course name
            <td>MITx/edx4edx/edx4edx</td>\s+  # expected test git course_id
            <td>[a-fA-F\d]{40}</td>  # git sha1 hash
        """, re.VERBOSE)

        self._setstaff_login()
        self._mkdir(getattr(settings, 'GIT_REPO_DIR'))

        # Make sure we don't have any git hashes on the page
        response = self.client.get(reverse('sysadmin_courses'))
        self.assertNotRegexpMatches(response.content, table_re)

        # Now add the course and make sure it does match
        response = self._add_edx4edx()
        self.assertRegexpMatches(response.content, table_re)

    def test_gitlogs(self):
        """
        Create a log entry and make sure it exists
        """
        self._setstaff_login()
        self._mkdir(getattr(settings, 'GIT_REPO_DIR'))

        self._add_edx4edx()
        response = self.client.get(reverse('gitlogs'))

        # Check that our earlier import has a log with a link to details
        self.assertIn('/gitlogs/MITx/edx4edx/edx4edx', response.content)

        response = self.client.get(
            reverse('gitlogs_detail', kwargs={
                'course_id': 'MITx/edx4edx/edx4edx'}))

        self.assertIn('======&gt; IMPORTING course',
                      response.content)

        self._rm_edx4edx()

    def test_gitlog_date(self):
        """
        Make sure the date is timezone-aware and being converted/formatted
        properly.
        """
        # Timezones on both sides of UTC to catch conversion errors in
        # either direction.
        tz_names = [
            'America/New_York',  # UTC - 5
            'Asia/Pyongyang',    # UTC + 9
            'Europe/London',     # UTC
            'Canada/Yukon',      # UTC - 8
            'Europe/Moscow',     # UTC + 4
        ]
        tz_format = DEFAULT_DATE_TIME_FORMAT

        self._setstaff_login()
        self._mkdir(getattr(settings, 'GIT_REPO_DIR'))

        self._add_edx4edx()
        date = CourseImportLog.objects.first().created.replace(tzinfo=UTC)

        for timezone in tz_names:
            with (override_settings(TIME_ZONE=timezone)):
                date_text = get_time_display(date, tz_format, settings.TIME_ZONE)
                response = self.client.get(reverse('gitlogs'))
                self.assertIn(date_text, response.content.decode('UTF-8'))

        self._rm_edx4edx()

    def test_gitlog_bad_course(self):
        """
        Make sure we gracefully handle courses that don't exist.
        """
        self._setstaff_login()
        response = self.client.get(
            reverse('gitlogs_detail', kwargs={
                'course_id': 'Not/Real/Testing'}))
        self.assertEqual(404, response.status_code)

    def test_gitlog_no_logs(self):
        """
        Make sure the template behaves well when rendered despite there not being any logs.
        (This is for courses imported using methods other than the git_add_course command)
        """

        self._setstaff_login()
        self._mkdir(getattr(settings, 'GIT_REPO_DIR'))

        self._add_edx4edx()

        # Simulate a lack of git import logs
        import_logs = CourseImportLog.objects.all()
        import_logs.delete()

        response = self.client.get(
            reverse('gitlogs_detail', kwargs={
                'course_id': 'MITx/edx4edx/edx4edx'
            })
        )

        self.assertIn(
            'No git import logs have been recorded for this course.',
            response.content
        )

        self._rm_edx4edx()

    def test_gitlog_pagination_out_of_range_invalid(self):
        """
        Make sure the pagination behaves properly when the requested page is out
        of range.
        """

        self._setstaff_login()

        mongoengine.connect(TEST_MONGODB_LOG['db'])

        # Fifteen entries / default page size -> exactly two pages.
        for _ in xrange(15):
            CourseImportLog(
                course_id=SlashSeparatedCourseKey("test", "test", "test"),
                location="location",
                import_log="import_log",
                git_log="git_log",
                repo_dir="repo_dir",
                created=datetime.now()
            ).save()

        # Out-of-range and non-numeric pages must clamp to a valid page.
        for page, expected in [(-1, 1), (1, 1), (2, 2), (30, 2), ('abc', 1)]:
            response = self.client.get(
                '{}?page={}'.format(
                    reverse('gitlogs'),
                    page
                )
            )
            self.assertIn(
                'Page {} of 2'.format(expected),
                response.content
            )

        CourseImportLog.objects.delete()

    def test_gitlog_courseteam_access(self):
        """
        Ensure course team users are allowed to access only their own course.
        """

        self._mkdir(getattr(settings, 'GIT_REPO_DIR'))

        self._setstaff_login()
        self._add_edx4edx()
        self.user.is_staff = False
        self.user.save()
        logged_in = self.client.login(username=self.user.username,
                                      password='foo')
        response = self.client.get(reverse('gitlogs'))
        # Make sure our non privileged user doesn't have access to all logs
        self.assertEqual(response.status_code, 404)
        # Or specific logs
        response = self.client.get(reverse('gitlogs_detail', kwargs={
            'course_id': 'MITx/edx4edx/edx4edx'
        }))
        self.assertEqual(response.status_code, 404)

        # Add user as staff in course team
        def_ms = modulestore()
        course = def_ms.get_course(SlashSeparatedCourseKey('MITx', 'edx4edx', 'edx4edx'))
        CourseStaffRole(course.id).add_users(self.user)

        self.assertTrue(CourseStaffRole(course.id).has_user(self.user))
        logged_in = self.client.login(username=self.user.username,
                                      password='foo')
        self.assertTrue(logged_in)

        response = self.client.get(
            reverse('gitlogs_detail', kwargs={
                'course_id': 'MITx/edx4edx/edx4edx'
            }))
        self.assertIn('======&gt; IMPORTING course',
                      response.content)

        self._rm_edx4edx()
# ---- file boundary: end of sysadmin dashboard tests, start of course home page tests ----
"""
Tests for the course home page.
"""
from datetime import datetime, timedelta
import ddt
import mock
import six
from django.conf import settings
from django.http import QueryDict
from django.urls import reverse
from django.utils.http import urlquote_plus
from django.utils.timezone import now
from pytz import UTC
from waffle.models import Flag
from waffle.testutils import override_flag
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from lms.djangoapps.courseware.tests.helpers import get_expiration_banner_text
from experiments.models import ExperimentData
from lms.djangoapps.commerce.models import CommerceConfiguration
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.course_goals.api import add_course_goal, remove_course_goal
from lms.djangoapps.courseware.utils import verified_upgrade_deadline_link
from lms.djangoapps.courseware.tests.factories import (
BetaTesterFactory,
GlobalStaffFactory,
InstructorFactory,
OrgInstructorFactory,
OrgStaffFactory,
StaffFactory
)
from lms.djangoapps.discussion.django_comment_client.tests.factories import RoleFactory
from openedx.features.discounts.applicability import get_discount_expiration_date
from openedx.features.discounts.utils import format_strikeout_price, REV1008_EXPERIMENT_ID
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.dark_lang.models import DarkLangConfig
from openedx.core.djangoapps.django_comment_common.models import (
FORUM_ROLE_ADMINISTRATOR,
FORUM_ROLE_COMMUNITY_TA,
FORUM_ROLE_GROUP_MODERATOR,
FORUM_ROLE_MODERATOR
)
from openedx.core.djangoapps.schedules.tests.factories import ScheduleFactory
from openedx.core.djangoapps.waffle_utils.testutils import WAFFLE_TABLES, override_waffle_flag
from openedx.core.djangolib.markup import HTML
from openedx.features.course_duration_limits.models import CourseDurationLimitConfig
from openedx.features.course_experience import (
COURSE_ENABLE_UNENROLLED_ACCESS_FLAG,
RELATIVE_DATES_FLAG,
SHOW_REVIEWS_TOOL_FLAG,
SHOW_UPGRADE_MSG_ON_COURSE_HOME,
UNIFIED_COURSE_TAB_FLAG
)
from student.models import CourseEnrollment, FBEEnrollmentExclusion
from student.tests.factories import UserFactory
from util.date_utils import strftime_localized
from xmodule.course_module import COURSE_VISIBILITY_PRIVATE, COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import CourseUserType, ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
from ... import COURSE_PRE_START_ACCESS_FLAG, ENABLE_COURSE_GOALS
from .helpers import add_course_mode, remove_course_mode
from .test_course_updates import create_course_update, remove_course_updates
# Credentials and fixture names shared by the tests below.
TEST_PASSWORD = 'test'
TEST_CHAPTER_NAME = 'Test Chapter'
TEST_COURSE_TOOLS = 'Course Tools'
# HTML fragments the tests assert on in rendered course-home responses.
TEST_BANNER_CLASS = '<div class="course-expiration-message">'
TEST_WELCOME_MESSAGE = '<h2>Welcome!</h2>'
TEST_UPDATE_MESSAGE = '<h2>Test Update!</h2>'
TEST_COURSE_UPDATES_TOOL = '/course/updates">'
# Course-home message markers for the various user states.
TEST_COURSE_HOME_MESSAGE = 'course-message'
TEST_COURSE_HOME_MESSAGE_ANONYMOUS = '/login'
TEST_COURSE_HOME_MESSAGE_UNENROLLED = 'Enroll now'
TEST_COURSE_HOME_MESSAGE_PRE_START = 'Course starts in'
# Course-goal widget markers.
TEST_COURSE_GOAL_OPTIONS = 'goal-options-container'
TEST_COURSE_GOAL_UPDATE_FIELD = 'section-goals'
TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN = 'section-goals hidden'
COURSE_GOAL_DISMISS_OPTION = 'unsure'

THREE_YEARS_AGO = now() - timedelta(days=(365 * 3))
# Waffle tables are excluded from assertNumQueries counts.
QUERY_COUNT_TABLE_BLACKLIST = WAFFLE_TABLES
def course_home_url(course):
    """
    Return the URL of the home page for ``course``.

    Arguments:
        course (CourseDescriptor): The course being tested.
    """
    course_key_string = six.text_type(course.id)
    return course_home_url_from_string(course_key_string)
def course_home_url_from_string(course_key_string):
    """
    Return the home page URL for the course identified by ``course_key_string``.

    Arguments:
        course_key_string (String): The course key as string.
    """
    url_kwargs = {'course_id': course_key_string}
    return reverse('openedx.course_experience.course_home', kwargs=url_kwargs)
class CourseHomePageTestCase(SharedModuleStoreTestCase):
    """
    Base class for testing the course home page.
    """
    @classmethod
    def setUpClass(cls):
        """
        Set up a course to be used for testing.
        """
        # pylint: disable=super-method-not-called
        with cls.setUpClassAndTestData():
            with cls.store.default_store(ModuleStoreEnum.Type.split):
                # Two courses that differ only in invitation_only, so tests
                # can compare open vs. invitation-only access behavior.
                cls.course = CourseFactory.create(
                    org='edX',
                    number='test',
                    display_name='Test Course',
                    start=now() - timedelta(days=30),
                    metadata={"invitation_only": False}
                )
                cls.private_course = CourseFactory.create(
                    org='edX',
                    number='test',
                    display_name='Test Private Course',
                    start=now() - timedelta(days=30),
                    metadata={"invitation_only": True}
                )
                with cls.store.bulk_operations(cls.course.id):
                    # Minimal outline: one chapter, two sequentials, each
                    # with a vertical, so the outline renders content.
                    chapter = ItemFactory.create(
                        category='chapter',
                        parent_location=cls.course.location,
                        display_name=TEST_CHAPTER_NAME,
                    )
                    section = ItemFactory.create(category='sequential', parent_location=chapter.location)
                    section2 = ItemFactory.create(category='sequential', parent_location=chapter.location)
                    ItemFactory.create(category='vertical', parent_location=section.location)
                    ItemFactory.create(category='vertical', parent_location=section2.location)

    @classmethod
    def setUpTestData(cls):
        """Set up and enroll our fake user in the course."""
        super(CourseHomePageTestCase, cls).setUpTestData()
        cls.staff_user = StaffFactory(course_key=cls.course.id, password=TEST_PASSWORD)
        cls.user = UserFactory(password=TEST_PASSWORD)
        CourseEnrollment.enroll(cls.user, cls.course.id)

    def create_future_course(self, specific_date=None):
        """
        Creates and returns a course in the future.

        Arguments:
            specific_date (datetime or None): start date to use; defaults
                to 30 days from now when omitted.
        """
        return CourseFactory.create(
            display_name='Test Future Course',
            start=specific_date if specific_date else now() + timedelta(days=30),
        )
class TestCourseHomePage(CourseHomePageTestCase):
    """
    Tests for the course home page rendered for an enrolled, logged-in user:
    welcome message, updates tool visibility, query counts, and start-date
    handling.
    """
    def setUp(self):
        super(TestCourseHomePage, self).setUp()
        self.client.login(username=self.user.username, password=TEST_PASSWORD)

    def tearDown(self):
        remove_course_updates(self.user, self.course)
        super(TestCourseHomePage, self).tearDown()

    def test_welcome_message_when_unified(self):
        # Create a welcome message
        create_course_update(self.course, self.user, TEST_WELCOME_MESSAGE)

        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertContains(response, TEST_WELCOME_MESSAGE, status_code=200)

    @override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=False)
    def test_welcome_message_when_not_unified(self):
        # Create a welcome message
        create_course_update(self.course, self.user, TEST_WELCOME_MESSAGE)

        url = course_home_url(self.course)
        response = self.client.get(url)
        # With the unified tab disabled, the welcome message must not appear.
        self.assertNotContains(response, TEST_WELCOME_MESSAGE, status_code=200)

    def test_updates_tool_visibility(self):
        """
        Verify that the updates course tool is visible only when the course
        has one or more updates.
        """
        remove_course_updates(self.user, self.course)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertNotContains(response, TEST_COURSE_UPDATES_TOOL, status_code=200)

        create_course_update(self.course, self.user, TEST_UPDATE_MESSAGE)
        url = course_home_url(self.course)
        response = self.client.get(url)
        self.assertContains(response, TEST_COURSE_UPDATES_TOOL, status_code=200)

    def test_queries(self):
        """
        Verify that the view's query count doesn't regress.
        """
        CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2018, 1, 1))

        # Pre-fetch the view to populate any caches
        # NOTE(review): this line only builds the URL string — it never
        # issues a GET, so no caches are actually warmed. Confirm whether
        # ``self.client.get(...)`` was intended; actually fetching here
        # would likely change the calibrated query count below.
        course_home_url(self.course)

        # Fetch the view and verify the query counts
        # TODO: decrease query count as part of REVO-28
        with self.assertNumQueries(77, table_blacklist=QUERY_COUNT_TABLE_BLACKLIST):
            with check_mongo_calls(4):
                url = course_home_url(self.course)
                self.client.get(url)

    @mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_START_DATES': False})
    def test_start_date_handling(self):
        """
        Verify that the course home page handles start dates correctly.
        """
        # The course home page should 404 for a course starting in the future
        future_course = self.create_future_course(datetime(2030, 1, 1, tzinfo=UTC))
        url = course_home_url(future_course)
        response = self.client.get(url)
        self.assertRedirects(response, '/dashboard?notlive=Jan+01%2C+2030')

        # With the Waffle flag enabled, the course should be visible
        with override_flag(COURSE_PRE_START_ACCESS_FLAG.namespaced_flag_name, True):
            url = course_home_url(future_course)
            response = self.client.get(url)
            self.assertEqual(response.status_code, 200)
@ddt.ddt
class TestCourseHomePageAccess(CourseHomePageTestCase):
"""
Test access to the course home page.
"""
def setUp(self):
super(TestCourseHomePageAccess, self).setUp()
# Make this a verified course so that an upgrade message might be shown
add_course_mode(self.course, upgrade_deadline_expired=False)
# Add a welcome message
create_course_update(self.course, self.staff_user, TEST_WELCOME_MESSAGE)
def tearDown(self):
remove_course_updates(self.staff_user, self.course)
super(TestCourseHomePageAccess, self).tearDown()
@override_waffle_flag(SHOW_REVIEWS_TOOL_FLAG, active=True)
@ddt.data(
[False, COURSE_VISIBILITY_PRIVATE, CourseUserType.ANONYMOUS, True, False],
[False, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.ANONYMOUS, True, False],
[False, COURSE_VISIBILITY_PUBLIC, CourseUserType.ANONYMOUS, True, False],
[True, COURSE_VISIBILITY_PRIVATE, CourseUserType.ANONYMOUS, True, False],
[True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.ANONYMOUS, True, True],
[True, COURSE_VISIBILITY_PUBLIC, CourseUserType.ANONYMOUS, True, True],
[False, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED, True, False],
[False, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.UNENROLLED, True, False],
[False, COURSE_VISIBILITY_PUBLIC, CourseUserType.UNENROLLED, True, False],
[True, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED, True, False],
[True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.UNENROLLED, True, True],
[True, COURSE_VISIBILITY_PUBLIC, CourseUserType.UNENROLLED, True, True],
[False, COURSE_VISIBILITY_PRIVATE, CourseUserType.ENROLLED, False, True],
[True, COURSE_VISIBILITY_PRIVATE, CourseUserType.ENROLLED, False, True],
[True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.ENROLLED, False, True],
[True, COURSE_VISIBILITY_PUBLIC, CourseUserType.ENROLLED, False, True],
[False, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED_STAFF, True, True],
[True, COURSE_VISIBILITY_PRIVATE, CourseUserType.UNENROLLED_STAFF, True, True],
[True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.UNENROLLED_STAFF, True, True],
[True, COURSE_VISIBILITY_PUBLIC, CourseUserType.UNENROLLED_STAFF, True, True],
[False, COURSE_VISIBILITY_PRIVATE, CourseUserType.GLOBAL_STAFF, True, True],
[True, COURSE_VISIBILITY_PRIVATE, CourseUserType.GLOBAL_STAFF, True, True],
[True, COURSE_VISIBILITY_PUBLIC_OUTLINE, CourseUserType.GLOBAL_STAFF, True, True],
[True, COURSE_VISIBILITY_PUBLIC, CourseUserType.GLOBAL_STAFF, True, True],
)
@ddt.unpack
def test_home_page(
self, enable_unenrolled_access, course_visibility, user_type,
expected_enroll_message, expected_course_outline,
):
self.create_user_for_course(self.course, user_type)
# Render the course home page
with mock.patch('xmodule.course_module.CourseDescriptor.course_visibility', course_visibility):
# Test access with anonymous flag and course visibility
with override_waffle_flag(COURSE_ENABLE_UNENROLLED_ACCESS_FLAG, enable_unenrolled_access):
url = course_home_url(self.course)
response = self.client.get(url)
private_url = course_home_url(self.private_course)
private_response = self.client.get(private_url)
# Verify that the course tools and dates are always shown
self.assertContains(response, TEST_COURSE_TOOLS)
is_anonymous = user_type is CourseUserType.ANONYMOUS
is_enrolled = user_type is CourseUserType.ENROLLED
is_enrolled_or_staff = is_enrolled or user_type in (
CourseUserType.UNENROLLED_STAFF, CourseUserType.GLOBAL_STAFF
)
self.assertContains(response, 'Learn About Verified Certificate', count=(1 if is_enrolled else 0))
# Verify that start button, course sock, and welcome message
# are only shown to enrolled users or staff.
self.assertContains(response, 'Start Course', count=(1 if is_enrolled_or_staff else 0))
self.assertContains(response, TEST_WELCOME_MESSAGE, count=(1 if is_enrolled_or_staff else 0))
# Verify the outline is shown to enrolled users, unenrolled_staff and anonymous users if allowed
self.assertContains(response, TEST_CHAPTER_NAME, count=(1 if expected_course_outline else 0))
# Verify the message shown to the user
if not enable_unenrolled_access or course_visibility != COURSE_VISIBILITY_PUBLIC:
self.assertContains(
response, 'To see course content', count=(1 if is_anonymous else 0)
)
self.assertContains(response, '<div class="user-messages"', count=(1 if expected_enroll_message else 0))
if expected_enroll_message:
self.assertContains(response, 'You must be enrolled in the course to see course content.')
if enable_unenrolled_access and course_visibility == COURSE_VISIBILITY_PUBLIC:
if user_type == CourseUserType.UNENROLLED and self.private_course.invitation_only:
if expected_enroll_message:
self.assertContains(private_response,
'You must be enrolled in the course to see course content.')
@override_waffle_flag(UNIFIED_COURSE_TAB_FLAG, active=False)
@override_waffle_flag(SHOW_REVIEWS_TOOL_FLAG, active=True)
@ddt.data(
    [CourseUserType.ANONYMOUS, 'To see course content'],
    [CourseUserType.ENROLLED, None],
    [CourseUserType.UNENROLLED, 'You must be enrolled in the course to see course content.'],
    [CourseUserType.UNENROLLED_STAFF, 'You must be enrolled in the course to see course content.'],
)
@ddt.unpack
def test_home_page_not_unified(self, user_type, expected_message):
    """
    Verifies the course home tab when not unified.
    """
    self.create_user_for_course(self.course, user_type)
    # Render the course home page for this user type.
    response = self.client.get(course_home_url(self.course))
    # Course tools are always visible, regardless of user type.
    self.assertContains(response, TEST_COURSE_TOOLS)
    # Welcome messages never appear on the non-unified tab.
    self.assertNotContains(response, TEST_WELCOME_MESSAGE)
    # Outline and start button are reserved for enrolled users and unenrolled staff.
    enrolled = user_type is CourseUserType.ENROLLED
    staff_unenrolled = user_type is CourseUserType.UNENROLLED_STAFF
    visible = 1 if (enrolled or staff_unenrolled) else 0
    self.assertContains(response, TEST_CHAPTER_NAME, count=visible)
    self.assertContains(response, 'Start Course', count=visible)
    self.assertContains(response, 'Learn About Verified Certificate', count=(1 if enrolled else 0))
    # The per-user-type message, when any, is rendered inside the user-messages div.
    self.assertContains(response, '<div class="user-messages"', count=1 if expected_message else 0)
    if expected_message:
        self.assertContains(response, expected_message)
def test_sign_in_button(self):
    """
    Verify that the sign in button will return to this page.
    """
    home_url = course_home_url(self.course)
    resp = self.client.get(home_url)
    # The login link must carry the current page as its ?next= target.
    expected_link = '/login?next={url}'.format(url=urlquote_plus(home_url))
    self.assertContains(resp, expected_link)
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
def test_non_live_course(self):
    """
    Ensure that a user accessing a non-live course sees a redirect to
    the student dashboard, not a 404.
    """
    upcoming_course = self.create_future_course()
    self.create_user_for_course(upcoming_course, CourseUserType.ENROLLED)
    response = self.client.get(course_home_url(upcoming_course))
    # The redirect carries the localized start date as the ?notlive= parameter.
    params = QueryDict(mutable=True)
    params['notlive'] = strftime_localized(upcoming_course.start, 'SHORT_DATE')
    self.assertRedirects(
        response,
        '{url}?{params}'.format(url=reverse('dashboard'), params=params.urlencode())
    )
@mock.patch('openedx.features.discounts.utils.discount_percentage')
@mock.patch('openedx.features.discounts.utils.can_receive_discount')
@ddt.data(
    [True, 15],
    [True, 13],
    [True, 0],
    [False, 15])
@ddt.unpack
def test_first_purchase_offer_banner_display(self,
                                             applicability,
                                             percentage,
                                             can_receive_discount_mock,
                                             discount_percentage_mock):
    """
    Ensure first purchase offer banner displays correctly.

    The banner must appear (with the mocked discount percentage) when the
    discount is applicable, and must be absent otherwise.
    """
    # Stub the discount machinery so the view sees a fixed applicability/percentage.
    can_receive_discount_mock.return_value = applicability
    discount_percentage_mock.return_value = percentage
    user = self.create_user_for_course(self.course, CourseUserType.ENROLLED)
    # The REV1008 experiment record marks this user/course as participating
    # in the first-purchase-offer experiment.
    now_time = datetime.now(tz=UTC).strftime(u"%Y-%m-%d %H:%M:%S%z")
    ExperimentData.objects.create(
        user=user, experiment_id=REV1008_EXPERIMENT_ID, key=str(self.course), value=now_time
    )
    self.client.login(username=user.username, password=self.TEST_PASSWORD)
    url = course_home_url(self.course)
    response = self.client.get(url)
    discount_expiration_date = get_discount_expiration_date(user, self.course).strftime(u'%B %d')
    upgrade_link = verified_upgrade_deadline_link(user=user, course=self.course)
    # Expected banner markup; compared with html=True so whitespace differences are ignored.
    bannerText = u'''<div class="first-purchase-offer-banner" role="note">
<span class="first-purchase-offer-banner-bold">
Upgrade by {discount_expiration_date} and save {percentage}% [{strikeout_price}]</span>
<br>Use code <b>EDXWELCOME</b> at checkout! <a href="{upgrade_link}">Upgrade Now</a>
</div>'''.format(
        discount_expiration_date=discount_expiration_date,
        percentage=percentage,
        strikeout_price=HTML(format_strikeout_price(user, self.course, check_for_discount=False)[0]),
        upgrade_link=upgrade_link
    )
    if applicability:
        self.assertContains(response, bannerText, html=True)
    else:
        self.assertNotContains(response, bannerText, html=True)
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
def test_course_does_not_expire_for_verified_user(self):
    """
    There are a number of different roles/users that should not lose access after the expiration date.
    Ensure that users who should not lose access get a 200 (ok) response
    when attempting to visit the course after their would be expiration date.
    """
    old_course = CourseFactory.create(start=THREE_YEARS_AGO)
    learner = UserFactory.create(password=self.TEST_PASSWORD)
    # A verified-track enrollment: verified learners keep indefinite access
    # even though the schedule started three years ago.
    ScheduleFactory(
        start_date=THREE_YEARS_AGO,
        enrollment__mode=CourseMode.VERIFIED,
        enrollment__course_id=old_course.id,
        enrollment__user=learner
    )
    self.client.login(username=learner.username, password=self.TEST_PASSWORD)
    response = self.client.get(course_home_url(old_course))
    self.assertEqual(response.status_code, 200, "Should not expire access for user")
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
@ddt.data(
    InstructorFactory,
    StaffFactory,
    BetaTesterFactory,
    OrgStaffFactory,
    OrgInstructorFactory,
)
def test_course_does_not_expire_for_course_staff(self, role_factory):
    """
    There are a number of different roles/users that should not lose access after the expiration date.
    Ensure that users who should not lose access get a 200 (ok) response
    when attempting to visit the course after their would be expiration date.
    """
    old_course = CourseFactory.create(start=THREE_YEARS_AGO)
    staff_member = role_factory.create(password=self.TEST_PASSWORD, course_key=old_course.id)
    # An audit schedule that would normally have expired long ago.
    ScheduleFactory(
        start_date=THREE_YEARS_AGO,
        enrollment__mode=CourseMode.AUDIT,
        enrollment__course_id=old_course.id,
        enrollment__user=staff_member
    )
    # Course-level staff roles retain indefinite access.
    self.client.login(username=staff_member.username, password=self.TEST_PASSWORD)
    response = self.client.get(course_home_url(old_course))
    self.assertEqual(response.status_code, 200, "Should not expire access for user")
@ddt.data(
    FORUM_ROLE_COMMUNITY_TA,
    FORUM_ROLE_GROUP_MODERATOR,
    FORUM_ROLE_MODERATOR,
    FORUM_ROLE_ADMINISTRATOR
)
def test_course_does_not_expire_for_user_with_course_role(self, role_name):
    """
    Test that users with the above roles for a course do not lose access
    """
    course = CourseFactory.create(start=THREE_YEARS_AGO)
    url = course_home_url(course)
    # NOTE(review): unlike the sibling tests, this user is created without
    # password=self.TEST_PASSWORD — confirm the factory's default password
    # matches TEST_PASSWORD, otherwise the login below silently fails.
    user = UserFactory.create()
    # Grant the forum role under test for this course.
    role = RoleFactory(name=role_name, course_id=course.id)
    role.users.add(user)
    # ensure the user has indefinite access
    self.client.login(username=user.username, password=self.TEST_PASSWORD)
    response = self.client.get(url)
    self.assertEqual(
        response.status_code,
        200,
        "Should not expire access for user"
    )
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
@ddt.data(
    GlobalStaffFactory,
)
def test_course_does_not_expire_for_global_users(self, role_factory):
    """
    There are a number of different roles/users that should not lose access after the expiration date.
    Ensure that users who should not lose access get a 200 (ok) response
    when attempting to visit the course after their would be expiration date.
    """
    old_course = CourseFactory.create(start=THREE_YEARS_AGO)
    global_user = role_factory.create(password=self.TEST_PASSWORD)
    # An audit schedule that would normally have expired long ago.
    ScheduleFactory(
        start_date=THREE_YEARS_AGO,
        enrollment__mode=CourseMode.AUDIT,
        enrollment__course_id=old_course.id,
        enrollment__user=global_user
    )
    # Global staff retain indefinite access.
    self.client.login(username=global_user.username, password=self.TEST_PASSWORD)
    response = self.client.get(course_home_url(old_course))
    self.assertEqual(response.status_code, 200, "Should not expire access for user")
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
def test_expired_course(self):
    """
    Ensure that a user accessing an expired course sees a redirect to
    the student dashboard, not a 404.
    """
    # Enable course duration limits globally (retroactively, from 2010).
    CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1))
    course = CourseFactory.create(start=THREE_YEARS_AGO)
    url = course_home_url(course)
    for mode in [CourseMode.AUDIT, CourseMode.VERIFIED]:
        CourseModeFactory.create(course_id=course.id, mode_slug=mode)
    # assert that if an expired audit user tries to access the course they are redirected to the dashboard
    audit_user = UserFactory(password=self.TEST_PASSWORD)
    self.client.login(username=audit_user.username, password=self.TEST_PASSWORD)
    audit_enrollment = CourseEnrollment.enroll(audit_user, course.id, mode=CourseMode.AUDIT)
    ScheduleFactory(start_date=THREE_YEARS_AGO + timedelta(days=1), enrollment=audit_enrollment)
    response = self.client.get(url)
    # Expected expiration date: presumably course start + the 4-week minimum
    # audit access duration + the one-day schedule offset used above — the
    # offsets mirror the ScheduleFactory start_date. TODO confirm against
    # the course duration limit implementation.
    expiration_date = strftime_localized(course.start + timedelta(weeks=4) + timedelta(days=1), u'%b %-d, %Y')
    expected_params = QueryDict(mutable=True)
    course_name = CourseOverview.get_from_id(course.id).display_name_with_default
    expected_params['access_response_error'] = u'Access to {run} expired on {expiration_date}'.format(
        run=course_name,
        expiration_date=expiration_date
    )
    expected_url = '{url}?{params}'.format(
        url=reverse('dashboard'),
        params=expected_params.urlencode()
    )
    self.assertRedirects(response, expected_url)
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
def test_expiration_banner_with_expired_upgrade_deadline(self):
    """
    Ensure that a user accessing a course with an expired upgrade deadline
    will still see the course expiration banner without the upgrade related text.
    """
    long_ago = datetime(2010, 1, 1)
    CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=long_ago)
    course = CourseFactory.create(start=now() - timedelta(days=10))
    CourseModeFactory.create(course_id=course.id, mode_slug=CourseMode.AUDIT)
    # A verified mode exists, but its upgrade deadline is already in the past.
    CourseModeFactory.create(course_id=course.id, mode_slug=CourseMode.VERIFIED, expiration_datetime=long_ago)
    learner = UserFactory(password=self.TEST_PASSWORD)
    self.client.login(username=learner.username, password=self.TEST_PASSWORD)
    CourseEnrollment.enroll(learner, course.id, mode=CourseMode.AUDIT)
    response = self.client.get(course_home_url(course))
    expected_banner = get_expiration_banner_text(learner, course)
    self.assertContains(response, expected_banner, html=True)
    self.assertContains(response, TEST_BANNER_CLASS)
def test_audit_only_not_expired(self):
    """
    Verify that enrolled users are NOT shown the course expiration banner and can
    access the course home page if course audit only
    """
    CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1))
    audit_only = CourseFactory.create()
    self.create_user_for_course(audit_only, CourseUserType.ENROLLED)
    response = self.client.get(course_home_url(audit_only))
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, TEST_COURSE_TOOLS)
    # Audit-only courses are not gated, so no expiration banner is rendered.
    self.assertNotContains(response, TEST_BANNER_CLASS)
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
def test_expired_course_in_holdback(self):
    """
    Ensure that a user accessing an expired course that is in the FBE
    holdback gets a 200 response rather than being redirected to the
    student dashboard.
    """
    # Enable course duration limits globally (retroactively, from 2010).
    CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1))
    course = CourseFactory.create(start=THREE_YEARS_AGO)
    url = course_home_url(course)
    for mode in [CourseMode.AUDIT, CourseMode.VERIFIED]:
        CourseModeFactory.create(course_id=course.id, mode_slug=mode)
    # assert that if an expired audit user in the holdback tries to access the course
    # they are not redirected to the dashboard
    audit_user = UserFactory(password=self.TEST_PASSWORD)
    self.client.login(username=audit_user.username, password=self.TEST_PASSWORD)
    audit_enrollment = CourseEnrollment.enroll(audit_user, course.id, mode=CourseMode.AUDIT)
    ScheduleFactory(start_date=THREE_YEARS_AGO, enrollment=audit_enrollment)
    # The exclusion record opts this enrollment out of duration limits.
    FBEEnrollmentExclusion.objects.create(
        enrollment=audit_enrollment
    )
    response = self.client.get(url)
    self.assertEqual(response.status_code, 200)
@mock.patch.dict(settings.FEATURES, {'DISABLE_START_DATES': False})
@mock.patch("util.date_utils.strftime_localized")
def test_non_live_course_other_language(self, mock_strftime_localized):
    """
    Ensure that a user accessing a non-live course sees a redirect to
    the student dashboard, not a 404, even if the localized date is unicode
    """
    upcoming_course = self.create_future_course()
    self.create_user_for_course(upcoming_course, CourseUserType.ENROLLED)
    # Force the localized start date to a non-ASCII value.
    fake_unicode_start_time = u"üñîçø∂é_ßtå®t_tîµé"
    mock_strftime_localized.return_value = fake_unicode_start_time
    response = self.client.get(course_home_url(upcoming_course))
    params = QueryDict(mutable=True)
    params['notlive'] = fake_unicode_start_time
    self.assertRedirects(
        response,
        u'{url}?{params}'.format(url=reverse('dashboard'), params=params.urlencode())
    )
def test_nonexistent_course(self):
    """
    Ensure a non-existent course results in a 404.
    """
    self.create_user_for_course(self.course, CourseUserType.ANONYMOUS)
    # Request a course key that was never created.
    response = self.client.get(course_home_url_from_string('not/a/course'))
    self.assertEqual(response.status_code, 404)
@override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
def test_masters_course_message(self):
    """
    Unenrolled users see an enroll call-to-action on a course that offers a
    Master's track alongside other tracks, but not when the Master's track
    is the only one available.
    """
    enroll_button_html = "<button class=\"enroll-btn btn-link\">Enroll now</button>"
    # Verify that unenrolled users visiting a course with a Master's track
    # that is not the only track are shown an enroll call to action message
    add_course_mode(self.course, CourseMode.MASTERS, 'Master\'s Mode', upgrade_deadline_expired=False)
    self.create_user_for_course(self.course, CourseUserType.UNENROLLED)
    url = course_home_url(self.course)
    response = self.client.get(url)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
    self.assertContains(response, enroll_button_html)
    # Verify that unenrolled users visiting a course that contains only a Master's track
    # are not shown an enroll call to action message
    remove_course_mode(self.course, CourseMode.VERIFIED)
    response = self.client.get(url)
    expected_message = ('You must be enrolled in the course to see course content. '
                        'Please contact your degree administrator or edX Support if you have questions.')
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
    self.assertContains(response, expected_message)
    self.assertNotContains(response, enroll_button_html)
@override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
def test_course_messaging(self):
    """
    Ensure that the following five use cases work as expected
    1) Anonymous users are shown a course message linking them to the login page
    2) Unenrolled users are shown a course message allowing them to enroll
    3) Enrolled users who show up on the course page after the course has begun
    are not shown a course message.
    4) Enrolled users who show up on the course page after the course has begun will
    see the course expiration banner if course duration limits are on for the course.
    5) Enrolled users who show up on the course page before the course begins
    are shown a message explaining when the course starts as well as a call to
    action button that allows them to add a calendar event.
    """
    # Verify that anonymous users are shown a login link in the course message
    url = course_home_url(self.course)
    response = self.client.get(url)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE_ANONYMOUS)
    # Verify that unenrolled users are shown an enroll call to action message
    user = self.create_user_for_course(self.course, CourseUserType.UNENROLLED)
    url = course_home_url(self.course)
    response = self.client.get(url)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
    # Verify that enrolled users are not shown any state warning message when enrolled and course has begun.
    CourseEnrollment.enroll(user, self.course.id)
    url = course_home_url(self.course)
    response = self.client.get(url)
    self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_ANONYMOUS)
    self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_UNENROLLED)
    self.assertNotContains(response, TEST_COURSE_HOME_MESSAGE_PRE_START)
    # Verify that enrolled users are shown the course expiration banner if content gating is enabled
    # We use .save() explicitly here (rather than .objects.create) in order to force the
    # cache to refresh.
    config = CourseDurationLimitConfig(
        course=CourseOverview.get_from_id(self.course.id),
        enabled=True,
        enabled_as_of=datetime(2018, 1, 1)
    )
    config.save()
    url = course_home_url(self.course)
    response = self.client.get(url)
    bannerText = get_expiration_banner_text(user, self.course)
    self.assertContains(response, bannerText, html=True)
    # Verify that enrolled users are not shown the course expiration banner if content gating is disabled
    config.enabled = False
    config.save()
    url = course_home_url(self.course)
    response = self.client.get(url)
    bannerText = get_expiration_banner_text(user, self.course)
    self.assertNotContains(response, bannerText, html=True)
    # Verify that enrolled users are shown 'days until start' message before start date
    future_course = self.create_future_course()
    CourseEnrollment.enroll(user, future_course.id)
    url = course_home_url(future_course)
    response = self.client.get(url)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE)
    self.assertContains(response, TEST_COURSE_HOME_MESSAGE_PRE_START)
def test_course_messaging_for_staff(self):
    """
    Staff users will not see the expiration banner when course duration limits
    are on for the course.
    """
    duration_config = CourseDurationLimitConfig(
        course=CourseOverview.get_from_id(self.course.id),
        enabled=True,
        enabled_as_of=datetime(2018, 1, 1)
    )
    duration_config.save()
    CourseEnrollment.enroll(self.staff_user, self.course.id)
    response = self.client.get(course_home_url(self.course))
    # Even enrolled, a staff user must not be shown the expiration banner.
    banner_text = get_expiration_banner_text(self.staff_user, self.course)
    self.assertNotContains(response, banner_text, html=True)
@mock.patch("util.date_utils.strftime_localized")
@mock.patch("openedx.features.course_duration_limits.access.get_date_string")
def test_course_expiration_banner_with_unicode(self, mock_strftime_localized, mock_get_date_string):
    """
    Ensure that switching to other languages that have unicode in their
    date representations will not cause the course home page to 404.
    """
    # Force the localized date to a non-ASCII value.
    fake_unicode_start_time = u"üñîçø∂é_ßtå®t_tîµé"
    mock_strftime_localized.return_value = fake_unicode_start_time
    date_string = u'<span class="localized-datetime" data-format="shortDate" \
data-datetime="{formatted_date}" data-language="{language}">{formatted_date_localized}</span>'
    mock_get_date_string.return_value = date_string
    # Enable course duration limits so the banner (containing the date) renders.
    config = CourseDurationLimitConfig(
        course=CourseOverview.get_from_id(self.course.id),
        enabled=True,
        enabled_as_of=datetime(2018, 1, 1)
    )
    config.save()
    url = course_home_url(self.course)
    user = self.create_user_for_course(self.course, CourseUserType.UNENROLLED)
    CourseEnrollment.enroll(user, self.course.id)
    # Release Esperanto via dark-lang so the Accept-Language header is honored.
    language = 'eo'
    DarkLangConfig(
        released_languages=language,
        changed_by=user,
        enabled=True
    ).save()
    response = self.client.get(url, HTTP_ACCEPT_LANGUAGE=language)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Language'], language)
    # Check that if the string is incorrectly not marked as unicode we still get the error
    with mock.patch("openedx.features.course_duration_limits.access.get_date_string",
                    return_value=date_string.encode('utf-8')):
        response = self.client.get(url, HTTP_ACCEPT_LANGUAGE=language)
        self.assertEqual(response.status_code, 500)
@override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
@override_waffle_flag(ENABLE_COURSE_GOALS, active=True)
def test_course_goals(self):
    """
    Ensure that the following five use cases work as expected.
    1) Unenrolled users are not shown the set course goal message.
    2) Enrolled users are shown the set course goal message if they have not yet set a course goal.
    3) Enrolled users are not shown the set course goal message if they have set a course goal.
    4) Enrolled and verified users are not shown the set course goal message.
    5) Enrolled users are not shown the set course goal message in a course that cannot be verified.
    """
    # Create a course with a verified track.
    verifiable_course = CourseFactory.create()
    add_course_mode(verifiable_course, upgrade_deadline_expired=False)
    # Verify that unenrolled users are not shown the set course goal message.
    user = self.create_user_for_course(verifiable_course, CourseUserType.UNENROLLED)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
    # Verify that enrolled users are shown the set course goal message in a verified course.
    CourseEnrollment.enroll(user, verifiable_course.id)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertContains(response, TEST_COURSE_GOAL_OPTIONS)
    # Verify that enrolled users that have set a course goal are not shown the set course goal message.
    add_course_goal(user, verifiable_course.id, COURSE_GOAL_DISMISS_OPTION)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
    # Verify that enrolled and verified users are not shown the set course goal message.
    # The earlier goal is removed first so the verified track is what hides the message.
    remove_course_goal(user, str(verifiable_course.id))
    CourseEnrollment.enroll(user, verifiable_course.id, CourseMode.VERIFIED)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
    # Verify that enrolled users are not shown the set course goal message in an audit only course.
    audit_only_course = CourseFactory.create()
    CourseEnrollment.enroll(user, audit_only_course.id)
    response = self.client.get(course_home_url(audit_only_course))
    self.assertNotContains(response, TEST_COURSE_GOAL_OPTIONS)
@override_waffle_flag(COURSE_PRE_START_ACCESS_FLAG, active=True)
@override_waffle_flag(ENABLE_COURSE_GOALS, active=True)
def test_course_goal_updates(self):
    """
    Ensure that the following four use cases work as expected.
    1) Unenrolled users are not shown the update goal selection field.
    2) Enrolled users are not shown the update goal selection field if they have not yet set a course goal.
    3) Enrolled users are shown the update goal selection field if they have set a course goal.
    4) Enrolled users in the verified track are shown the update goal selection field.
    """
    # Create a course with a verified track.
    verifiable_course = CourseFactory.create()
    add_course_mode(verifiable_course, upgrade_deadline_expired=False)
    # Verify that unenrolled users are not shown the update goal selection field.
    user = self.create_user_for_course(verifiable_course, CourseUserType.UNENROLLED)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
    # Verify that enrolled users that have not set a course goal are shown a hidden update goal selection field.
    enrollment = CourseEnrollment.enroll(user, verifiable_course.id)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
    # Verify that enrolled users that have set a course goal are shown a visible update goal selection field.
    add_course_goal(user, verifiable_course.id, COURSE_GOAL_DISMISS_OPTION)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
    self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
    # Verify that enrolled and verified users are shown the update goal selection
    CourseEnrollment.update_enrollment(enrollment, is_active=True, mode=CourseMode.VERIFIED)
    response = self.client.get(course_home_url(verifiable_course))
    self.assertContains(response, TEST_COURSE_GOAL_UPDATE_FIELD)
    self.assertNotContains(response, TEST_COURSE_GOAL_UPDATE_FIELD_HIDDEN)
@ddt.ddt
class CourseHomeFragmentViewTests(ModuleStoreTestCase):
    """
    Test Messages Displayed on the Course Home

    Covers the upgrade ("section-upgrade") message: it should appear only for
    logged-in, audit-enrolled users while the verified mode's upgrade deadline
    has not passed and the SHOW_UPGRADE_MSG_ON_COURSE_HOME flag is enabled.
    """
    # Skip the base class's automatic user creation; each test sets up its own.
    CREATE_USER = False

    def setUp(self):
        super(CourseHomeFragmentViewTests, self).setUp()
        # Route checkout through the ecommerce service so upgrade links resolve.
        CommerceConfiguration.objects.create(checkout_on_ecommerce_service=True)
        end = now() + timedelta(days=30)
        # A self-paced course that is currently running.
        self.course = CourseFactory(
            start=now() - timedelta(days=30),
            end=end,
            self_paced=True,
        )
        self.url = course_home_url(self.course)
        CourseMode.objects.create(course_id=self.course.id, mode_slug=CourseMode.AUDIT)
        # Verified mode with a future upgrade deadline and a purchasable SKU.
        self.verified_mode = CourseMode.objects.create(
            course_id=self.course.id,
            mode_slug=CourseMode.VERIFIED,
            min_price=100,
            expiration_datetime=end,
            sku='test'
        )
        self.user = UserFactory()
        self.client.login(username=self.user.username, password=TEST_PASSWORD)
        # Turn the upgrade-message waffle flag on for everyone by default;
        # individual tests flip it off as needed.
        name = SHOW_UPGRADE_MSG_ON_COURSE_HOME.waffle_namespace._namespaced_name(
            SHOW_UPGRADE_MSG_ON_COURSE_HOME.flag_name)
        self.flag, __ = Flag.objects.update_or_create(name=name, defaults={'everyone': True})

    def assert_upgrade_message_not_displayed(self):
        # Helper: fetch the course home and assert the upgrade section is absent.
        response = self.client.get(self.url)
        self.assertNotContains(response, 'section-upgrade')

    def assert_upgrade_message_displayed(self):
        # Helper: fetch the course home and assert the upgrade section, the
        # ecommerce checkout link, and the price text are all present.
        response = self.client.get(self.url)
        self.assertContains(response, 'section-upgrade')
        url = EcommerceService().get_checkout_page_url(self.verified_mode.sku)
        self.assertContains(response, '<a class="btn-brand btn-upgrade"')
        self.assertContains(response, url)
        self.assertContains(
            response,
            u"Upgrade (<span class='price'>${price}</span>)".format(price=self.verified_mode.min_price),
        )

    def test_no_upgrade_message_if_logged_out(self):
        self.client.logout()
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_not_enrolled(self):
        self.assertEqual(len(CourseEnrollment.enrollments_for_user(self.user)), 0)
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_verified_track(self):
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.VERIFIED)
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_upgrade_deadline_passed(self):
        self.verified_mode.expiration_datetime = now() - timedelta(days=20)
        self.verified_mode.save()
        self.assert_upgrade_message_not_displayed()

    def test_no_upgrade_message_if_flag_disabled(self):
        self.flag.everyone = False
        self.flag.save()
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)
        self.assert_upgrade_message_not_displayed()

    def test_display_upgrade_message_if_audit_and_deadline_not_passed(self):
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)
        self.assert_upgrade_message_displayed()

    @mock.patch(
        'openedx.features.course_experience.views.course_home.format_strikeout_price',
        mock.Mock(return_value=(HTML("<span>DISCOUNT_PRICE</span>"), True))
    )
    def test_upgrade_message_discount(self):
        # When a discount applies, the strikeout price markup is rendered verbatim.
        CourseEnrollment.enroll(self.user, self.course.id, CourseMode.AUDIT)
        with SHOW_UPGRADE_MSG_ON_COURSE_HOME.override(True):
            response = self.client.get(self.url)
            self.assertContains(response, "<span>DISCOUNT_PRICE</span>")
|
from openerp.report import report_sxw
from openerp import models
class PayslipRunReport(report_sxw.rml_parse):
    """
    RML parser for the payslip batch (payslip run) report.

    Exposes helpers to the report template for grouping payslips by
    department and totalling worked hours and salary-rule lines.  Credit
    notes (refunds) contribute negatively to every total.
    """

    def __init__(self, cr, uid, name, context):
        super(PayslipRunReport, self).__init__(cr, uid, name, context=context)
        # Helpers callable from the report template.
        self.localcontext.update({
            'get_payslips_by_department': self._get_payslips_by_department,
            'get_worked_days_hours': self._get_worked_days_hours,
            'get_worked_days_hours_group': self._get_worked_days_hours_group,
            'get_worked_hours': self._get_worked_hours,
            'get_worked_hours_group': self._get_worked_hours_group,
            'get_line_total': self._get_line_total,
            'get_line_total_group': self._get_line_total_group,
        })

    @staticmethod
    def _sign(payslip):
        """Return -1.0 for credit notes (refunded amounts), 1.0 otherwise."""
        return -1.0 if payslip.credit_note else 1.0

    def _get_payslips_by_department(self, payslip_run):
        """
        Group the run's payslips by employee department.

        :param payslip_run: hr.payslip.run browse record
        :return: dict mapping department id to a tuple of
                 (department browse record, list of payslips in it)
        """
        department_obj = self.pool.get('hr.department')
        # Collect payslips per department id in a single pass.
        grouped = {}
        for payslip in payslip_run.slip_ids:
            grouped.setdefault(payslip.employee_id.department_id.id, []).append(payslip)
        res = {}
        for department_id, slips in grouped.items():
            department = department_obj.browse(
                self.cr, self.uid, department_id)
            res[department_id] = (department, slips)
        return res

    def _get_worked_days_hours(self, payslip, code='HN'):
        """
        Total hours of worked-days lines matching *code*, counting each
        recorded day as 8 hours (normal schedule in Costa Rica) in
        addition to the recorded hours.
        """
        sign = self._sign(payslip)
        return sum(
            (sign * (line.number_of_hours + line.number_of_days * 8.0)
             for line in payslip.worked_days_line_ids if line.code == code),
            0.0
        )

    # NOTE: the group defaults below are tuples, not lists, to avoid the
    # shared-mutable-default-argument pitfall; membership tests are unchanged.
    def _get_worked_days_hours_group(self, payslip, code=('HE', 'HEF', 'FE')):
        """Same as _get_worked_days_hours, matching any code in *code*."""
        sign = self._sign(payslip)
        return sum(
            (sign * (line.number_of_hours + line.number_of_days * 8.0)
             for line in payslip.worked_days_line_ids if line.code in code),
            0.0
        )

    def _get_worked_hours(self, payslip, code='HN'):
        """Total of number_of_hours for worked-days lines matching *code*."""
        sign = self._sign(payslip)
        return sum(
            (sign * line.number_of_hours
             for line in payslip.worked_days_line_ids if line.code == code),
            0.0
        )

    def _get_worked_hours_group(self, payslip, code=('HE', 'HEF', 'FE')):
        """Same as _get_worked_hours, matching any code in *code*."""
        sign = self._sign(payslip)
        return sum(
            (sign * line.number_of_hours
             for line in payslip.worked_days_line_ids if line.code in code),
            0.0
        )

    def _get_line_total(self, payslip, code='BASE'):
        """Total of salary-rule line amounts matching *code*."""
        sign = self._sign(payslip)
        return sum(
            (sign * line.total for line in payslip.line_ids if line.code == code),
            0.0
        )

    def _get_line_total_group(self, payslip, code=('EXT', 'EXT-FE', 'FE')):
        """Same as _get_line_total, matching any code in *code*."""
        sign = self._sign(payslip)
        return sum(
            (sign * line.total for line in payslip.line_ids if line.code in code),
            0.0
        )
class report_payslip_run(models.AbstractModel):
    """Registers the payslip-run QWeb report, rendering the template below
    through the legacy PayslipRunReport rml_parse class."""
    _name = 'report.l10n_cr_hr_payroll.report_payslip_run'
    _inherit = 'report.abstract_report'
    _template = 'l10n_cr_hr_payroll.report_payslip_run'
    _wrapped_report_class = PayslipRunReport
|
from django.conf.urls import url, patterns, include
from django.conf import settings

# NOTE: patterns() is the legacy (Django < 1.8) URLconf style; it was
# deprecated in 1.8 and removed in 1.10, so this module targets old Django.
urlpatterns = patterns('')

# Identity-provider endpoints are mounted only when the corresponding
# feature switch is enabled in settings.
if getattr(settings, 'IDP_SAML2', False):
    urlpatterns += patterns('',
        (r'^saml2/', include('authentic2.idp.saml.urls')),)
if getattr(settings, 'IDP_OPENID', False):
    urlpatterns += patterns('',
        (r'^openid/', include('authentic2.idp.idp_openid.urls')))

# User-consent views; the string view names are resolved relative to the
# 'authentic2.idp.interactions' module prefix.
urlpatterns += patterns('authentic2.idp.interactions',
    url(r'^consent_federation', 'consent_federation',
        name='a2-consent-federation'),
    url(r'^consent_attributes', 'consent_attributes',
        name='a2-consent-attributes'))
|
import logging
from odoo import api, fields, models
_logger = logging.getLogger(__name__)
class PersonAuxAssociateToFamilyAux(models.TransientModel):
    """Wizard that associates the selected Person (Aux) records with a
    Family (Aux), optionally creating the family record on the fly."""
    _description = 'Person (Aux) Associate to Family (Aux)'
    _name = 'clv.person_aux.associate_to_family_aux'

    def _default_person_aux_ids(self):
        # Pre-select the person_aux records the wizard was launched on.
        return self._context.get('active_ids')

    # Persons selected in the list view when the wizard was opened.
    person_aux_ids = fields.Many2many(
        comodel_name='clv.person_aux',
        relation='clv_person_aux_associate_to_family_aux_rel',
        string='Persons (Aux)',
        default=_default_person_aux_ids
    )
    # When no matching Family (Aux) exists, create one instead of skipping.
    create_new_family_aux = fields.Boolean(
        string='Create new Family (Aux)',
        default=True,
        readonly=False
    )
@api.multi
def _reopen_form(self):
    """Return an act_window action that re-opens this wizard in a dialog."""
    self.ensure_one()
    return {
        'type': 'ir.actions.act_window',
        'res_model': self._name,
        'res_id': self.id,
        'view_type': 'form',
        'view_mode': 'form',
        'target': 'new',
    }
@api.multi
def do_person_aux_associate_to_family_aux(self):
self.ensure_one()
person_aux_count = 0
for person_aux in self.person_aux_ids:
person_aux_count += 1
_logger.info(u'%s %s %s', '>>>>>', person_aux_count, person_aux.name)
FamilyAux = self.env['clv.family_aux']
family_aux = False
if person_aux.family_id.id is not False:
family_aux = FamilyAux.search([
('related_family_id', '=', person_aux.family_id.id),
])
elif person_aux.ref_address_aux_id.id is not False:
family_aux = FamilyAux.search([
('ref_address_aux_id', '=', person_aux.ref_address_aux_id.id),
])
_logger.info(u'%s %s %s', '>>>>>>>>>>', 'family_aux_id:', family_aux.id)
new_family_aux = False
if family_aux.id is not False:
new_family_aux = family_aux
else:
if self.create_new_family_aux:
values = {}
values['street'] = person_aux.family_id.street
_logger.info(u'%s %s %s', '>>>>>>>>>>', 'values:', values)
new_family_aux = FamilyAux.create(values)
_logger.info(u'%s %s %s', '>>>>>>>>>>', 'new_family_aux:', new_family_aux)
values = {}
values['related_family_id'] = person_aux.family_id.id
values['ref_address_id'] = person_aux.ref_address_id.id
values['ref_address_aux_id'] = person_aux.ref_address_aux_id.id
_logger.info(u'%s %s %s', '>>>>>>>>>>', 'values:', values)
new_family_aux.write(values)
# new_family_aux.do_family_aux_get_related_family_data()
new_family_aux.do_family_aux_get_ref_address_aux_data()
if new_family_aux is not False:
data_values = {}
data_values['family_aux_id'] = new_family_aux.id
_logger.info(u'>>>>>>>>>> %s', data_values)
person_aux.write(data_values)
if person_aux_count == 1:
action = {
'type': 'ir.actions.act_window',
'name': 'Families (Aux)',
'res_model': 'clv.family_aux',
'res_id': new_family_aux.id,
'view_type': 'form',
'view_mode': 'tree,kanban,form',
'target': 'current',
'context': {'search_default_name': new_family_aux.name},
}
else:
action = {
'type': 'ir.actions.act_window',
'name': 'Families (Aux)',
'res_model': 'clv.family_aux',
'view_type': 'form',
'view_mode': 'tree,kanban,form',
'target': 'current',
}
return action
# return True
|
# Gunicorn deployment settings for the elCID application.

# Listen only on loopback; external traffic is presumably forwarded here by
# a front-end proxy — confirm against the deployment's proxy config.
bind = "127.0.0.1:4567"
# NOTE(review): "logfile" is not one of gunicorn's standard setting names
# (accesslog/errorlog below are) — confirm something actually consumes it.
logfile = "/usr/local/ohc/log/elcid.gunicorn.log"
# Number of worker processes handling requests.
workers = 6
# Seconds a worker may be silent before it is killed and restarted.
timeout = 120
# Per-request access log and worker/error log destinations.
accesslog = "/usr/local/ohc/log/elcid.access.log"
errorlog = "/usr/local/ohc/log/elcid.error.log"
|
"""
The latest version of this package is available at:
<http://github.com/jantman/biweeklybudget>
Copyright 2016 Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com>
This file is part of biweeklybudget, also known as biweeklybudget.
biweeklybudget is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
biweeklybudget is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with biweeklybudget. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/biweeklybudget> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
AUTHORS:
Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com>
"""
import sys
from datetime import datetime, date
from decimal import Decimal
from ofxparse.ofxparse import Transaction
from pytz import UTC
from sqlalchemy.orm.query import Query
from sqlalchemy.sql.expression import null
from biweeklybudget.models.ofx_transaction import OFXTransaction
from biweeklybudget.models.ofx_statement import OFXStatement
from biweeklybudget.models.account import Account
from biweeklybudget.tests.unit_helpers import binexp_to_dict
# ``unittest.mock`` entered the standard library in Python 3.3 and gained
# the features this suite relies on by 3.4; older interpreters fall back to
# the third-party ``mock`` backport, which exposes the same API.  A single
# tuple comparison replaces the original compound major/minor check.
if sys.version_info < (3, 4):
    from mock import Mock, patch, call  # noqa
else:
    from unittest.mock import Mock, patch, call  # noqa

# Base module path patched in the tests below.
pbm = 'biweeklybudget.models.ofx_transaction'
class TestParamsFromOfxparserTransaction(object):
    """Tests for ``OFXTransaction.params_from_ofxparser_transaction`` plus
    the ``account_amount`` property and the ``unreconciled()`` query
    helper."""

    def setup(self):
        # nose-style per-test setup: one ofxparse Transaction carrying the
        # minimal attribute set, a spec-locked statement mock, and an
        # account id for the calls below.
        trans = Transaction()
        trans.payee = 'PayeeName'
        trans.type = 'TType'
        trans.date = datetime(2017, 3, 10, 14, 15, 16)
        trans.amount = Decimal('123.45')
        trans.id = 'ABC123'
        trans.memo = 'TMemo'
        self.trans = trans
        self.stmt = Mock(spec_set=OFXStatement)
        self.acct_id = 2

    def test_simple(self):
        # Default call: memo kept as its own key, naive date returned
        # UTC-aware, absent sic/mcc filled with None / empty string.
        res = OFXTransaction.params_from_ofxparser_transaction(
            self.trans, self.acct_id, self.stmt
        )
        assert res == {
            'account_id': self.acct_id,
            'statement': self.stmt,
            'memo': 'TMemo',
            'name': 'PayeeName',
            'amount': Decimal('123.45'),
            'trans_type': 'TType',
            'date_posted': datetime(2017, 3, 10, 14, 15, 16, tzinfo=UTC),
            'fitid': 'ABC123',
            'sic': None,
            'mcc': ''
        }

    def test_cat_memo(self):
        # cat_memo=True concatenates the memo onto the name and omits the
        # separate 'memo' key entirely.
        res = OFXTransaction.params_from_ofxparser_transaction(
            self.trans, self.acct_id, self.stmt, cat_memo=True
        )
        assert res == {
            'account_id': self.acct_id,
            'statement': self.stmt,
            'name': 'PayeeNameTMemo',
            'amount': Decimal('123.45'),
            'trans_type': 'TType',
            'date_posted': datetime(2017, 3, 10, 14, 15, 16, tzinfo=UTC),
            'fitid': 'ABC123',
            'sic': None,
            'mcc': ''
        }

    def test_extra_attrs(self):
        # Optional ofxparse attributes (mcc, sic, checknum), when present
        # on the transaction, pass straight through into the params dict.
        self.trans.mcc = 'TMCC'
        self.trans.sic = 456
        self.trans.checknum = 789
        res = OFXTransaction.params_from_ofxparser_transaction(
            self.trans, self.acct_id, self.stmt
        )
        assert res == {
            'account_id': self.acct_id,
            'statement': self.stmt,
            'memo': 'TMemo',
            'name': 'PayeeName',
            'amount': Decimal('123.45'),
            'trans_type': 'TType',
            'date_posted': datetime(2017, 3, 10, 14, 15, 16, tzinfo=UTC),
            'fitid': 'ABC123',
            'sic': 456,
            'mcc': 'TMCC',
            'checknum': 789
        }

    def test_account_amount(self):
        # negate_ofx_amounts=False leaves the amount unchanged.
        ot = OFXTransaction(
            account=Mock(spec_set=Account, negate_ofx_amounts=False),
            amount=Decimal('123.45')
        )
        assert ot.account_amount == Decimal('123.45')

    def test_account_amount_negated(self):
        # negate_ofx_amounts=True flips the sign of the stored amount.
        ot = OFXTransaction(
            account=Mock(spec_set=Account, negate_ofx_amounts=True),
            amount=Decimal('123.45')
        )
        assert ot.account_amount == Decimal('-123.45')

    @patch('%s.RECONCILE_BEGIN_DATE' % pbm, date(2017, 3, 17))
    def test_unreconciled(self):
        # unreconciled() should build query(OFXTransaction).filter(...)
        # with exactly eight clauses.  SQLAlchemy expressions don't compare
        # meaningfully with ==, so each expected clause is checked via
        # str() (or a dict form for the binary date comparison).
        m_db = Mock()
        m_q = Mock(spec_set=Query)
        m_filt = Mock(spec_set=Query)
        m_db.query.return_value = m_q
        m_q.filter.return_value = m_filt
        res = OFXTransaction.unreconciled(m_db)
        assert res == m_filt
        assert len(m_db.mock_calls) == 2
        assert m_db.mock_calls[0] == call.query(OFXTransaction)
        kall = m_db.mock_calls[1]
        assert kall[0] == 'query().filter'
        expected1 = OFXTransaction.reconcile.__eq__(null())
        # Cutoff comes from the patched RECONCILE_BEGIN_DATE above.
        cutoff = datetime(2017, 3, 17, 0, 0, 0, tzinfo=UTC)
        expected2 = OFXTransaction.date_posted.__ge__(cutoff)
        expected3 = OFXTransaction.account.has(reconcile_trans=True)
        assert len(kall[1]) == 8
        assert str(expected1) == str(kall[1][0])
        assert binexp_to_dict(expected2) == binexp_to_dict(kall[1][1])
        assert str(kall[1][2]) == str(expected3)
        # Remaining clauses each exclude one transaction category flag.
        assert str(
            OFXTransaction.is_payment.__ne__(True)
        ) == str(kall[1][3])
        assert str(
            OFXTransaction.is_late_fee.__ne__(True)
        ) == str(kall[1][4])
        assert str(
            OFXTransaction.is_interest_charge.__ne__(True)
        ) == str(kall[1][5])
        assert str(
            OFXTransaction.is_other_fee.__ne__(True)
        ) == str(kall[1][6])
        assert str(
            OFXTransaction.is_interest_payment.__ne__(True)
        ) == str(kall[1][7])
|
from openerp import fields, models
class ResCompany(models.Model):
    """Extend companies with per-company HR request numbering sequences."""
    _inherit = "res.company"

    # Sequence used to number leave requests; company_dependent makes the
    # value configurable independently for each company.
    leave_request_sequence_id = fields.Many2one(
        comodel_name="ir.sequence",
        string="Leave Request Sequence",
        company_dependent=True,
    )
    # Sequence used to number allocation requests, likewise per-company.
    allocation_request_sequence_id = fields.Many2one(
        comodel_name="ir.sequence",
        string="Allocation Request Sequence",
        company_dependent=True,
    )
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.