text stringlengths 957 885k |
|---|
<reponame>sheffler/login_logic_jinja2
#
# Login Page logic using Twisted sessions
#
# Defines two functions used like before filters
# current_user(request) - return the username or ""
# require_login(request) - go to login page if not logged in
#
# LoginResource()
# LogoutResource()
# IndexResource({ dict })
#
# pip install Jinja2
# python -m muet.login_logic_jinja2
import sys
import cgi
import random
from twisted.web.server import Site, NOT_DONE_YET
from twisted.web import static
from twisted.web.resource import Resource
from twisted.internet import reactor
from zope.interface import Interface, Attribute, implements
from twisted.web.server import Session
from twisted.python import log
from twisted.python.components import registerAdapter
from jinja2 import Template, Environment, PackageLoader
env = Environment(loader=PackageLoader('muet', 'templates')) # templates dir under muet package
#
# Access to session data is through a componentized interface
#
class ILoginSessionData(Interface):
    """Contract for per-session login state attached to a Twisted Session."""
    username = Attribute("the users name")
    csrf = Attribute("the csrf token")
    urlref = Attribute("where to go after login")
class LoginSessionData(object):
    """Concrete per-session storage backing ILoginSessionData."""
    implements(ILoginSessionData)

    def __init__(self, session):
        # Every field starts blank; the login flow fills them in later.
        self.username = self.csrf = self.urlref = ""

# Let ILoginSessionData(session) adapt a Session to its login data.
registerAdapter(LoginSessionData, Session, ILoginSessionData)
#
# get the current user from the session
#
def current_user(request):
    """Return the username stored on the request's session, or "" when not logged in."""
    session = request.getSession()
    login = ILoginSessionData(session)
    username = login.username
    # Fix: parenthesized print works under both Python 2 and Python 3;
    # the bare print statement was Python-2-only.
    print("CURRENT_USER:%s" % username)
    return username
#
# guard a page and redirect if not logged in
#
def require_login(request):
    """Remember the requested path on the session and redirect to /login.

    Returns NOT_DONE_YET, which the caller's render_GET must return.
    """
    urlref = request.path
    # Fix: parenthesized print (2/3 compatible) and corrected "REQURE" typo.
    print("REQUIRE_LOGIN:%s" % urlref)
    session = request.getSession()
    login = ILoginSessionData(session)
    # Stash the destination so the login page can send the user back here.
    login.urlref = urlref
    print("DOING REDIRECT")
    request.redirect("/login")
    request.finish()
    return NOT_DONE_YET
#
# This is a very simple login page
#
class LoginPage(Resource):
    """Login form: GET renders the form with a CSRF token, POST checks credentials."""

    def __init__(self, db):
        self.db = db  # adbapi ConnectionPool used for the Users lookup
        Resource.__init__(self)

    def render_GET(self, request):
        """Unconditionally render the login page with a fresh CSRF token."""
        session = request.getSession()
        login = ILoginSessionData(session)
        # NOTE(review): random.randint is not cryptographically strong; a
        # real deployment should generate the token with the secrets module.
        login.csrf = str(random.randint(0, 1000000))
        ctx = {
            '_csrf' : login.csrf
        }
        template = env.get_template("login_greeting.html")
        return str(template.render(ctx))

    #
    # The following section implements the callback chain for login database query
    #
    def onResult(self, dbdata, request, username, password):
        """Query callback: log the user in when the stored password matches.

        dbdata is a list of (username, password) rows; empty when no such user.
        """
        log.msg("On Result:%s %s %s" % (dbdata, username, password))
        dbusername = ""
        dbpassword = ""
        success = False
        if len(dbdata) != 0:
            dbusername = dbdata[0][0]
            dbpassword = dbdata[0][1]
            # Fix: compare the submitted password against the stored one; the
            # previous "<PASSWORD>" placeholder was not valid code.
            # NOTE(review): passwords are compared in plaintext here -- they
            # should be hashed (e.g. bcrypt) in any real deployment.
            if password == dbpassword:
                success = True
        if success:
            session = request.getSession()
            login = ILoginSessionData(session)
            login.username = username
            # retrieve the post-login destination from the session and reset it
            urlref = login.urlref
            login.urlref = ""
            log.msg("ON RESULT username, password, urlref:%s:%s:%s:" % (username, password, urlref))
            if urlref:
                request.redirect(urlref)
                request.finish()
            else:
                request.write("""<html><body>
You are now logged in as %s
</body></html>
""" % username)
                request.finish()
        else:
            request.redirect("/login")
            request.finish()

    #
    # Retrieve the username/password post data and start the database query
    #
    def render_POST(self, request):
        """Validate the CSRF token, then start the async credential check."""
        session = request.getSession()
        login = ILoginSessionData(session)
        # escape posted fields before they are logged or echoed
        username = cgi.escape(request.args["username"][0],)
        password = cgi.escape(request.args["password"][0],)
        csrf = cgi.escape(request.args["_csrf"][0],)
        log.msg("POST csrf:%s username:%s password:%s" % (csrf, username, password))
        if csrf != login.csrf:
            log.msg("CSRF ATTACK!")
            request.redirect("/login")
            request.finish()
            return NOT_DONE_YET
        # Run the (parameterized) query; onResult finishes the request.
        d = self.db.runQuery("SELECT username, password from Users WHERE username = ? LIMIT 1", (username,))
        d.addCallback(self.onResult, request, username, password)
        return NOT_DONE_YET
class LogoutPage(Resource):
    """Expire the session (logging the user out) and render a goodbye page."""

    def render_GET(self, request):
        # Dropping the session is what actually logs the user out.
        request.getSession().expire()
        template = env.get_template("logout_greeting.html")
        return str(template.render({}))
#
# Every site should have a main index.html
# This is also the outline of how every page protects itself.
#
class IndexPage(Resource):
    """Protected index page: demands a logged-in user, then renders index.html."""
    isLeaf = True

    def __init__(self, ctx):
        self.ctx = ctx
        Resource.__init__(self)

    def render_GET(self, request):
        user = current_user(request)
        if not user:
            # Stores the current path, shows the login page, and the login
            # flow eventually redirects back here.
            return require_login(request)
        # Render with the shared context plus the current user added in.
        page_ctx = dict(self.ctx, user=user)
        return str(env.get_template("index.html").render(page_ctx))
#
# The root page usually wants to redirect to somewhere else
#
class RootPage(Resource):
    """Site root: always redirects to /index."""
    def render_GET(self, request):
        log.msg("ROOT REDIRECT")
        request.redirect("/index")
        request.finish()
        # NOT_DONE_YET because finish() already completed the response.
        return NOT_DONE_YET
if __name__ == "__main__":
    # Log to stdout so the request trace is visible when run as a script.
    log.startLogging(sys.stdout)
    from twisted.enterprise import adbapi
    # check_same_thread=False: adbapi runs queries from a thread pool.
    connection = adbapi.ConnectionPool("sqlite3", "db/test.sqlite", check_same_thread=False)
    root = Resource()
    # "" handles the bare "/" path.
    root.putChild("", RootPage())
    ctx = { }
    root.putChild("index", IndexPage(ctx))
    root.putChild("login", LoginPage(connection))
    root.putChild("logout", LogoutPage())
    root.putChild("javascript", static.File("javascript", "application/javascript"))
    factory = Site(root)
    reactor.listenTCP(8880, factory)
    reactor.run()
|
from datetime import date
from decimal import Decimal
from itertools import groupby
from model import Cash, Currency, Stock, Bond, Option, OptionType, Position, Trade, TradeFlags
from pathlib import Path
import helpers
import schwab
import unittest
class TestSchwabPositions(unittest.TestCase):
    """Spot-checks the four positions parsed from the Schwab positions CSV."""

    def setUp(self) -> None:
        parsed = schwab.parsePositions(
            Path('tests/schwab_positions.CSV'))
        parsed.sort(key=lambda p: p.instrument.symbol)
        self.positions = parsed

    def _check(self, index, instrument, quantity, basis) -> None:
        # Shared assertion helper: one position, three fields.
        position = self.positions[index]
        self.assertEqual(position.instrument, instrument)
        self.assertEqual(position.quantity, quantity)
        self.assertEqual(position.costBasis, helpers.cashUSD(basis))

    def test_positionValidity(self) -> None:
        self.assertEqual(len(self.positions), 4)

    def test_tBill(self) -> None:
        self._check(0, Bond('193845XM2', Currency.USD), 10000,
                    Decimal('9956.80'))

    def test_bnd(self) -> None:
        self._check(1, Stock('BND', Currency.USD), Decimal('36.8179'),
                    Decimal('1801.19'))

    def test_uvxy(self) -> None:
        self._check(2, Stock('UVXY', Currency.USD), Decimal('0'),
                    Decimal('0'))

    def test_vti(self) -> None:
        self._check(3, Stock('VTI', Currency.USD), Decimal('48.2304'),
                    Decimal('3283.04'))
class TestSchwabTransactions(unittest.TestCase):
    """Spot-checks trades parsed from the Schwab transactions CSV export."""
    def setUp(self) -> None:
        self.trades = schwab.parseTransactions(
            Path('tests/schwab_transactions.CSV'))
        self.trades.sort(key=lambda t: t.date)
        # Group trades by calendar day; groupby requires the sort above.
        self.tradesByDate = {
            d: list(t)
            for d, t in groupby(self.trades, key=lambda t: t.date.date())
        }
    def test_tradeValidity(self) -> None:
        self.assertGreater(len(self.trades), 0)
    def test_buySecurity(self) -> None:
        # Negative amount: cash leaves the account on a buy.
        ts = self.tradesByDate[date(2017, 2, 22)]
        self.assertEqual(len(ts), 1)
        self.assertEqual(ts[0].instrument, Stock('VOO', Currency.USD))
        self.assertEqual(ts[0].quantity, Decimal('23'))
        self.assertEqual(
            ts[0].amount,
            Cash(currency=Currency.USD, quantity=Decimal('-4981.11')))
        self.assertEqual(ts[0].fees,
                         Cash(currency=Currency.USD, quantity=Decimal('6.95')))
        self.assertEqual(ts[0].flags, TradeFlags.OPEN)
    def test_reinvestShares(self) -> None:
        # Dividend reinvestment: fractional shares, no fees, DRIP flag.
        ts = self.tradesByDate[date(2017, 3, 29)]
        self.assertEqual(len(ts), 1)
        self.assertEqual(ts[0].instrument, Stock('VOO', Currency.USD))
        self.assertEqual(ts[0].quantity, Decimal('0.1062'))
        self.assertEqual(
            ts[0].amount,
            Cash(currency=Currency.USD, quantity=Decimal('-22.95')))
        self.assertEqual(ts[0].fees,
                         Cash(currency=Currency.USD, quantity=Decimal(0)))
        self.assertEqual(ts[0].flags, TradeFlags.OPEN | TradeFlags.DRIP)
    def test_shortSaleAndCover(self) -> None:
        # Short sale opens with a negative quantity; the cover closes it.
        ts = self.tradesByDate[date(2018, 1, 2)]
        self.assertEqual(len(ts), 2)
        self.assertEqual(ts[0].instrument, Stock('HD', Currency.USD))
        self.assertEqual(ts[0].quantity, Decimal('-6'))
        self.assertEqual(
            ts[0].amount,
            Cash(currency=Currency.USD, quantity=Decimal('1017.3')))
        self.assertEqual(ts[0].fees,
                         Cash(currency=Currency.USD, quantity=Decimal('4.96')))
        self.assertEqual(ts[0].flags, TradeFlags.OPEN)
        self.assertEqual(ts[1].instrument, Stock('HD', Currency.USD))
        self.assertEqual(ts[1].quantity, Decimal('6'))
        self.assertEqual(
            ts[1].amount,
            Cash(currency=Currency.USD, quantity=Decimal('-1033.12')))
        self.assertEqual(ts[1].fees,
                         Cash(currency=Currency.USD, quantity=Decimal('4.95')))
        self.assertEqual(ts[1].flags, TradeFlags.CLOSE)
    def test_buyToOpenOption(self) -> None:
        ts = self.tradesByDate[date(2018, 11, 5)]
        self.assertEqual(len(ts), 1)
        self.assertEqual(
            ts[0].instrument,
            Option(underlying='INTC',
                   currency=Currency.USD,
                   optionType=OptionType.PUT,
                   expiration=date(2018, 12, 7),
                   strike=Decimal('48.50')))
        self.assertEqual(ts[0].quantity, Decimal('1'))
        self.assertEqual(ts[0].amount,
                         Cash(currency=Currency.USD, quantity=Decimal('-248')))
        self.assertEqual(ts[0].fees,
                         Cash(currency=Currency.USD, quantity=Decimal('5.60')))
        self.assertEqual(ts[0].flags, TradeFlags.OPEN)
    def test_sellToCloseOption(self) -> None:
        ts = self.tradesByDate[date(2018, 11, 9)]
        self.assertEqual(len(ts), 1)
        self.assertEqual(
            ts[0].instrument,
            Option(underlying='INTC',
                   currency=Currency.USD,
                   optionType=OptionType.PUT,
                   expiration=date(2018, 12, 7),
                   strike=Decimal('48.50')))
        self.assertEqual(ts[0].quantity, Decimal('-1'))
        self.assertEqual(ts[0].amount,
                         Cash(currency=Currency.USD, quantity=Decimal('140')))
        self.assertEqual(ts[0].fees,
                         Cash(currency=Currency.USD, quantity=Decimal('5.60')))
        self.assertEqual(ts[0].flags, TradeFlags.CLOSE)
    def test_exercisedOption(self) -> None:
        # Exercise/assignment legs carry zero cash amount and zero fees.
        ts = self.tradesByDate[date(2018, 2, 4)]
        self.assertEqual(len(ts), 4)
        self.assertEqual(
            ts[2].instrument,
            Option(underlying='QQQ',
                   currency=Currency.USD,
                   optionType=OptionType.CALL,
                   expiration=date(2018, 2, 1),
                   strike=Decimal('155')))
        self.assertEqual(ts[2].quantity, Decimal('-1'))
        self.assertEqual(ts[2].amount,
                         Cash(currency=Currency.USD, quantity=Decimal(0)))
        self.assertEqual(ts[2].fees,
                         Cash(currency=Currency.USD, quantity=Decimal(0)))
        self.assertEqual(ts[2].flags,
                         TradeFlags.CLOSE | TradeFlags.ASSIGNED_OR_EXERCISED)
    def test_assignedOption(self) -> None:
        ts = self.tradesByDate[date(2018, 2, 4)]
        self.assertEqual(len(ts), 4)
        self.assertEqual(
            ts[3].instrument,
            Option(underlying='QQQ',
                   currency=Currency.USD,
                   optionType=OptionType.CALL,
                   expiration=date(2018, 2, 1),
                   strike=Decimal('130')))
        self.assertEqual(ts[3].quantity, Decimal('1'))
        self.assertEqual(ts[3].amount,
                         Cash(currency=Currency.USD, quantity=Decimal(0)))
        self.assertEqual(ts[3].fees,
                         Cash(currency=Currency.USD, quantity=Decimal(0)))
        self.assertEqual(ts[3].flags,
                         TradeFlags.CLOSE | TradeFlags.ASSIGNED_OR_EXERCISED)
    def test_expiredShortOption(self) -> None:
        ts = self.tradesByDate[date(2018, 12, 3)]
        self.assertEqual(len(ts), 1)
        self.assertEqual(
            ts[0].instrument,
            Option(underlying='CSCO',
                   currency=Currency.USD,
                   optionType=OptionType.PUT,
                   expiration=date(2018, 11, 30),
                   strike=Decimal('44.50')))
        self.assertEqual(ts[0].quantity, Decimal('1'))
        self.assertEqual(ts[0].amount,
                         Cash(currency=Currency.USD, quantity=Decimal(0)))
        self.assertEqual(ts[0].fees,
                         Cash(currency=Currency.USD, quantity=Decimal(0)))
        self.assertEqual(ts[0].flags, TradeFlags.CLOSE | TradeFlags.EXPIRED)
    def test_buyToCloseOption(self) -> None:
        ts = self.tradesByDate[date(2018, 12, 12)]
        self.assertEqual(len(ts), 2)
        self.assertEqual(
            ts[0].instrument,
            Option(underlying='MAR',
                   currency=Currency.USD,
                   optionType=OptionType.CALL,
                   expiration=date(2018, 12, 28),
                   strike=Decimal('116')))
        self.assertEqual(ts[0].quantity, Decimal('1'))
        self.assertEqual(ts[0].amount,
                         Cash(currency=Currency.USD, quantity=Decimal('-70')))
        self.assertEqual(ts[0].fees,
                         Cash(currency=Currency.USD, quantity=Decimal('5.60')))
        self.assertEqual(ts[0].flags, TradeFlags.CLOSE)
    def test_sellToOpenOption(self) -> None:
        ts = self.tradesByDate[date(2018, 12, 12)]
        self.assertEqual(len(ts), 2)
        self.assertEqual(
            ts[1].instrument,
            Option(underlying='MAR',
                   currency=Currency.USD,
                   optionType=OptionType.CALL,
                   expiration=date(2018, 12, 28),
                   strike=Decimal('112')))
        self.assertEqual(ts[1].quantity, Decimal('-1'))
        self.assertEqual(ts[1].amount,
                         Cash(currency=Currency.USD, quantity=Decimal('190')))
        self.assertEqual(ts[1].fees,
                         Cash(currency=Currency.USD, quantity=Decimal('5.60')))
        self.assertEqual(ts[1].flags, TradeFlags.OPEN)
    def test_securityTransferSale(self) -> None:
        ts = self.tradesByDate[date(2018, 1, 4)]
        self.assertEqual(len(ts), 1)
        self.assertEqual(ts[0].instrument, Stock('MSFT', Currency.USD))
        self.assertEqual(ts[0].quantity, Decimal('-10'))
        self.assertEqual(
            ts[0].amount,
            Cash(currency=Currency.USD, quantity=Decimal('920.78')))
        self.assertEqual(
            ts[0].fees, Cash(currency=Currency.USD, quantity=Decimal('13.65')))
        self.assertEqual(ts[0].flags, TradeFlags.CLOSE)
if __name__ == '__main__':
    # Run this module's test cases directly.
    unittest.main()
|
<reponame>ysh329/darknet2caffe<gh_stars>10-100
"""
net_compiler.py
Copyright 2017 <NAME> <<EMAIL>>
Portions Copyright 2017 <NAME> <http://xianyi.github.io> and <NAME> <<EMAIL>>
This script is made to translate a caffe net protobuf file into an inferxlite[website] .c file
"""
__author__ = "<NAME> <https://mrlittlepig.github.io>"
__version__ = "0.1"
__date__ = "March 4,2017"
__copyright__ = "Copyright: 2017 <NAME>; Portions: 2017 Xiany<NAME>hang <http://xianyi.github.io>; Portions: 2017 Chaowei Wang;"
import re
import sys
from abc import abstractmethod
DEBUG = False
def cformatparam(string_param):
    """Return string_param with every non-alphanumeric character removed."""
    # Keep only [0-9a-zA-Z]; everything else is dropped.
    kept = [cha for cha in string_param if re.match('^[0-9a-zA-Z]+$', cha)]
    return "".join(kept)
def isac(c):
    """
    A simple function, which determines whether the
    string element char c belongs to a decimal number
    :param c: a string element type of char
    :return: a bool type of determination
    """
    try:
        int(c)
        return True
    # Fix: catch only the conversion failures int() can raise instead of a
    # bare except, which also swallowed KeyboardInterrupt/SystemExit.
    except (ValueError, TypeError):
        # Not a digit: accept the other characters a number may contain.
        return c == '.' or c == '-' or c == 'e'
def hasannotation(string_list):
    """
    Judge whether the string type parameter string_list contains an
    annotation (a '#' comment marker).
    """
    return "#" in string_list
def dropannotation(annotation_list):
    """
    Drop out the annotation contained in annotation_list: return everything
    before the first '#', or the whole string when there is none.
    """
    # str.partition splits on the first '#' only, matching the original scan.
    return annotation_list.partition("#")[0]
class LayerFactory(object):
    """
    Layer factory used to connect layer and sublayer.
    Members
    ----------
    __layer_register: a list to store layer type, which is registered in layer system.
    layer_string: contain a whole layer information.
    type: all the layers type are included in __layer_register.
    layer: which stores layer object by __gen_layer__ function, using
        statement exec ('self.layer = %s(self.layer_string)'%self.__type)
        as self.layer = Convolution(self.layer_string) an example.
    ----------
    """
    __layer_register = ['Input', 'Convolution', 'Deconvolution', 'Pooling',
                        'Crop', 'Eltwise', 'ArgMax', 'BatchNorm', 'Concat',
                        'Scale', 'Sigmoid', 'Softmax', 'TanH', 'ReLU', 'LRN',
                        'InnerProduct', 'Dropout','Reshape',
                        # darknet layers below
                        'Reorg',]

    def __init__(self, layer_string=None,net_name=None):
        # layer_string: raw text of one prototxt layer block.
        self.layer_string = layer_string
        self.type = None
        self.net_name = net_name
        self.layer = None
        self.__init_type__()
        self.__gen_layer__()

    def __init_type__(self):
        """Extract the layer type; a block with no "type" key is the input."""
        phase_list = self.layer_string.split('type')
        phase_num = len(phase_list)
        if phase_num == 1:
            self.type = "Input"
        elif phase_num >= 2:
            self.type = phase_list[1].split('\"')[1]

    def __gen_layer__(self):
        """Instantiate the layer class whose name matches the parsed type."""
        if self.type in self.__layer_register:
            exec ('self.layer = %s(self.layer_string,self.net_name)'%self.type)
        else:
            print("[WARN] Type {} layer is not in layer register".format(self.type))
            type_pattern = '.*type: "(.*)"\n'
            # Fix: catch only the IndexError from findall(...)[0] instead of
            # a bare except, which also hid unrelated failures.
            try:
                layer_type = re.findall(type_pattern, self.layer_string)[0]
                if DEBUG: print(layer_type)
            except IndexError:
                print("Can't find this layer type")
                exit(-1)
class Layer(object):
    """Layer parent class: parses one prototxt layer block and emits a C call."""
    # String-valued prototxt keys every layer may carry.
    __phases_string = ['name', 'type', 'bottom', 'top']
    # Identifier names substituted verbatim into the generated C calls.
    modelstr="model"
    datastr="data"
    pdata="pdata"
    context="context_id"
    def __init__(self, layer_string=None,net_name=None):
        # layer_string: raw text of one prototxt layer block.
        self.layer_string = layer_string
        self.type = None
        self.name = None
        self.bottom = None
        self.top = None
        # NOTE(review): this assigns a local variable, not self.context --
        # it has no effect and merely shadows the class attribute.
        context="context_id"
        self.net_name = net_name
        self.bottom_layer = None            # producer layer(s); resolved externally
        self.num_input = None
        self.num_output = None
        self.interface_c = None             # generated C call for this layer
        self.interface_criterion = None     # C prototype the call must match
        self.other = None
        self.__init_string_param__()
        self.__init_top__()
        self.__list_all_member__()
    @abstractmethod
    def __calc_ioput__(self):
        """Calculate num_input and num_output"""
        pass
    @abstractmethod
    def __interface_c__(self):
        """Write the predestinate parameter into c type layer function"""
        pass
    def __debug_print__(self, string_list, printout=False):
        """Choose to print or not, controlled by printout"""
        if printout:
            print(string_list)
    def __init_bottom__(self):
        """Sometimes a layer has more than one bottom, so we pull it out alone"""
        bottoms_tmp = self.layer_string.split('bottom')
        bottom_num = len(bottoms_tmp)
        bottoms = []
        if bottom_num == 1:
            # No "bottom" key at all (e.g. the input layer).
            self.bottom = None
        else:
            for index in range(1, bottom_num):
                # Blob names get a "_data" suffix in the generated C.
                bottoms.append(bottoms_tmp[index].split('\"')[1]+"_data")
            self.bottom = bottoms
    def __init_top__(self):
        # Suffix the top blob name the same way bottoms are suffixed.
        # NOTE(review): raises TypeError if no "top" key was parsed -- confirm
        # every non-Input layer block always declares one.
        self.top += "_data"
    def __init_string_param__(self):
        """
        String parameters like name: "layername", key is name the value
        is the string type "layername", this function finds string parameters,
        which are stored in private list __phases_string, then stores the keys
        values in member variables by using exec function.
        """
        for phase in self.__phases_string:
            if phase == 'bottom':
                # bottoms may be plural; handled by the dedicated parser.
                self.__init_bottom__()
                continue
            phase_list = self.layer_string.split(phase)
            phase_num = len(phase_list)
            if phase_num == 1:
                continue
            elif phase_num == 2:
                # Single occurrence: take the first quoted value.
                exec ('self.%s=phase_list[1].split(\'\"\')[1]' % phase)
            else:
                # Multiple occurrences: collect all quoted values.
                member = []
                for index in range(1, phase_num):
                    member.append(phase_list[index].split('\"')[1])
                exec ('self.%s=member' % phase)
                # "type" also matches e.g. "optionType"; keep the first hit.
                if phase == "type":
                    self.type = self.type[0]
        self.__debug_print__("Init string param.")
    def __init_number_param__(self, phases_number):
        """
        Number parameters like num_output: 21, key is num_output the value
        is the number 21, this function finds number parameters, which are
        stored in list phases_number, then stores the keys values in member
        variables by using exec function.
        """
        for phase in phases_number:
            phase_list = self.layer_string.split(phase)
            phase_num = len(phase_list)
            if phase_num == 1:
                continue
            elif phase_num == 2:
                exec ('self.%s = self.__find_all_num__(phase_list[1])[0]' % phase)
            else:
                print("Error phase_num:%d" % phase_num)
        self.__debug_print__("Init number param.")
    def __init_decimal_param__(self, phases_decimal):
        """
        Decimal parameters like eps: 0.0001, key is eps the value is the
        decimal 0.0001, this function finds decimal parameters, which are
        stored in list phases_decimal, then stores the keys values in member
        variables by using exec function.
        """
        for phase in phases_decimal:
            phase_list = self.layer_string.split(phase)
            phase_num = len(phase_list)
            if phase_num == 1:
                continue
            elif phase_num >= 2:
                # Collect every occurrence into a list attribute.
                exec ('self.%s = []' % phase)
                for index in range(1, phase_num):
                    exec ('self.%s.append(self.__find_first_decimal__(phase_list[index].split(\':\')[1]))' % phase)
        self.__debug_print__("Init decimal param.")
    def __init_binary_param__(self, phase, default='false'):
        """
        Binary parameters like bias_term: false, key is bias_term the value
        is the bool type false, this function finds binary parameter, which
        pass in as phase, then stores the keys values in member variable by
        using exec function. Parameter default to set the default status of
        the phase parameter
        """
        if default == 'false':
            neg_default = 'true'
        else:
            neg_default = 'false'
        phase_list = self.layer_string.split(phase)
        phase_num = len(phase_list)
        if phase_num == 1:
            # Key absent: keep the caller-supplied default.
            exec ('self.%s = \'%s\'' % (phase, default))
        elif phase_num >= 2:
            # Key present: the value is whichever literal is NOT the default.
            if len(phase_list[1].split(':')[1].split(default)) == 1:
                exec ('self.%s = \'%s\'' % (phase, neg_default))
            else:
                exec ('self.%s = \'%s\'' % (phase, default))
    def __find_all_num__(self, string_phase):
        """
        A function to find series of numbers
        :param string_phase: string type key like num_output
        :return: a list stores numbers found in string_phase
        """
        number = re.findall(r'(\w*[0-9]+)\w*', string_phase)
        return number
    def __find_first_decimal__(self, string_phase):
        """
        A function to find the first decimal
        :param string_phase: string type key like moving_average_fraction
        :return: the first decimal (as a string) found in string_phase
        """
        # Blank out every non-number character, then take the first token.
        decimals = ""
        for index in range(len(string_phase)):
            if isac(string_phase[index]):
                decimals += string_phase[index]
            else:
                decimals += ' '
        for decimal in decimals.split(' '):
            if not decimal == '':
                return decimal
    def __list_all_member__(self, listout=False):
        """Show all member variables"""
        if listout:
            for name, value in vars(self).items():
                if value == None:
                    continue
                self.__debug_print__('%s = %s' % (name, value),printout=True)
class Input(Layer):
    """Input layer"""
    # Input layers have no "bottom" key.
    __phases_string = ['name', 'type', 'top']
    def __init__(self, layer_string=None,net_name=None):
        Layer.__init__(self, layer_string,net_name)
        # dim: list of parsed "dim:" entries, outermost (batch) first.
        self.dim = []
        self.__init_dim__()
        self.__list_all_member__()
    def __init_dim__(self):
        # Collect every "dim:" entry from the input declaration.
        phase_list = self.layer_string.split('dim:')
        phase_num = len(phase_list)
        if phase_num == 1:
            self.__debug_print__("Input layer %s has no input dims" % self.name, printout=True)
        elif phase_num >= 2:
            for index in range(1, phase_num):
                self.dim.append(self.__find_all_num__(phase_list[index]))
    def __init_string_param__(self):
        # Bare "input: ... dim: ..." blocks carry no name/type/top keys;
        # fall back to the conventional "data" input layer in that case.
        if len(self.layer_string.split("type")) == 1 \
                and len(self.layer_string.split("top")) == 1\
                and len(self.layer_string.split("dim:")) >= 2:
            self.name = "data"
            self.type = "Input"
            self.top = "data"
            return
        for phase in self.__phases_string:
            if phase == 'bottom':
                self.__init_bottom__()
                continue
            phase_list = self.layer_string.split(phase)
            phase_num = len(phase_list)
            if phase_num == 1:
                continue
            elif phase_num == 2:
                exec ('self.%s=phase_list[1].split(\'\"\')[1]' % phase)
            else:
                member = []
                for index in range(1, phase_num):
                    member.append(phase_list[index].split('\"')[1])
                exec ('self.%s=member' % phase)
        self.__debug_print__("Init string param.")
    def __interface_c__(self):
        """Emit the inferx_input(...) call for this layer."""
        self.interface_criterion = \
            "Input(int dim1,int dim2,int dim3,int " \
            "dim4,char *top,char *name)"
        self.interface_c = "inferx_input("
        # for d in self.dim:
        #     self.interface_c += "{}".format(d[0])
        #     self.interface_c += ','
        self.interface_c += "{},".format("nchw")
        self.interface_c += '{},'.format(Layer.pdata)
        self.interface_c += '\"{}\",'.format(self.top)
        self.interface_c += '\"{}\",'.format(self.name)
        self.interface_c += '{},'.format(Layer.modelstr)
        self.interface_c += '{});'.format(Layer.datastr)
    def __calc_ioput__(self):
        self.num_input = None
        self.__debug_print__(self.name)
        # dim[1] is the channel count in the (n, c, h, w) ordering.
        self.num_output = int(self.dim[1][0])
class Convolution(Layer):
    """Convolution layer"""
    __phases_number = ['num_output', 'kernel_size', 'stride', 'pad',
                       'group', 'dilation', 'axis']
    __phases_binary = ['bias_term', 'force_nd_im2col']
    def __init__(self, layer_string=None,net_name=None):
        # Caffe defaults; the parse calls below overwrite them when present.
        self.group = 1
        self.axis = 1
        self.kernel_size = None
        self.dilation = 1
        self.stride = 1
        self.pad = 0
        self.bias_term = 'true'
        self.force_nd_im2col = 'false'
        Layer.__init__(self, layer_string,net_name)
        self.__init_number_param__(self.__phases_number)
        self.__init_binary_param__(self.__phases_binary[0], default='true')
        self.__init_binary_param__(self.__phases_binary[1], default='false')
        self.__list_all_member__()
        # Square kernels/strides/pads only: h and w mirror the single value.
        self.kernel_h = self.kernel_size
        self.kernel_w = self.kernel_size
        self.stride_h = self.stride
        self.stride_w = self.stride
        self.pad_h = self.pad
        self.pad_w = self.pad
        # presumably 0 means "no fused activation" -- TODO confirm against inferxlite
        self.activation_type = 0
    def __interface_c__(self):
        """Emit the inferx_convolution(...) call for this layer."""
        self.interface_criterion = \
            "Convolution(int num_input,int num_output,int kernel_h,int kernel_w,int stride_h," \
            "int stride_w,int pad_h,int pad_w,int group,int dilation,int axis," \
            "bool bias_term,bool force_nd_im2col,char *bottom,char *top, char *name, int activation_type)"
        self.interface_c = "inferx_convolution("
        self.interface_c += "{},{},{},{},{},{},{},{}".\
            format(self.num_input,self.num_output,self.kernel_h,self.kernel_w,
                   self.stride_h,self.stride_w,self.pad_h,self.pad_w)
        self.interface_c += ",{},{},{}".format(self.group,self.dilation,self.axis)
        self.interface_c += ",{},{}".format(self.bias_term,self.force_nd_im2col)
        self.interface_c += ",\"{}\",\"{}\",\"{}\",{},{},{});".format(self.bottom_layer[0].top,self.top,self.name,Layer.modelstr,Layer.datastr, self.activation_type)
    def __calc_ioput__(self):
        # num_output was parsed from the prototxt; the input channel count
        # comes from the producing (bottom) layer.
        self.num_input = self.bottom_layer[0].num_output
class Deconvolution(Convolution):
    """Deconvolution layer; reuses Convolution's parsing and defaults."""
    __phases_number = ['num_output', 'kernel_size', 'stride', 'pad', 'dilation']

    def __init__(self, layer_string=None,net_name=None):
        # Fix: forward the caller's net_name -- it was previously hard-coded
        # to None (net_name=None), dropping the network name for every
        # Deconvolution layer.
        Convolution.__init__(self, layer_string, net_name)

    def __interface_c__(self):
        """Emit the inferx_deconvolution(...) call for this layer."""
        self.interface_criterion = \
            "Deconvolution(int num_input,int num_output,int kernel_h,int kernel_w," \
            "int stride_h,int stride_w,int pad_h,int pad_w,int group,int dilation,int axis," \
            "bool bias_term,bool force_nd_im2col,char *bottom,char *top,char *name)"
        self.interface_c = "inferx_deconvolution("
        self.interface_c += "{},{},{},{},{},{},{},{}". \
            format(self.num_input, self.num_output, self.kernel_h, self.kernel_w,
                   self.stride_h, self.stride_w, self.pad_h, self.pad_w)
        self.interface_c += ",{},{},{}".format(self.group, self.dilation, self.axis)
        self.interface_c += ",{},{}".format(self.bias_term, self.force_nd_im2col)
        self.interface_c += ",\"{}\",\"{}\",\"{}\",{},{});".format(self.bottom_layer[0].top, self.top, self.name,Layer.modelstr,Layer.datastr)
class Pooling(Layer):
    """Pooling layer"""
    __phases_number = ['kernel_size', 'stride', 'pad']
    __phases_binary = ['global_pooling']
    # Pool method keywords recognised in the layer text.
    __pool_phases = ['MAX', 'AVE', 'STOCHASTIC']
    def __init__(self, layer_string=None,net_name=None):
        Layer.__init__(self, layer_string,net_name)
        # Caffe defaults; the parse calls below overwrite them when present.
        self.kernel_size = None
        self.stride = 1
        self.pool = 'MAX'
        self.global_pooling = 'false'
        self.pad = 0
        self.__init_number_param__(self.__phases_number)
        self.__init_binary_param__(self.__phases_binary[0], default='false')
        self.__init_pool__()
        self.__list_all_member__()
        # Square kernel/stride/pad only: h and w mirror the single value.
        self.kernel_h = self.kernel_size
        self.kernel_w = self.kernel_size
        self.stride_h = self.stride
        self.stride_w = self.stride
        self.pad_h = self.pad
        self.pad_w = self.pad
    def __init_pool__(self):
        # Detect which pooling-method keyword appears in the layer text.
        for phase in self.__pool_phases:
            phase_list = self.layer_string.split(phase)
            phase_num = len(phase_list)
            if phase_num == 1:
                self.__debug_print__("Pooling layer %s has no pool method %s." % (self.name, phase))
                continue
            elif phase_num == 2:
                self.pool = phase
                self.__debug_print__("Pooling layer %s has pool method %s." % (self.name, self.pool))
            else:
                self.__debug_print__("Pool layer method error.",printout=True)
    def __calc_ioput__(self):
        # Pooling preserves the channel count.
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input
    def __interface_c__(self):
        """Emit the pooling call; global pooling takes no kernel geometry."""
        if self.global_pooling == 'true':
            self.interface_criterion = \
                "inferx_globalpooling(enum PoolMethod pool,char *bottom,char *top,char *name)"
            self.interface_c = "inferx_globalpooling("
        else:
            self.interface_criterion = \
                "inferx_pooling(int kernel_h,int kernel_w,int stride_h,int stride_w,int pad_h," \
                "int pad_w,enum PoolMethod pool,char *bottom,char *top,char *name)"
            self.interface_c = "inferx_pooling("
            # Stride-1 pooling is routed to the YOLO-specific kernel.
            if (str(self.stride_h) == "1") and (str(self.stride_w) == "1"):
                self.interface_c = "inferx_pooling_yolo("
            self.interface_c += "{},{},{},{},{},{},". \
                format(self.kernel_h,self.kernel_w,
                       self.stride_h,self.stride_w,self.pad_h,self.pad_w)
        self.interface_c += "{}".format(self.pool)
        self.interface_c += ",\"{}\",\"{}\",\"{}\",{},{});".format(self.bottom_layer[0].top, self.top, self.name,Layer.modelstr,Layer.datastr)
class Crop(Layer):
    """Crop layer"""
    __phases_number = ['axis', 'offset']

    def __init__(self, layer_string=None,net_name=None):
        Layer.__init__(self, layer_string,net_name)
        # Caffe defaults: crop from axis 2 with no offset; the parse call
        # below overwrites them when the prototxt specifies values.
        self.axis = 2
        self.offset = 0
        self.__init_number_param__(self.__phases_number)
        self.__list_all_member__()

    def __calc_ioput__(self):
        # Cropping passes the channel count straight through.
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        """Emit the inferx_crop(...) call for this layer."""
        self.interface_criterion = \
            "Crop(int axis,int offset,char* bottom, char* bottom_mode,char *top,char *name)"
        parts = ["{},{}".format(self.axis, self.offset)]
        parts.extend("\"{}\"".format(producer.top) for producer in self.bottom_layer)
        parts.append("\"{}\"".format(self.top))
        parts.append("\"{}\"".format(self.name))
        parts.append(Layer.modelstr)
        parts.append("{});".format(Layer.datastr))
        self.interface_c = "inferx_crop(" + ",".join(parts)
class Eltwise(Layer):
    """Eltwise layer"""
    __eltwise_phases = ['PROD', 'SUM', 'MAX']
    __phases_decimal = ['coeff']
    __phases_binary = ['stable_prod_grad']

    def __init__(self, layer_string=None,net_name=None):
        Layer.__init__(self, layer_string,net_name)
        self.operation = 'SUM'
        # Fix: this default used to be stored under the misspelled attribute
        # 'stabel_prod_grad', while __init_binary_param__ sets
        # 'stable_prod_grad' -- so the value parsed from the prototxt was
        # never the one emitted by __interface_c__.
        self.stable_prod_grad = 'true'
        self.coeff = [1,1]
        self.__init_eltwise__()
        self.__init_binary_param__(self.__phases_binary[0], default='true')
        self.__init_decimal_param__(self.__phases_decimal)
        self.__list_all_member__()

    def __init_eltwise__(self):
        # Detect which eltwise-operation keyword appears in the layer text.
        for phase in self.__eltwise_phases:
            phase_list = self.layer_string.split(phase)
            phase_num = len(phase_list)
            if phase_num == 1:
                self.__debug_print__("Eltwise layer %s has no eltwise operations named %s." % (self.name, phase))
            elif phase_num == 2:
                self.operation = phase
                self.__debug_print__("Eltwise layer %s has eltwise operations %s." % (self.name, self.operation))
            else:
                self.__debug_print__("Eltwise layer %s layer method error." % self.name)

    def __calc_ioput__(self):
        # Element-wise ops preserve the channel count.
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        """Emit coefficient assignments followed by the inferx_eltwise call."""
        self.interface_criterion = \
            "Eltwise(int coeffs_num, float* coeffs,enum EltwiseOp operation," \
            "bool stabel_prod_grad,int bottom_num,char **bottoms,char *top, char *name)"
        self.interface_c = ""
        # NOTE(review): "%f" assumes numeric coefficients; values parsed by
        # __init_decimal_param__ are strings -- confirm before relying on
        # prototxt-supplied coeff entries.
        for index in range(len(self.coeff)):
            self.interface_c += "coeffs[%d]=%f; " % (index,self.coeff[index])
        self.interface_c += "\n\tinferx_eltwise("
        self.interface_c += "%d,coeffs" % len(self.coeff)
        self.interface_c += ",{}".format(self.operation)
        self.interface_c += ",{}".format(self.stable_prod_grad)
        self.interface_c += ",%d,bottom_vector" % len(self.bottom_layer)
        self.interface_c += ",\"{}\"".format(self.top)
        self.interface_c += ",\"{}\"".format(self.name)
        self.interface_c += ",{}".format(Layer.modelstr)
        self.interface_c += ",{});".format(Layer.datastr)
class ReLU(Layer):
    """ReLU activation layer: channel count passes through unchanged."""

    def __init__(self, layer_string=None,net_name=None):
        Layer.__init__(self, layer_string,net_name)
        self.__list_all_member__()

    def __calc_ioput__(self):
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        """Emit the inferx_relu(...) call for this layer."""
        self.interface_criterion = \
            "ReLU(char *bottom,char *top,char *name)"
        bottoms = "".join("\"{}\"".format(producer.top) for producer in self.bottom_layer)
        tail = ",\"{}\",\"{}\",{},{});".format(
            self.top, self.name, Layer.modelstr, Layer.datastr)
        self.interface_c = "inferx_relu(" + bottoms + tail
class InnerProduct(Layer):
    """InnerProduct (fully connected) layer"""
    __phases_number = ['num_output', 'axis']
    __phases_binary = ['bias_term', 'transpose']

    def __init__(self, layer_string=None,net_name=None):
        Layer.__init__(self, layer_string,net_name)
        # Caffe defaults; the parse calls below overwrite them when present.
        self.axis = 1
        self.bias_term = 'true'
        self.transpose = 'false'
        self.__init_binary_param__(self.__phases_binary[0], default='true')
        self.__init_binary_param__(self.__phases_binary[1], default='false')
        self.__init_number_param__(self.__phases_number)

    def __calc_ioput__(self):
        # num_output was parsed from the prototxt; input from the bottom layer.
        self.num_input = self.bottom_layer[0].num_output

    def __interface_c__(self):
        """Emit the inferx_innerproduct(...) call for this layer."""
        self.interface_criterion = \
            "InnerProduct(int num_input,int num_output,bool bias_term," \
            "bool transpose,char *bottom,char *top,char *name)"
        self.interface_c = "inferx_innerproduct("
        self.interface_c += "{},{}".format(self.num_input, self.num_output)
        self.interface_c += ",{}".format(self.bias_term)
        self.interface_c += ",{}".format(self.transpose)
        for index in range(len(self.bottom_layer)):
            self.interface_c += ",\"{}\"".format(self.bottom_layer[index].top)
        self.interface_c += ",\"{}\"".format(self.top)
        self.interface_c += ",\"{}\"".format(self.name)
        # Consistency fix: reference Layer.modelstr like every sibling layer
        # (self.modelstr resolved to the same inherited class attribute).
        self.interface_c += ",{}".format(Layer.modelstr)
        self.interface_c += ",{});".format(Layer.datastr)
class ArgMax(Layer):
    """ArgMax layer; emits a call to inferx_argmax."""
    __phases_number = ['top_k', 'axis']
    __phases_binary = ['out_max_val']

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        # defaults applied before parsing the prototxt string
        self.out_max_val = 'false'
        self.top_k = 1
        # NOTE(review): if the prototxt never sets axis, the literal text
        # "None" is emitted into the generated C call — confirm upstream.
        self.axis = None
        self.__init_binary_param__(self.__phases_binary[0], default='false')
        self.__init_number_param__(self.__phases_number)
        self.__list_all_member__()

    def __calc_ioput__(self):
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        self.interface_criterion = \
            "ArgMax(int top_k,int axis,bool out_max_val,char *bottom,char *top,char *name)"
        pieces = ["{},{}".format(self.top_k, self.axis),
                  "{}".format(self.out_max_val)]
        pieces.extend('"{}"'.format(b.top) for b in self.bottom_layer)
        pieces.append('"{}"'.format(self.top))
        pieces.append('"{}"'.format(self.name))
        pieces.append("{}".format(Layer.modelstr))
        pieces.append("{}".format(Layer.datastr))
        self.interface_c = "inferx_argmax(" + ",".join(pieces) + ");"
class BatchNorm(Layer):
    """BatchNorm layer; emits a call to inferx_batchnorm."""
    __phases_decimal = ['moving_average_fraction', 'eps']
    __phases_binary = ['use_global_stats']

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.use_global_stats = 'true'
        # decimal parameters are kept as single-element lists by the parser
        self.moving_average_fraction = ['0.999']
        self.eps = ['1e-9']
        self.__init_decimal_param__(self.__phases_decimal)
        self.__init_binary_param__(self.__phases_binary[0], default='true')
        self.__list_all_member__()

    def __calc_ioput__(self):
        self.__debug_print__(self.name)
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        self.interface_criterion = \
            "BatchNorm(float moving_average_fraction,float eps," \
            "bool use_global_stats,char *bottom,char *top,char *name)"
        args = ["{},{}".format(self.moving_average_fraction[0], self.eps[0]),
                "{}".format(self.use_global_stats)]
        args += ['"{}"'.format(b.top) for b in self.bottom_layer]
        args += ['"{}"'.format(self.top), '"{}"'.format(self.name),
                 "{}".format(Layer.modelstr), "{}".format(Layer.datastr)]
        self.interface_c = "inferx_batchnorm(" + ",".join(args) + ");"
class Concat(Layer):
    """Concat layer; emits a call to inferx_concat using a bottom_vector array."""
    __phases_number = ['axis', 'concat_dim']

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.axis = 1
        self.concat_dim = 1
        self.__init_number_param__(self.__phases_number)
        self.__list_all_member__()

    def __calc_ioput__(self):
        # Concatenation: output channels are the sum over all bottoms.
        self.num_input = [bottom.num_output for bottom in self.bottom_layer]
        self.num_output = sum(int(count) for count in self.num_input)

    def __interface_c__(self):
        self.interface_criterion = \
            "Concat(int num_output,int axis,int concat_dim," \
            "int bottom_num,char **bottoms,char *top,char *name)"
        # The generated C passes the bottoms via a pre-filled bottom_vector
        # array (populated by Net.__write_c_format__), not inline literals.
        self.interface_c = "inferx_concat({},{},{},{},bottom_vector".format(
            self.num_output, self.axis, self.concat_dim, len(self.bottom_layer))
        self.interface_c += ',"{}","{}",{},{});'.format(
            self.top, self.name, Layer.modelstr, Layer.datastr)
class Scale(Layer):
    """Scale layer; emits a call to inferx_scale."""
    __phases_number = ['axis', 'num_axes']
    __phases_binary = ['bias_term']

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.axis = 1
        self.num_axes = 1
        self.bias_term = 'false'
        self.__init_number_param__(self.__phases_number)
        self.__init_binary_param__(self.__phases_binary[0], default='false')
        self.__list_all_member__()

    def __calc_ioput__(self):
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        self.interface_criterion = \
            "Scale(int axis,int num_axes,bool bias_term,char *bottom,char *top,char *name)"
        parts = ["{},{}".format(self.axis, self.num_axes),
                 "{}".format(self.bias_term)]
        parts += ['"{}"'.format(b.top) for b in self.bottom_layer]
        parts += ['"{}"'.format(self.top), '"{}"'.format(self.name),
                  "{}".format(Layer.modelstr), "{}".format(Layer.datastr)]
        self.interface_c = "inferx_scale(" + ",".join(parts) + ");"
class Sigmoid(Layer):
    """Sigmoid activation layer; emits a call to inferx_sigmoid."""

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.__list_all_member__()

    def __calc_ioput__(self):
        # Element-wise activation: output channel count mirrors the input's.
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        self.interface_criterion = \
            "Sigmoid(char *bottom,char *top,char *name)"
        bottoms = "".join('"{}"'.format(b.top) for b in self.bottom_layer)
        self.interface_c = "inferx_sigmoid(" + bottoms + \
            ',"{}","{}",{},{});'.format(self.top, self.name,
                                        Layer.modelstr, Layer.datastr)
class Softmax(Layer):
    """Softmax layer; emits a call to inferx_softmax."""
    __phases_number = ['axis']

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.axis = 1
        self.__init_number_param__(self.__phases_number)
        self.__list_all_member__()

    def __calc_ioput__(self):
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        self.interface_criterion = \
            "Softmax(int axis,char *bottom,char *top,char *name)"
        fields = ["{}".format(self.axis)]
        fields += ['"{}"'.format(b.top) for b in self.bottom_layer]
        fields += ['"{}"'.format(self.top), '"{}"'.format(self.name),
                   "{}".format(Layer.modelstr), "{}".format(Layer.datastr)]
        self.interface_c = "inferx_softmax(" + ",".join(fields) + ");"
class TanH(Layer):
    """TanH activation layer; emits a call to inferx_tanh."""

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.__list_all_member__()

    def __calc_ioput__(self):
        # Element-wise activation: output channel count mirrors the input's.
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        self.interface_criterion = \
            "TanH(char *bottom,char *top,char *name)"
        bottoms = "".join('"{}"'.format(b.top) for b in self.bottom_layer)
        self.interface_c = "inferx_tanh(" + bottoms + \
            ',"{}","{}",{},{});'.format(self.top, self.name,
                                        Layer.modelstr, Layer.datastr)
class LRN(Layer):
    """Local Response Normalization layer; emits a call to inferx_LRN."""
    __phases_decimal = ['alpha', 'beta', 'k']
    __phases_number = ['local_size']

    def __init__(self, layer_string=None, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        # defaults; decimal parameters are kept as single-element lists
        self.local_size = 5
        self.alpha = [1.0]
        self.beta = [0.75]
        self.k = [1.0]
        self.__init_number_param__(self.__phases_number)
        self.__init_decimal_param__(self.__phases_decimal)

    def __calc_ioput__(self):
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        self.interface_criterion = \
            "LRN(int local_size,float alpha,float beta,float k,char *bottom,char *top,char *name)"
        head = "{},{},{},{}".format(self.local_size, self.alpha[0],
                                    self.beta[0], self.k[0])
        bottoms = "".join(',"{}"'.format(b.top) for b in self.bottom_layer)
        self.interface_c = "inferx_LRN(" + head + bottoms + \
            ',"{}","{}",{},{});'.format(self.top, self.name,
                                        Layer.modelstr, Layer.datastr)
class Dropout(Layer):
    """Dropout layer.

    NOTE(review): no __calc_ioput__/__interface_c__ overrides are defined, so
    the Layer base-class defaults apply -- presumably dropout is a no-op at
    inference time; confirm against the base class (not visible here).
    """
    def __init__(self, layer_string,net_name=None):
        # Delegate all prototxt parsing to the Layer base class.
        Layer.__init__(self, layer_string,net_name)
class Reshape(Layer):
    """Reshape layer; emits a call to inferx_reshape with explicit NCHW dims."""
    __phases_number = ['dim']

    def __init__(self, layer_string, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.dim = []
        self.__init_dim__()
        self.__list_all_member__()
        # The prototxt must supply all four dims (N, C, H, W) or this raises.
        self.batch_size = self.dim[0]
        self.channels = self.dim[1]
        self.height = self.dim[2]
        self.width = self.dim[3]

    def __init_dim__(self):
        """Collect every number that follows a 'dim:' token into self.dim."""
        phase_list = self.layer_string.split('dim:')
        phase_num = len(phase_list)
        if phase_num == 1:
            self.__debug_print__("Input layer %s has no input dims" % self.name, printout=True)
        elif phase_num >= 2:
            # Bug fix: the original format string had no placeholder, so it
            # always printed the literal text "phase_num".
            if DEBUG: print("phase_num:{}".format(phase_num))
            if DEBUG: print(phase_list[1])
            for index in range(1, phase_num):
                self.dim.extend(self.__find_all_num__(phase_list[index]))

    def __calc_ioput__(self):
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.channels

    def __interface_c__(self):
        # Typo fixes in the generated documentation string: "weidth" -> "width"
        # and the missing commas between parameters.
        self.interface_criterion = \
            "Reshape(int batch_size,int channels,int height,int width,char *bottom,char *top,char *name)"
        self.interface_c = "inferx_reshape("
        self.interface_c += "{},{},{},{}".format(
            self.batch_size, self.channels, self.height, self.width)
        self.interface_c += ",\"{}\",\"{}\",\"{}\",{},{});".format(
            self.bottom_layer[0].top, self.top, self.name, Layer.modelstr, Layer.datastr)
class Reorg(Layer):
    """Reorg layer (from Darknet); emits a call with explicit NCHW dims."""
    __phases_number = ['dim']

    def __init__(self, layer_string, net_name=None):
        Layer.__init__(self, layer_string, net_name)
        self.dim = []
        self.__init_dim__()
        self.__list_all_member__()
        # The prototxt must supply all four dims (N, C, H, W) or this raises.
        self.batch_size = self.dim[0]
        self.channels = self.dim[1]
        self.height = self.dim[2]
        self.width = self.dim[3]

    def __init_dim__(self):
        """Collect every number that follows a 'dim:' token into self.dim."""
        phase_list = self.layer_string.split('dim:')
        phase_num = len(phase_list)
        if phase_num == 1:
            self.__debug_print__("Input layer %s has no input dims" % self.name, printout=True)
        elif phase_num >= 2:
            # Bug fix: the original format string had no placeholder, so it
            # always printed the literal text "phase_num".
            if DEBUG: print("phase_num:{}".format(phase_num))
            if DEBUG: print(phase_list[1])
            for index in range(1, phase_num):
                self.dim.extend(self.__find_all_num__(phase_list[index]))

    def __calc_ioput__(self):
        self.num_input = self.bottom_layer[0].num_output
        self.num_output = self.num_input

    def __interface_c__(self):
        # Typo fixes in the generated documentation string: "weidth" -> "width"
        # and the missing commas between parameters.
        self.interface_criterion = \
            "Reorg(int batch_size,int channels,int height,int width,char *bottom,char *top,char *name)"
        # NOTE(review): this emits inferx_reshape, identical to the Reshape
        # layer -- it looks like a copy-paste.  Confirm whether inferxlite
        # provides a dedicated inferx_reorg and switch to it if so.
        self.interface_c = "inferx_reshape("
        self.interface_c += "{},{},{},{}".format(
            self.batch_size, self.channels, self.height, self.width)
        self.interface_c += ",\"{}\",\"{}\",\"{}\",{},{});".format(
            self.bottom_layer[0].top, self.top, self.name, Layer.modelstr, Layer.datastr)
class Net(object):
    """Convert caffe net protobuf file to inferxlite's *.c and *.h files."""

    def __init__(self, proto=None):
        self.__loaded = False
        self.__proto = proto
        self.__merge_bn = False
        # net name parsed from the prototxt (special characters removed)
        self.__name = None
        # base file name used to save the generated *.c and *.h files
        self.__file_name = proto.replace(".prototxt", "")
        self.__layers_string = None
        self.__layers = []
        # indices (in original prototxt order) of entries LayerFactory could
        # not turn into Layer objects (e.g. Region); consumed by
        # __write_non_layer_h_format__ to index back into __layers_string
        self.non_layer_idx_list = []
        self.__layernum = None
        self.__log = []
        self.__net = ""
        self.__cfile = []
        self.__read_proto__()
        self.__init_layers_()
        self.__link_layers__()
        self.__all_layers_type = self.__all_layers_type__()
        self.__write_c_format__(annotation=True)
        self.__write_h_format__(annotation=True)
        self.__write_non_layer_h_format__()

    def __update_log__(self, log, printout=False):
        """Record a log message; optionally echo it to stdout."""
        if printout:
            print(log)
        self.__log.append(log)

    def __update_line__(self, line, outlines, printout=False):
        """Append an output line to `outlines`; optionally echo it."""
        if printout:
            print(line)
        outlines.append(line)

    def __read_proto__(self):
        """Read the caffe net protobuf file, stripping comment annotations."""
        try:
            # `with` guarantees the handle is closed (the original leaked it)
            with open(self.__proto, "r") as proto_file:
                net_lines = proto_file.readlines()
            for line in net_lines:
                if not hasannotation(line):
                    self.__net += line
                else:
                    self.__net += dropannotation(line)
        except IOError:
            # Bug fix: the original applied '%' to a string whose placeholder
            # was '{}', raising TypeError instead of logging the failure.
            self.__update_log__("IOError file {} not opened.".format(self.__proto))
            return
        self.__layers_string = self.__net.split('layer {')
        # the chunk before the first 'layer {' holds the net-level name
        if not len(self.__layers_string[0].split("name:")) == 1:
            if not len(self.__layers_string[0].split("name:")[1].split('\"')) == 1:
                self.__name = self.__layers_string[0].split("name:")[1].split('\"')[1]
        if self.__name == None:
            self.__name = input("Please input the net name using \"name\" format:")
        self.__name = cformatparam(self.__name)
        self.__update_log__("Net has been loaded successfully.")
        self.__loaded = True

    def __init_layers_(self):
        """Instantiate a Layer object per layer string via LayerFactory."""
        if not self.__loaded:
            self.__update_log__("Net not loaded, please check your net proto file.")
        else:
            # data input
            if len(self.__layers_string[0].split("dim:")) >= 2:
                self.__layers.append(LayerFactory(layer_string=self.__layers_string[0], net_name=self.__name).layer)
            # non-data input
            # Bug fix: xrange is Python 2 only; the tool advertises python3.
            for layer_string_idx in range(len(self.__layers_string[1:])):
                layer_string = self.__layers_string[1:][layer_string_idx]
                self.__layers.append(LayerFactory(layer_string=layer_string, net_name=self.__name).layer)
            self.__update_log__("Layers has initialized successfully.")

    def __link_layers__(self):
        """Attach bottom layers to each layer, then compute I/O and C calls."""
        if DEBUG: print(len(self.__layers))
        # Bug fix: the original deleted entries from self.__layers while
        # iterating over range(len(...)), which skips the element following
        # each deletion and can raise IndexError; the recorded indices also
        # drifted from __layers_string order.  Partition first instead.
        kept_layers = []
        for index_i, layer in enumerate(self.__layers):
            if DEBUG: print(index_i, layer)
            if layer == None:
                self.non_layer_idx_list.append(index_i)
            else:
                kept_layers.append(layer)
        self.__layers = kept_layers
        for index_i in range(len(self.__layers)):
            if self.__layers[index_i].bottom == None:
                self.__layers[index_i].__calc_ioput__()
                self.__layers[index_i].__interface_c__()
                continue
            bottom_num = len(self.__layers[index_i].bottom)
            self.__layers[index_i].bottom_layer = []
            for index_ib in range(bottom_num):
                # link each bottom name to the first earlier layer whose top matches
                for index_j in range(index_i):
                    if self.__layers[index_i].bottom[index_ib] == self.__layers[index_j].top:
                        self.__layers[index_i].bottom_layer.append(self.__layers[index_j])
                        break
            self.__layers[index_i].__calc_ioput__()
            self.__layers[index_i].__interface_c__()

    def __all_layers_type__(self):
        """Return unique {layer_type: interface_criterion} mappings."""
        types = []
        for layer in self.__layers:
            # (renamed from `type`, which shadowed the builtin)
            entry = {"{}".format(layer.type): layer.interface_criterion}
            if not entry in types:
                types.append(entry)
        return types

    def __write_annotations__(self):
        """Emit the leading C comment documenting every generated function."""
        line = "/*\n" \
               "\tThis file is generated by net_compiler.py.\n" \
               "\tThe use of included functions list as follows:\n"
        for entry in self.__all_layers_type:
            for key in entry:
                if not entry[key] == None:
                    line += '\n\t' + key + ':\n'
                    line += '\t' + entry[key] + '\n'
        line += '*/\n\n'
        self.__update_line__(line, self.__cfile)

    def __write_c_format__(self, annotation=False):
        """Generate the .c file that replays the net as inferxlite calls."""
        if annotation:
            self.__write_annotations__()
        lines = "#include \"inferxlite_common.h\"\n"
        lines += "#include \"interface.h\"\n"
        lines += "void " + self.__name + "(char * path, char * model, char * data_c, void * pdata, void **pout)\n{\n"
        # size the scratch arrays for the widest Eltwise/multi-bottom layer
        max_bottom = 1
        max_len_coeff = 1
        for index in range(len(self.__layers)):
            if self.__layers[index].type == "Eltwise":
                if len(self.__layers[index].coeff) > max_len_coeff:
                    max_len_coeff = len(self.__layers[index].coeff)
            if not self.__layers[index].bottom == None:
                if len(self.__layers[index].bottom) > max_bottom:
                    max_bottom = len(self.__layers[index].bottom)
        if max_len_coeff > 1:
            lines += "\tfloat coeffs[%d];\n" % max_len_coeff
        if max_bottom > 1:
            lines += "\tchar* bottom_vector[%d];\n\n" % max_bottom
        # fixed preamble shared by every generated net function
        lines += "\tlong nchw[4];\n"
        lines += "\tchar data[1000];\n"
        lines += "\tinferx_parse_str(data_c, nchw, data);\n"
        lines += "\tinferx_set_init_var(&weightHasLoad, &dataHasInit, model, data);\n"
        lines += "\tinferx_var_add_init(model);\n"
        lines += "\tinferx_var_add_init(data);\n"
        lines += "\n"
        for index in range(len(self.__layers)):
            if(self.__layers[index].interface_c == None):
                self.__update_log__("Ignore layer {}.".format(self.__layers[index].name))
                continue
            if not self.__layers[index].bottom == None:
                # Eltwise/Concat consume their bottoms via bottom_vector;
                # fill it immediately before the call.
                if self.__layers[index].type == "Eltwise" or self.__layers[index].type == "Concat":
                    for bottom_i in range(len(self.__layers[index].bottom)):
                        lines += "\tbottom_vector[%d] = \"%s\";" % (bottom_i, self.__layers[index].bottom[bottom_i])
                    lines += "\n"
            # optionally fold BatchNorm (and a following Scale) into the
            # preceding Convolution by skipping their calls
            if(self.__merge_bn == True and self.__layers[index-1].type == "Convolution" and self.__layers[index].type == "BatchNorm"):
                continue
            if(self.__merge_bn == True and self.__layers[index-2].type == "Convolution" and self.__layers[index-1].type == "BatchNorm" and self.__layers[index].type == "Scale"):
                continue
            lines += "\t{}\n".format(self.__layers[index].interface_c)
        lines += "\n\t//DEBUG mode\n"
        lines += "\t//inferx_sort_data(\"{}\",{});\n".format(self.__layers[-1].top, "data")
        lines += "\t//inferx_print_data(\"{}\",{});\n".format(self.__layers[-1].top, "data")
        lines += "\tinferx_finalize(\"{}\");\n".format(self.__name)
        lines += "\n\treturn;\n}"
        self.__update_line__(lines, self.__cfile)
        # `with` guarantees the handle is flushed and closed
        with open("{}.c".format(self.__file_name), 'w+') as outf:
            outf.writelines(self.__cfile)

    def __write_h_format__(self, annotation=False):
        """Generate the .h file declaring the net's entry point."""
        line = "extern void {}(char * path, char * model, char * data_c, void * pdata, void **pout);".format(self.__name)
        with open("{}.h".format(self.__file_name), 'w+') as outf:
            outf.writelines(line)

    def __write_non_layer_h_format__(self):
        """Append C definitions for non-layer entries (e.g. Region) to the .h file."""
        self.__non_layer_register = ["Region"]
        h_file_line_list = []
        for idx in self.non_layer_idx_list:
            # +1 skips the prototxt header chunk before the first 'layer {'
            non_layer_str = self.__layers_string[idx + 1]
            try:
                type_pattern = '.*type: "(.*)"\n'
                layer_type = re.findall(type_pattern, non_layer_str)[0]
                if DEBUG: print("layer_type:%s" % layer_type)
            except IndexError:
                # narrowed from a bare `except:` -- only a failed regex match
                # (empty findall result) is expected here
                print("can't match layer type, its layer_string:%s" % non_layer_str)
                exit(-1)
            if layer_type in self.__non_layer_register:
                if layer_type == "Region":
                    region_c_code_line_list = parse_region(non_layer_str)
                    region_c_code_str = "\n\n" + "\n".join(region_c_code_line_list)
                    h_file_line_list.append(region_c_code_str)
                # For other non-layers support
                if layer_type == "xxxx":
                    pass
        # add input shape, etc variables
        input_shape_c_code_str = parse_network_input(self.__proto)
        h_file_line_list.append(input_shape_c_code_str)
        with open("{}.h".format(self.__file_name), "a") as h_file_handle:
            h_file_handle.writelines(h_file_line_list)
def var_from_py_to_c(var, var_name, var_len=1):
if var_len == 1:
if type(var) == str:
var_type = "char *"
elif type(var) == int:
var_type = "int"
elif type(var) == float:
var_type = "float"
else:
print("don't support type for variable %s" % var_name)
exit(-1)
else: # var_len > 1
if type(var) == list or \
type(var) == tuple:
if type(var[0]) == float:
var_type = "float"
elif type(var[0]) == int:
var_type = "int"
elif type(var[0]) == str:
var_type = "char *"
else:
print("don't support type for variable %s" % var_name)
exit(-1)
# reduce dimension to 1
var = eval('[%s]' % repr(var) \
.replace("(),", "") \
.replace("[],","") \
.replace('[', '') \
.replace("(", "") \
.replace(']', '') \
.replace(")", ""))
var = map(str, var)
var = "".join(["{", ", ".join(var), "}"])
var_name = "".join([var_name, "[", str(var_len), "]"])
else:
print("don't support type for variable %s" % var_name)
exit(-1)
c_str_list = [" "*0, var_type, " ", var_name, " = ", str(var), ";"]
c_str = "".join(map(str, c_str_list))
return(c_str)
def parse_network_input(prototxt_file):
    """Extract the input 'dim:' values from a prototxt and render them as C.

    Returns a string of C definitions for input_batch_size/channel/width/height
    (only as many as the prototxt provides dims, in file order), preceded by
    two blank lines so it can be appended to the generated header.
    """
    dim_pattern = r"dim: (.*)\n"
    with open(prototxt_file) as prototxt_handle:
        prototxt_content = prototxt_handle.read()
    # Bug fix: under Python 3, map() returns an iterator, and the original
    # then tried to slice it (dim_list[:len(...)]) -> TypeError.
    dim_list = [int(dim) for dim in re.findall(dim_pattern, prototxt_content)]
    var_name_list = ["input_batch_size", "input_channel", "input_width", "input_height"]
    dim_c_code_line_list = [
        var_from_py_to_c(dim, var_name)
        for dim, var_name in zip(dim_list[:len(var_name_list)], var_name_list)
    ]
    return "\n" * 2 + "\n".join(dim_c_code_line_list)
def parse_region(layer_str, var_name_prefix="region", var_name_prefix_pattern=r'parse_(.*)'):
    """Render the 20 darknet Region-layer parameters as C definition lines.

    The variable-name prefix ("region") is recovered from this function's own
    name via var_name_prefix_pattern.  Returns the list of C code lines.
    """
    c_code_line_list = []
    # Bug fix: `.func_name` is Python 2 only; Python 3 uses `.__name__`.
    var_name_prefix = re.findall(var_name_prefix_pattern, parse_region.__name__)[0]
    var_name_generator = lambda prefix, name: "_".join([prefix, name])

    def emit_scalar(name, caster):
        """Find 'name: value' in layer_str, cast it, and append its C line."""
        pattern = r"%s: (.*)\n" % name
        value = caster(re.findall(pattern, layer_str)[0])
        c_code_line_list.append(
            var_from_py_to_c(value, var_name_generator(var_name_prefix, name)))

    # ======================================
    # 1: anchors -- comma-separated float pairs, flattened into one C array
    anchors_pattern = r'anchors: "(.*)"\n'
    anchors = re.findall(anchors_pattern, layer_str)[0]
    # Bug fix: under Python 3 the original kept map objects here and later
    # called len() on one -> TypeError.  Materialize to real lists.
    anchors_2d_list = [pair.split(",") for pair in anchors.split(", ")]
    anchors = [float(a) for a in sum(anchors_2d_list, [])]
    var_name = var_name_generator(var_name_prefix, "anchors")
    c_code_line_list.append(var_from_py_to_c(anchors, var_name, len(anchors)))
    # ======================================
    # 2-20: scalar parameters, in the same order as the original code
    # (int or float per darknet's region-layer definition)
    emit_scalar("bias_match", int)
    emit_scalar("classes", int)
    emit_scalar("coords", int)
    emit_scalar("num", int)
    emit_scalar("softmax", int)
    emit_scalar("jitter", float)
    emit_scalar("rescore", int)
    emit_scalar("object_scale", int)
    emit_scalar("noobject_scale", int)
    emit_scalar("class_scale", int)
    emit_scalar("coord_scale", int)
    emit_scalar("absolute", int)
    emit_scalar("thresh", float)
    emit_scalar("random", float)
    emit_scalar("nms_thresh", float)
    emit_scalar("background", int)
    emit_scalar("tree_thresh", float)
    emit_scalar("relative", int)
    emit_scalar("box_thresh", float)
    return c_code_line_list
if __name__ == "__main__":
    if len(sys.argv) != 2:
        python_str = "python3"
        this_pyfile_name = sys.argv[0]
        print("Usage: %s %s CAFFE_PROTOTXT\n" % (python_str, this_pyfile_name))
        exit(-1)
    else:
        prototxt_file = sys.argv[1]
        net = Net(prototxt_file)
        # Bug fix: the success message referenced self.__name, which does not
        # exist at module scope (NameError).  Derive the base name from the
        # input path, matching the file names Net actually writes.
        base_name = prototxt_file.replace(".prototxt", "")
        print("Successful conversion from {}.prototxt to {}.c and {}.h"
              .format(base_name, base_name, base_name))
|
<gh_stars>0
import base64
import os
import platform
import shutil
import time
from queue import Queue
from typing import Any, Dict, Generator, List, Optional, Tuple, Union, cast
import click
import requests
from hypothesis import settings
from ... import service
from ..._compat import metadata
from ...constants import CodeSampleStyle, __version__
from ...models import Response, Status
from ...runner import events
from ...runner.serialization import SerializedCase, SerializedError, SerializedTestResult, deduplicate_failures
from ..context import ExecutionContext
from ..handlers import EventHandler
# Hint appended to schema-validation errors: tells the user how to opt out of
# input schema validation (at the cost of undefined behavior during the run).
DISABLE_SCHEMA_VALIDATION_MESSAGE = (
    "\nYou can disable input schema validation with --validate-schema=false "
    "command-line option\nIn this case, Schemathesis cannot guarantee proper"
    " behavior during the test run"
)
# Pre-filled GitHub "new issue" URL (labels and template are URL-encoded).
ISSUE_TRACKER_URL = (
    "https://github.com/schemathesis/schemathesis/issues/new?"
    "labels=Status%3A+Review+Needed%2C+Type%3A+Bug&template=bug_report.md&title=%5BBUG%5D"
)
# NOTE(review): presumably how many ticks each spinner frame repeats for --
# the usage site is not visible in this chunk; confirm there.
SPINNER_REPETITION_NUMBER = 10
def get_terminal_width() -> int:
    """Return the terminal column count, with an 80-column fallback.

    Some CI/CD providers (e.g. CircleCI) report a (0, 0) terminal size, hence
    the explicit default.
    """
    return shutil.get_terminal_size(fallback=(80, 24)).columns
def display_section_name(title: str, separator: str = "=", extra: str = "", **kwargs: Any) -> None:
    """Print section name with separators in terminal with the given title nicely centered."""
    extra_part = extra if not extra else f" [{extra}]"
    banner = f" {title}{extra_part} ".center(get_terminal_width(), separator)
    kwargs.setdefault("bold", True)
    click.secho(banner, **kwargs)
def display_subsection(result: SerializedTestResult, color: Optional[str] = "red") -> None:
    """Print an underscore-separated subsection header for one test result."""
    display_section_name(
        result.verbose_name,
        "_",
        result.data_generation_method,
        fg=color,
    )
def get_percentage(position: int, length: int) -> str:
    """Format completion percentage in square brackets, e.g. ``[ 25%]``."""
    percentage = position * 100 // length
    return "[{}]".format(f"{percentage}%".rjust(4))
def display_execution_result(context: ExecutionContext, event: events.AfterExecution) -> None:
    """Display an appropriate symbol for the given event's execution result."""
    symbol_by_status = {
        Status.success: (".", "green"),
        Status.failure: ("F", "red"),
        Status.error: ("E", "red"),
    }
    symbol, color = symbol_by_status[event.status]
    context.current_line_length += len(symbol)
    click.secho(symbol, nl=False, fg=color)
def display_percentage(context: ExecutionContext, event: events.AfterExecution) -> None:
    """Add the current progress in % to the right side of the current line."""
    operations_count = cast(int, context.operations_count)  # is already initialized via `Initialized` event
    current_percentage = get_percentage(context.operations_processed, operations_count)
    styled = click.style(current_percentage, fg="cyan")
    # Fill to the right border of the terminal.  The ANSI escape codes in the
    # styled string add invisible length, hence the len(styled) -
    # len(current_percentage) correction; padding already accumulated in
    # context.current_line_length is subtracted out.
    width = get_terminal_width() - context.current_line_length
    length = max(width + len(styled) - len(current_percentage), 1)
    click.echo(f"{styled:>{length}}")
def display_summary(event: events.Finished) -> None:
    """Print the final colored summary banner for the whole run."""
    text, fg = get_summary_output(event)
    display_section_name(text, fg=fg)
def get_summary_message_parts(event: events.Finished) -> List[str]:
    """Build the "N passed"/"N failed"/"N errored" fragments, omitting zero counts."""
    counts = [
        (event.passed_count, "passed"),
        (event.failed_count, "failed"),
        (event.errored_count, "errored"),
    ]
    return [f"{count} {label}" for count, label in counts if count]
def get_summary_output(event: events.Finished) -> Tuple[str, str]:
    """Return the summary banner text and its color for the finished run."""
    parts = get_summary_message_parts(event)
    if not parts:
        return "Empty test suite", "yellow"
    message = f'{", ".join(parts)} in {event.running_time:.2f}s'
    color = "red" if (event.has_failures or event.has_errors) else "green"
    return message, color
def display_hypothesis_output(hypothesis_output: List[str]) -> None:
    """Show falsifying examples from Hypothesis output if there are any."""
    if not hypothesis_output:
        return
    display_section_name("HYPOTHESIS OUTPUT")
    click.secho("\n".join(hypothesis_output), fg="red")
def display_errors(context: ExecutionContext, event: events.Finished) -> None:
    """Display all errors in the test run."""
    if not event.has_errors:
        return
    display_section_name("ERRORS")
    should_display_full_traceback_message = False
    for result in context.results:
        if not result.has_errors:
            continue
        # `|=` accumulates across results: show the hint if ANY error warrants it
        should_display_full_traceback_message |= display_single_error(context, result)
    if event.generic_errors:
        display_generic_errors(context, event.generic_errors)
    # Only suggest the flag when tracebacks are currently hidden
    if should_display_full_traceback_message and not context.show_errors_tracebacks:
        click.secho(
            "Add this option to your command line parameters to see full tracebacks: --show-errors-tracebacks", fg="red"
        )
def display_single_error(context: ExecutionContext, result: SerializedTestResult) -> bool:
    """Show every error of one test result; return whether the traceback hint is needed."""
    display_subsection(result)
    needs_traceback_hint = False
    for single_error in result.errors:
        needs_traceback_hint |= _display_error(context, single_error, result.seed)
    return needs_traceback_hint
def display_generic_errors(context: ExecutionContext, errors: List[SerializedError]) -> None:
    """Render errors that are not attached to a particular API operation."""
    for generic_error in errors:
        title = generic_error.title or "Generic error"
        display_section_name(title, "_", fg="red")
        _display_error(context, generic_error)
def display_full_traceback_message(exception: str) -> bool:
    """Whether this exception should trigger the "show full tracebacks" suggestion."""
    # Deadline errors get a dedicated, more actionable hint elsewhere.
    is_deadline_error = exception.startswith("DeadlineExceeded")
    return not is_deadline_error
def _display_error(context: ExecutionContext, error: SerializedError, seed: Optional[int] = None) -> bool:
    """Render one error (with optional traceback and example) and return whether
    the "--show-errors-tracebacks" hint should be shown for it."""
    if context.show_errors_tracebacks:
        message = error.exception_with_traceback
    else:
        message = error.exception
    # Attach actionable hints for two well-known error classes
    if error.exception.startswith("InvalidSchema") and context.validate_schema:
        message += DISABLE_SCHEMA_VALIDATION_MESSAGE + "\n"
    if error.exception.startswith("DeadlineExceeded"):
        message += (
            "Consider extending the deadline with the `--hypothesis-deadline` CLI option.\n"
            "You can disable it completely with `--hypothesis-deadline=None`.\n"
        )
    click.secho(message, fg="red")
    if error.example is not None:
        display_example(context, error.example, seed=seed)
    return display_full_traceback_message(error.exception)
def display_failures(context: ExecutionContext, event: events.Finished) -> None:
    """Display all failures in the test run."""
    if not event.has_failures:
        return
    # Errored results are reported in the ERRORS section instead.
    non_errored = [res for res in context.results if not res.is_errored]
    if not non_errored:
        return
    display_section_name("FAILURES")
    for single_result in non_errored:
        if single_result.has_failures:
            display_failures_for_single_test(context, single_result)
def display_failures_for_single_test(context: ExecutionContext, result: SerializedTestResult) -> None:
    """Display a failure for a single method / path."""
    display_subsection(result)
    # Identical failures are collapsed before display
    checks = deduplicate_failures(result.checks)
    for idx, check in enumerate(checks, 1):
        message: Optional[str]
        if check.message:
            message = f"{idx}. {check.message}"
        else:
            message = None
        display_example(context, check.example, check.response, message, result.seed)
        # Display every time except the last check
        if idx != len(checks):
            click.echo("\n")
def reduce_schema_error(message: str) -> str:
    """Reduce the error schema output.

    Truncates a jsonschema validation message at the first colon after the
    "Failed validating" marker; messages without the marker are returned as-is.
    """
    marker_index = message.find("Failed validating")
    if marker_index == -1:
        # BUG FIX: previously `message.find(":", -1)` was evaluated with the -1
        # returned by the failed marker search. A negative start is slice-like,
        # so the search began at the LAST character, and any message that simply
        # ended with ":" was silently truncated. Leave such messages untouched.
        return message
    end_of_message_index = message.find(":", marker_index)
    if end_of_message_index != -1:
        return message[:end_of_message_index]
    return message
def display_example(
    context: ExecutionContext,
    case: SerializedCase,
    response: Optional[Response] = None,
    message: Optional[str] = None,
    seed: Optional[int] = None,
) -> None:
    """Print one failing example: the check message, the test case itself,
    the response payload (if any) and commands to reproduce the failure."""
    if message is not None:
        if not context.verbosity:
            # Shorten the jsonschema validation output in non-verbose mode
            message = reduce_schema_error(message)
        click.secho(message, fg="red")
        click.echo()
    for line in case.text_lines:
        click.secho(line, fg="red")
    click.echo()
    if response is not None and response.body is not None:
        # The body is transported base64-encoded; decode it with the response
        # encoding, falling back to UTF-8 and replacing undecodable bytes.
        payload = base64.b64decode(response.body).decode(response.encoding or "utf8", errors="replace")
        click.secho(f"----------\n\nResponse payload: `{payload}`\n", fg="red")
    if context.code_sample_style == CodeSampleStyle.python:
        click.secho(f"Run this Python code to reproduce this failure: \n\n {case.requests_code}\n", fg="red")
    if context.code_sample_style == CodeSampleStyle.curl:
        click.secho(f"Run this cURL command to reproduce this failure: \n\n {case.curl_code}\n", fg="red")
    if seed is not None:
        click.secho(f"Or add this option to your command line parameters: --hypothesis-seed={seed}", fg="red")
def display_application_logs(context: ExecutionContext, event: events.Finished) -> None:
    """Print logs captured during the application run."""
    if not event.has_logs:
        return
    display_section_name("APPLICATION LOGS")
    for single_result in context.results:
        if single_result.has_logs:
            display_single_log(single_result)
def display_single_log(result: SerializedTestResult) -> None:
    """Print the captured log records of one test result, blank-line separated."""
    display_subsection(result, None)
    joined_logs = "\n\n".join(result.logs)
    click.echo(joined_logs)
def display_statistic(context: ExecutionContext, event: events.Finished) -> None:
    """Format and print statistic collected by :obj:`models.TestResult`."""
    display_section_name("SUMMARY")
    click.echo()
    total = event.total
    if event.is_empty or not total:
        click.secho("No checks were performed.", bold=True)
    if total:
        display_checks_statistics(total)
    # Point the user at any artifacts produced during the run
    if context.cassette_file_name:
        click.echo()
        category = click.style("Network log", bold=True)
        click.secho(f"{category}: {context.cassette_file_name}")
    if context.junit_xml_file:
        click.echo()
        category = click.style("JUnit XML file", bold=True)
        click.secho(f"{category}: {context.junit_xml_file}")
    handle_service_integration(context)
def handle_service_integration(context: ExecutionContext) -> None:
    """If Schemathesis.io integration is enabled, wait for the handler & print the resulting status."""
    if context.service:
        click.echo()
        title = click.style("Schemathesis.io", bold=True)
        # Blocks (with a spinner) until the background handler reports back or times out
        event = wait_for_service_handler(context.service.queue, title)
        color = {
            service.Completed: "green",
            service.Error: "red",
            service.Timeout: "red",
        }[event.__class__]
        status = click.style(event.name, fg=color, bold=True)
        # `\r` + nl=False overwrite the spinner line left by `wait_for_service_handler`
        click.echo(f"{title}: {status}\r", nl=False)
        click.echo()
        if isinstance(event, service.Completed):
            report_title = click.style("Report", bold=True)
            click.echo(f"{report_title}: {event.short_url}")
        if isinstance(event, service.Error):
            display_service_error(event, context)
def display_service_error(event: service.Error, context: ExecutionContext) -> None:
    """Show information about an error during communication with Schemathesis.io."""
    click.echo()
    if isinstance(event.exception, requests.HTTPError):
        status_code = event.exception.response.status_code
        click.secho(f"Schemathesis.io responded with HTTP {status_code}", fg="red")
        if 500 <= status_code <= 599:
            # Server error, should be resolved soon
            click.secho(
                "It is likely that we are already notified about the issue and working on a fix\n"
                "Please, try again in 30 minutes",
                fg="red",
            )
        elif status_code == 401:
            # Likely an invalid token
            click.secho(
                "Please, check that you use the proper CLI upload token\n"
                "See https://schemathesis.readthedocs.io/en/stable/service.html for more details",
                fg="red",
            )
        else:
            # Other client-side errors are likely caused by a bug on the CLI side
            ask_to_report(event)
    elif isinstance(event.exception, requests.RequestException):
        # Network-level failure (DNS, connection, timeout) - not our bug, so
        # don't ask the user to open an issue
        ask_to_report(event, report_to_issues=False)
    else:
        ask_to_report(event)
def ask_to_report(event: service.Error, report_to_issues: bool = True, extra: str = "") -> None:
    """Print the upload-failure message, optionally asking the user to file an issue.

    :param report_to_issues: Include the issue-tracker request when True.
    :param extra: Extra text inserted before the ask.
    """
    # Likely an internal Schemathesis error
    message = event.get_message(True)
    # Include the raw service response only when one was actually received
    if isinstance(event.exception, requests.RequestException) and event.exception.response is not None:
        response = f"Response: {event.exception.response.text}"
    else:
        response = ""
    if report_to_issues:
        ask = f"Please, consider reporting the traceback below it to our issue tracker: {ISSUE_TRACKER_URL}\n"
    else:
        ask = ""
    click.secho(
        f"An error happened during uploading reports to Schemathesis.io:\n"
        f"{extra}"
        f"{ask}"
        f"{response}"
        f"\n {message.strip()}",
        fg="red",
    )
def wait_for_service_handler(queue: Queue, title: str) -> service.Event:
    """Wait for the Schemathesis.io handler to finish its job."""
    start = time.monotonic()
    spinner = create_spinner(SPINNER_REPETITION_NUMBER)
    # The testing process is done and we need to wait for the Schemathesis.io handler to finish
    # It might still have some data to send
    while queue.empty():
        if time.monotonic() - start >= service.WORKER_FINISH_TIMEOUT:
            return service.Timeout()
        # `\r` with nl=False redraws the spinner on the same terminal line
        click.echo(f"{title}: {next(spinner)}\r", nl=False)
        time.sleep(service.WORKER_CHECK_PERIOD)
    return queue.get()
def create_spinner(repetitions: int) -> Generator[str, None, None]:
    """A simple spinner that yields its individual characters.

    Each braille frame is repeated `repetitions` times so the animation speed
    can be tuned by the caller; the sequence cycles forever.
    """
    assert repetitions > 0, "The number of repetitions should be greater than zero"
    frames = "⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏"
    while True:
        for frame in frames:
            # Skip branch coverage, as it is not possible because of the assertion above
            for _ in range(repetitions):  # pragma: no branch
                yield frame
def display_checks_statistics(total: Dict[str, Dict[Union[str, Status], int]]) -> None:
    """Print the "Performed checks:" table, one row per check name."""
    padding = 20
    # Column widths: widest check name / widest "X / Y passed" counter / verdict
    col1_len = max(map(len, total.keys())) + padding
    # `* 2` leaves room for both numbers of the "success / total" pair
    col2_len = len(str(max(total.values(), key=lambda v: v["total"])["total"])) * 2 + padding
    col3_len = padding
    click.secho("Performed checks:", bold=True)
    template = f" {{:{col1_len}}}{{:{col2_len}}}{{:{col3_len}}}"
    for check_name, results in total.items():
        display_check_result(check_name, results, template)
def display_check_result(check_name: str, results: Dict[Union[str, Status], int], template: str) -> None:
    """Show results of single check execution."""
    # Any recorded failure marks the whole check as failed
    if Status.failure in results:
        verdict, color = "FAILED", "red"
    else:
        verdict, color = "PASSED", "green"
    passed_count = results.get(Status.success, 0)
    total_count = results.get("total", 0)
    styled_verdict = click.style(verdict, fg=color, bold=True)
    click.echo(template.format(check_name, f"{passed_count} / {total_count} passed", styled_verdict))
def display_internal_error(context: ExecutionContext, event: events.InternalError) -> None:
    """Print an internal (non-test) error, with the traceback when enabled."""
    click.secho(event.message, fg="red")
    if event.exception:
        if context.show_errors_tracebacks:
            message = event.exception_with_traceback
        else:
            message = event.exception
        message = (
            f"Error: {message}\n"
            f"Add this option to your command line parameters to see full tracebacks: --show-errors-tracebacks"
        )
        # Schema loading failures get an extra hint about disabling validation
        if event.exception_type == "schemathesis.exceptions.SchemaLoadingError":
            message += "\n" + DISABLE_SCHEMA_VALIDATION_MESSAGE
        click.secho(message, fg="red")
def handle_initialized(context: ExecutionContext, event: events.Initialized) -> None:
    """Display information about the test session."""
    context.operations_count = cast(int, event.operations_count)  # INVARIANT: should not be `None`
    display_section_name("Schemathesis test session starts")
    # pytest-style header: platform + versions of the relevant packages
    versions = (
        f"platform {platform.system()} -- "
        f"Python {platform.python_version()}, "
        f"schemathesis-{__version__}, "
        f"hypothesis-{metadata.version('hypothesis')}, "
        f"hypothesis_jsonschema-{metadata.version('hypothesis_jsonschema')}, "
        f"jsonschema-{metadata.version('jsonschema')}"
    )
    click.echo(versions)
    click.echo(f"rootdir: {os.getcwd()}")
    # Accessing Hypothesis' private attribute; guarded by the `type: ignore`
    current_profile = settings._current_profile  # type: ignore[attr-defined]
    click.echo(
        f"hypothesis profile '{current_profile}' "  # type: ignore
        f"-> {settings.get_profile(current_profile).show_changed()}"
    )
    if event.location is not None:
        click.echo(f"Schema location: {event.location}")
    click.echo(f"Base URL: {event.base_url}")
    click.echo(f"Specification version: {event.specification_name}")
    click.echo(f"Workers: {context.workers_num}")
    click.secho(f"Collected API operations: {context.operations_count}", bold=True)
    if context.service is not None:
        click.secho("Schemathesis.io: ENABLED", bold=True)
    if context.operations_count >= 1:
        click.echo()
# Marker appended when an operation name is cut to fit the terminal width.
TRUNCATION_PLACEHOLDER = "[...]"
def handle_before_execution(context: ExecutionContext, event: events.BeforeExecution) -> None:
    """Display what method / path will be tested next."""
    # We should display execution result + percentage in the end. For example:
    # `GET /pets .        [ 10%]` - reserve room for the result char + percentage.
    max_length = get_terminal_width() - len(" . [XXX%]") - len(TRUNCATION_PLACEHOLDER)
    message = event.verbose_name
    if event.recursion_level > 0:
        message = f"{' ' * event.recursion_level}-> {message}"
        # Recursive references are only discovered at runtime, so the total
        # operation count grows here; an unconditional increment would inflate
        # the percentage denominator for every ordinary operation.
        # This value is not `None` - the value is set in runtime before this line
        context.operations_count += 1  # type: ignore
    # Use the shared placeholder constant instead of a duplicated "[...]" literal
    message = message[:max_length] + (message[max_length:] and TRUNCATION_PLACEHOLDER) + " "
    context.current_line_length = len(message)
    click.echo(message, nl=False)
def handle_after_execution(context: ExecutionContext, event: events.AfterExecution) -> None:
    """Display the execution result + current progress at the same line with the method / path names."""
    context.operations_processed += 1
    context.results.append(event.result)
    # Order matters: the result symbol first, then the right-aligned percentage
    display_execution_result(context, event)
    display_percentage(context, event)
def handle_finished(context: ExecutionContext, event: events.Finished) -> None:
    """Show the outcome of the whole testing session."""
    click.echo()
    # Section order is deliberate: hypothesis output, errors, failures, logs, stats, summary
    display_hypothesis_output(context.hypothesis_output)
    display_errors(context, event)
    display_failures(context, event)
    display_application_logs(context, event)
    display_statistic(context, event)
    click.echo()
    display_summary(event)
def handle_interrupted(context: ExecutionContext, event: events.Interrupted) -> None:
    """React to a keyboard interrupt with a dedicated section line."""
    click.echo()
    display_section_name("KeyboardInterrupt", "!", bold=False)
def handle_internal_error(context: ExecutionContext, event: events.InternalError) -> None:
    """Display the internal error and abort the CLI run with a non-zero exit."""
    display_internal_error(context, event)
    raise click.Abort
class DefaultOutputStyleHandler(EventHandler):
    """Default CLI output style: routes each execution event to its display handler."""

    def handle_event(self, context: ExecutionContext, event: events.ExecutionEvent) -> None:
        """Choose and execute a proper handler for the given event."""
        # A plain `if` chain (not `elif`): the event types are mutually exclusive,
        # so at most one branch fires per call.
        if isinstance(event, events.Initialized):
            handle_initialized(context, event)
        if isinstance(event, events.BeforeExecution):
            handle_before_execution(context, event)
        if isinstance(event, events.AfterExecution):
            # Collect Hypothesis output before displaying the result line
            context.hypothesis_output.extend(event.hypothesis_output)
            handle_after_execution(context, event)
        if isinstance(event, events.Finished):
            handle_finished(context, event)
        if isinstance(event, events.Interrupted):
            handle_interrupted(context, event)
        if isinstance(event, events.InternalError):
            handle_internal_error(context, event)
|
# -*- coding: utf-8 -*-
import os
import sys
from django.conf import settings
from django.contrib.auth.models import Group, Permission
from django.contrib.sites.models import Site
from django.core.management.base import BaseCommand, CommandError
from django.utils.translation import ugettext_lazy as _
from django.core.management import call_command
from shared_foundation.constants import *
class Command(BaseCommand):
    """
    Console:
    python manage.py init_app

    Sets up the application database for usage: configures the current Site
    record and creates/updates the role Groups with their permission sets.
    """
    help = _('Command will setup the application database to be ready for usage.')

    # ------------------------------------------------------------------ #
    # Permission code-name sets, one per role group.                     #
    # Executives and Management previously carried two byte-identical   #
    # copies of the full list - they now share one constant.            #
    # ------------------------------------------------------------------ #
    FULL_ACCESS_PERMISSION_CODENAMES = [
        # --- Customers --- #
        'can_get_customers',
        'can_get_customer',
        'can_post_customer',
        'can_put_customer',
        'can_delete_customer',
        # --- Associate --- #
        'can_get_associates',
        'can_get_associate',
        'can_post_associate',
        'can_put_associate',
        'can_delete_associate',
        # --- Work Order --- #
        'can_get_orders',
        'can_get_order',
        'can_post_order',
        'can_put_order',
        'can_delete_order',
        # --- Comment --- #
        'can_get_comments',
        'can_get_comment',
        'can_post_comment',
        'can_put_comment',
        'can_delete_comment',
        # --- Tag --- #
        'can_get_tags',
        'can_get_tag',
        'can_post_tag',
        'can_put_tag',
        'can_delete_tag',
        # --- Skill Set --- #
        'can_get_skill_sets',
        'can_get_skill_set',
        'can_post_skill_set',
        'can_put_skill_set',
        'can_delete_skill_set',
        # --- Staff --- #
        'can_get_staves',
        'can_get_staff',
        'can_post_staff',
        'can_put_staff',
        'can_delete_staff',
        # --- Partner --- #
        'can_get_partners',
        'can_get_partner',
        'can_post_partner',
        'can_put_partner',
        'can_delete_partner',
        # --- Away Log --- #
        'can_get_away_logs',
        'can_get_away_log',
        'can_post_away_log',
        'can_put_away_log',
        'can_delete_away_log',
        # --- Insurance Requirement --- #
        'can_get_insurance_requirements',
        'can_get_insurance_requirement',
        'can_post_insurance_requirement',
        'can_put_insurance_requirement',
        'can_delete_insurance_requirement',
        # --- Task Item --- #
        'can_get_task_items',
        'can_get_task_item',
        'can_post_task_item',
        'can_put_task_item',
        'can_delete_task_item',
        # --- Work Order Service Fee --- #
        'can_get_order_service_fees',
        'can_get_order_service_fee',
        'can_post_order_service_fee',
        'can_put_order_service_fee',
        'can_delete_order_service_fee',
        # --- Public Image Upload --- #
        'can_get_public_image_uploads',
        'can_get_public_image_upload',
        'can_post_public_image_upload',
        'can_put_public_image_upload',
        'can_delete_public_image_upload',
    ]

    # Frontline staff: mostly no delete rights; commented entries record
    # deliberately excluded permissions.
    FRONTLINE_PERMISSION_CODENAMES = [
        # --- Customers --- #
        'can_get_customers',
        'can_get_customer',
        'can_post_customer',
        'can_put_customer',
        # 'can_delete_customer',
        # --- Associate --- #
        'can_get_associates',
        'can_get_associate',
        'can_post_associate',
        'can_put_associate',
        # 'can_delete_associate',
        # --- Work Order --- #
        'can_get_orders',
        'can_get_order',
        'can_post_order',
        'can_put_order',
        # 'can_delete_order',
        # --- Comment --- #
        'can_get_comments',
        'can_get_comment',
        'can_post_comment',
        'can_put_comment',
        # 'can_delete_comment',
        # --- Tag --- #
        'can_get_tags',
        'can_get_tag',
        'can_post_tag',
        'can_put_tag',
        # 'can_delete_tag',
        # --- Skill Set --- #
        'can_get_skill_sets',
        'can_get_skill_set',
        # --- Staff --- #
        # 'can_get_staves',
        'can_get_staff',
        # 'can_post_staff',
        'can_put_staff',
        # 'can_delete_staff',
        # --- Partner --- #
        'can_get_partners',
        'can_get_partner',
        'can_post_partner',
        'can_put_partner',
        'can_delete_partner',
        # --- Away Log --- #
        'can_get_away_logs',
        'can_get_away_log',
        'can_post_away_log',
        'can_put_away_log',
        'can_delete_away_log',
        # --- Insurance Requirement --- #
        'can_get_insurance_requirements',
        'can_get_insurance_requirement',
        'can_post_insurance_requirement',
        'can_put_insurance_requirement',
        'can_delete_insurance_requirement',
        # --- Task Item --- #
        'can_get_task_items',
        'can_get_task_item',
        'can_post_task_item',
        'can_put_task_item',
        'can_delete_task_item',
        # --- Work Order Service Fee --- #
        'can_get_order_service_fees',
        'can_get_order_service_fee',
        'can_post_order_service_fee',
        'can_put_order_service_fee',
        'can_delete_order_service_fee',
        # --- Public Image Upload --- #
        'can_get_public_image_uploads',
        'can_get_public_image_upload',
        'can_post_public_image_upload',
        'can_put_public_image_upload',
        'can_delete_public_image_upload',
    ]

    # Associates: read-mostly access to their own profile + reference data.
    ASSOCIATE_PERMISSION_CODENAMES = [
        # --- Associate --- #
        'can_get_associate',
        'can_put_associate',
        # --- Tag --- #
        'can_get_tags',
        'can_get_tag',
        # --- Skill Set --- #
        'can_get_skill_sets',
        'can_get_skill_set',
        # --- Insurance Requirement --- #
        'can_get_insurance_requirements',
        'can_get_insurance_requirement',
        # --- Work Order Service Fee --- #
        'can_get_order_service_fees',
        'can_get_order_service_fee',
        # --- Public Image Upload --- #
        'can_get_public_image_uploads',
        'can_get_public_image_upload',
        'can_post_public_image_upload',
        'can_put_public_image_upload',
        'can_delete_public_image_upload',
    ]

    # Customers: same shape as associates, scoped to the customer profile.
    CUSTOMER_PERMISSION_CODENAMES = [
        # --- Customers --- #
        'can_get_customer',
        'can_put_customer',
        # --- Tag --- #
        'can_get_tags',
        'can_get_tag',
        # --- Skill Set --- #
        'can_get_skill_sets',
        'can_get_skill_set',
        # --- Insurance Requirement --- #
        'can_get_insurance_requirements',
        'can_get_insurance_requirement',
        # --- Work Order Service Fee --- #
        'can_get_order_service_fees',
        'can_get_order_service_fee',
        # --- Public Image Upload --- #
        'can_get_public_image_uploads',
        'can_get_public_image_upload',
        'can_post_public_image_upload',
        'can_put_public_image_upload',
        'can_delete_public_image_upload',
    ]

    def handle(self, *args, **options):
        """Entry point: configure the Site record, then create the role groups."""
        self.process_site()
        self.process_groups()
        self.stdout.write(
            self.style.SUCCESS(_('Successfully initialized application.'))
        )

    def process_site(self):
        """Point the current `django.contrib.sites` record at our deployment domain."""
        current_site = Site.objects.get_current()
        # NOTE(review): domain setting is named COMICSCANTINA_* while the site
        # is named "Workery" - looks like a leftover from a rename; confirm.
        current_site.domain = settings.COMICSCANTINA_APP_HTTP_DOMAIN
        current_site.name = "Workery"
        current_site.save()

    def process_groups(self):
        """Create (or update) every role group and attach its permission set."""
        self._sync_group(EXECUTIVE_GROUP_ID, "Executives", self.FULL_ACCESS_PERMISSION_CODENAMES)
        self._sync_group(MANAGEMENT_GROUP_ID, "Management", self.FULL_ACCESS_PERMISSION_CODENAMES)
        self._sync_group(FRONTLINE_GROUP_ID, "Frontline Staff", self.FRONTLINE_PERMISSION_CODENAMES)
        self._sync_group(ASSOCIATE_GROUP_ID, "Associates", self.ASSOCIATE_PERMISSION_CODENAMES)
        self._sync_group(CUSTOMER_GROUP_ID, "Customers", self.CUSTOMER_PERMISSION_CODENAMES)

    def _sync_group(self, group_id, name, permission_codenames):
        """Get-or-create the group with `group_id`, (re)name it, and add every
        `Permission` whose codename appears in `permission_codenames`."""
        group, created = Group.objects.get_or_create(id=group_id)
        group.name = name
        group.save()
        permissions = Permission.objects.filter(codename__in=permission_codenames)
        for permission in permissions.all():
            group.permissions.add(permission)
|
<gh_stars>1-10
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import math
import time
import apache_beam as beam
import apache_beam.runners.sdf_utils as sdf_utils
from apache_beam.io.restriction_trackers import OffsetRange
from apache_beam.io.restriction_trackers import OffsetRestrictionTracker
from apache_beam.transforms import core
from apache_beam.transforms import window
from apache_beam.transforms.ptransform import PTransform
from apache_beam.transforms.window import TimestampedValue
from apache_beam.utils import timestamp
from apache_beam.utils.timestamp import MAX_TIMESTAMP
from apache_beam.utils.timestamp import Timestamp
class ImpulseSeqGenRestrictionProvider(core.RestrictionProvider):
    """Restriction provider for `ImpulseSeqGenDoFn`.

    Maps a (start, end, interval) element onto an offset range whose offsets
    are the indices of the outputs to emit.
    """

    def initial_restriction(self, element):
        start, end, interval = element
        start = self._to_seconds(start)
        end = self._to_seconds(end)
        assert start <= end
        assert interval > 0
        total_outputs = math.ceil((end - start) / interval)
        return OffsetRange(0, total_outputs)

    @staticmethod
    def _to_seconds(value):
        # Beam Timestamps carry microseconds; plain numbers are already seconds.
        if isinstance(value, Timestamp):
            return value.micros / 1000000
        return value

    def create_tracker(self, restriction):
        return OffsetRestrictionTracker(restriction)

    def restriction_size(self, unused_element, restriction):
        return restriction.size()
class ImpulseSeqGenDoFn(beam.DoFn):
    """
    ImpulseSeqGenDoFn fn receives tuple elements with three parts:

    * first_timestamp = first timestamp to output element for.
    * last_timestamp = last timestamp/time to output element for.
    * fire_interval = how often to fire an element.

    For each input element received, ImpulseSeqGenDoFn fn will start
    generating output elements in following pattern:

    * if element timestamp is less than current runtime then output element.
    * if element timestamp is greater than current runtime, wait until next
      element timestamp.

    ImpulseSeqGenDoFn can't guarantee that each element is output at exact time.
    ImpulseSeqGenDoFn guarantees that elements would not be output prior to
    given runtime timestamp.
    """
    def process(
        self,
        element,
        restriction_tracker=beam.DoFn.RestrictionParam(
            ImpulseSeqGenRestrictionProvider())):
        """
        :param element: (start_timestamp, end_timestamp, interval)
        :param restriction_tracker: provided by the runner via `RestrictionParam`
        :return: yields elements at processing real-time intervals with value of
            target output timestamp for the element.
        """
        start, _, interval = element
        if isinstance(start, Timestamp):
            # Normalize to float seconds so arithmetic matches `time.time()`
            start = start.micros / 1000000
        assert isinstance(restriction_tracker, sdf_utils.RestrictionTrackerView)
        current_output_index = restriction_tracker.current_restriction().start
        current_output_timestamp = start + interval * current_output_index
        current_time = time.time()
        # Emit every output whose target timestamp is already in the past
        while current_output_timestamp <= current_time:
            if restriction_tracker.try_claim(current_output_index):
                yield current_output_timestamp
                current_output_index += 1
                current_output_timestamp = start + interval * current_output_index
                current_time = time.time()
            else:
                # Claim failed: the restriction was split/checkpointed - stop
                return
        # Next output lies in the future: checkpoint and ask the runner to
        # resume this restriction at that processing time.
        restriction_tracker.defer_remainder(
            timestamp.Timestamp(current_output_timestamp))
class PeriodicSequence(PTransform):
    """
    PeriodicSequence transform receives tuple elements with three parts:

    * first_timestamp = first timestamp to output element for.
    * last_timestamp = last timestamp/time to output element for.
    * fire_interval = how often to fire an element.

    For each input element received, PeriodicSequence transform will start
    generating output elements in following pattern:

    * if element timestamp is less than current runtime then output element.
    * if element timestamp is greater than current runtime, wait until next
      element timestamp.

    PeriodicSequence can't guarantee that each element is output at exact time.
    PeriodicSequence guarantees that elements would not be output prior to given
    runtime timestamp.
    """
    # BUG FIX: the class previously defined `def __init_(self): pass` - a typo
    # for `__init__` that Python never invoked (the inherited
    # `PTransform.__init__` ran instead). The dead method is removed; runtime
    # behavior is unchanged because the inherited constructor still runs.

    def expand(self, pcoll):
        """Expand each (start, stop, interval) element into its timestamped sequence."""
        return (
            pcoll
            | 'GenSequence' >> beam.ParDo(ImpulseSeqGenDoFn())
            | 'MapToTimestamped' >> beam.Map(lambda tt: TimestampedValue(tt, tt)))
class PeriodicImpulse(PTransform):
    """
    PeriodicImpulse transform generates an infinite sequence of elements with
    given runtime interval.

    PeriodicImpulse transform behaves same as {@link PeriodicSequence} transform,
    but can be used as first transform in pipeline.
    """
    def __init__(
        self,
        start_timestamp=None,
        stop_timestamp=MAX_TIMESTAMP,
        fire_interval=360.0,
        apply_windowing=False):
        """
        :param start_timestamp: Timestamp for first element; defaults to the
            moment this transform is constructed. (BUG FIX: the default used to
            be `Timestamp.now()` in the signature, which Python evaluates ONCE
            at module import time - every pipeline built later started "in the
            past". A `None` sentinel resolved here restores the intended
            semantics while remaining backward compatible for callers that pass
            an explicit timestamp.)
        :param stop_timestamp: Timestamp after which no elements will be output.
        :param fire_interval: Interval at which to output elements.
        :param apply_windowing: Whether each element should be assigned to
            individual window. If false, all elements will reside in global window.
        """
        self.start_ts = Timestamp.now() if start_timestamp is None else start_timestamp
        self.stop_ts = stop_timestamp
        self.interval = fire_interval
        self.apply_windowing = apply_windowing

    def expand(self, pbegin):
        """Create the impulse element and expand it into the periodic sequence."""
        result = (
            pbegin
            | 'ImpulseElement' >> beam.Create(
                [(self.start_ts, self.stop_ts, self.interval)])
            | 'GenSequence' >> beam.ParDo(ImpulseSeqGenDoFn())
            | 'MapToTimestamped' >> beam.Map(lambda tt: TimestampedValue(tt, tt)))
        if self.apply_windowing:
            # One fixed window per fire interval, so each element lands alone
            result = result | 'ApplyWindowing' >> beam.WindowInto(
                window.FixedWindows(self.interval))
        return result
|
import requests
import json
import sys
import numpy as np
# python3 reset_users_and_groups.py qa 0000000000000000000000000000000000000000000000000000000000000000
# used to reset the second environment if you would like to start fresh
def get_groups(env, admin_api_key):
    """Fetch all groups for the environment; return None (and log) when there are none."""
    api_url = "https://" + env + ".cloudcheckr.com/api/account.json/get_groups_v2"
    headers = {"Content-Type": "application/json", "access_key": admin_api_key}
    payload = requests.get(api_url, headers=headers).json()
    # The API reports an empty account via an ErrorCode instead of an empty list
    if 'ErrorCode' in payload and payload['ErrorCode'] == "NotFound":
        print("No Groups")
        return None
    return payload['Groups']
def delete_group(env, admin_api_key, group_id):
    """Delete one group by id; the API response is intentionally ignored."""
    api_url = "https://" + env + ".cloudcheckr.com/api/account.json/delete_group"
    headers = {"Content-Type": "application/json", "access_key": admin_api_key}
    requests.post(api_url, headers=headers, data=json.dumps({"group_id": group_id}))
def delete_groups(env, admin_api_key, groups):
    """Delete every group returned by `get_groups`; a `None` argument is a no-op.

    IDIOM FIX: the original iterated via `np.arange(0, np.size(groups))` and
    indexed into the plain Python list - numpy adds nothing here, so this is a
    direct iteration with identical behavior.
    """
    if groups is None:
        return
    for group in groups:
        delete_group(env, admin_api_key, group["Id"])
        print("Deleted Group " + group["Id"])
def get_users(env, admin_api_key):
    """Return the user-permission records for the environment."""
    api_url = "https://" + env + ".cloudcheckr.com/api/account.json/get_users_v2"
    headers = {"Content-Type": "application/json", "access_key": admin_api_key}
    response = requests.get(api_url, headers=headers)
    return response.json()['user_permissions']
def delete_user(env, admin_api_key, user_email):
    """Remove one user by email and print whether it worked."""
    api_url = "https://" + env + ".cloudcheckr.com/api/account.json/remove_user"
    headers = {"Content-Type": "application/json", "access_key": admin_api_key}
    response = requests.post(api_url, headers=headers, data=json.dumps({"email": user_email}))
    payload = response.json()
    # A `ModelState` key in the response marks a validation failure
    if 'ModelState' in payload:
        print(user_email + " failed to be deleted")
        print(payload)
    else:
        print(user_email + " was deleted")
def add_user(env, admin_api_key, env_type, email, role, group_name):
    """
    Adds a user to the new environment. Will only add to a group if the user is in a group.
    Will add functionality around logon rules later.
    Can't use add_users because users in the same group could have different roles.

    Returns the email that was submitted.
    """
    api_url = "https://" + env + ".cloudcheckr.com/api/account.json/add_user"
    headers = {"Content-Type": "application/json", "access_key": admin_api_key}
    # email = get_corrected_email(email, env_type)  # planned hook; `env_type` is reserved for it
    body = {"email": email, "user_role": role}
    if group_name is not None:
        body["group"] = group_name
    r7 = requests.post(api_url, headers=headers, data=json.dumps(body))
    payload = r7.json()  # parse once instead of re-parsing on every branch
    if "Message" in payload:
        print("Failed to create new user " + email)
        print(payload)
    elif "CreationStatuses" in payload:
        if group_name is None:
            print("Created the user " + email)
        else:
            # BUG FIX: the message previously read "in the groupX" (missing space)
            print("Created the user " + email + " in the group " + group_name)
        print(payload)
    else:
        print("Failed to create user " + email)
        print(payload)
    return email
def delete_users(env, admin_api_key, users):
    """Delete every user record returned by `get_users`; `None` is a no-op.

    IDIOM FIX: the original iterated via `np.arange(0, np.shape(users)[0])` and
    indexed into the plain Python list - replaced with direct iteration,
    identical behavior without the numpy detour.
    """
    if users is None:
        print("no users")
        return
    for user in users:
        delete_user(env, admin_api_key, user["email"])
def main():
    """Entry point: wipe all groups and users, then re-seed a single admin.

    Usage: script <env-subdomain> <admin-api-key>
    """
    print("Reset Groups and Users\n")
    env, admin_api_key = sys.argv[1], sys.argv[2]
    delete_groups(env, admin_api_key, get_groups(env, admin_api_key))
    delete_users(env, admin_api_key, get_users(env, admin_api_key))
    # print("\n")
    # An Administrator must always exist: adding any later user without one
    # makes the API throw an error, so seed a simple admin first.
    add_user(env, admin_api_key, 1, "<EMAIL>", "Administrator", None)


if __name__ == '__main__':
    main()
import sys
import gym
import numpy as np
from gym import spaces
from .edit_photo import PhotoEditor, edit_demo
import cv2
import random
import logging
import os
DATASET_DIR = "./fivek_dataset/"
TARGET_DIR = "expertC/"
ORIGINAL_DIR = "original/"
class PhotoEnhancementEnv(gym.Env):
    """Gym environment for batched photo enhancement on the FiveK dataset.

    An observation is a batch of RGB images; an action is a batch of editing
    parameter vectors applied by PhotoEditor. Reward is always 0 here
    (training code presumably computes its own signal, e.g. via calc_mse).
    """

    action_space = None
    observation_space = None
    reward_range = None
    viewer = None

    metadata = {
        'render.modes': ['human', 'rgb_array'],
    }

    def __init__(self,
                 batch_size,
                 logger=None,
                 imsize=512,
                 max_episode_steps=1):
        super().__init__()
        self.tags = {'max_episode_steps': max_episode_steps}
        self.logger = logger or logging.getLogger(__name__)
        self.imsize = imsize
        self.batch_size = batch_size
        # Subclasses may pre-populate self.file_names before calling this
        # __init__; only build the default training list when it is absent.
        # (Was a bare try/except probe; narrowed to an explicit hasattr check.)
        if not hasattr(self, 'file_names'):
            self.file_names = []
            with open(os.path.join(DATASET_DIR, "trainSource.txt")) as f:
                s = f.read()
            # Trailing newline leaves an empty final element; drop it.
            self.file_names.extend(s.split("\n")[:-1])
            self.file_names = \
                list(map(lambda x: os.path.join(DATASET_DIR, ORIGINAL_DIR, x), self.file_names))
        self.photo_editor = PhotoEditor()
        self.num_parameters = self.photo_editor.num_parameters
        # action space: one parameter vector per batch element in [-1, 1]
        self.action_space = spaces.Dict({
            'parameters':
                spaces.Box(low=-1.0, high=1.0,
                           shape=(self.batch_size, self.num_parameters), dtype=np.float32),
        })
        # observation space
        # NOTE(review): declared key is 'image' but observations use 'images';
        # preserved as-is to avoid breaking callers — confirm intent.
        self.observation_space = spaces.Dict({
            'image':
                spaces.Box(low=0,
                           high=255,
                           shape=(self.batch_size, self.imsize, self.imsize, 3),
                           dtype=np.uint8)
        })
        # reset canvas and set current position of the pen
        self.reset()

    def reset(self):
        """Sample a fresh batch of (randomly h-flipped) 64x64 photos."""
        self.logger.debug('reset the drawn picture')
        self.original_images = []
        self.editted_images = []
        for i in range(self.batch_size):
            original_image = cv2.imread(random.choice(self.file_names))
            original_image = cv2.resize(original_image, (64, 64)) / 255.0
            # Random horizontal flip for augmentation.
            if random.randint(0, 1) == 0:
                original_image = original_image[:, ::-1, :]
            editted_image = original_image.copy()
            self.original_images.append(original_image)
            self.editted_images.append(editted_image)
        ob = {
            'images': self._get_rgb_array()
        }
        return ob

    def step(self, action):
        """Apply the clipped editing parameters to every image in the batch."""
        parameters_space = self.action_space.spaces['parameters']
        clipped_action = np.clip(action['parameters'] / 1.0, parameters_space.low, parameters_space.high)
        for i in range(self.batch_size):
            self.editted_images[i] = self.photo_editor(self.original_images[i].copy(), clipped_action[i])
        reward = 0.0
        done = False
        ob = {
            'images': self._get_rgb_array()
        }
        return ob, reward, done, {}

    def render(self, mode='human'):
        """ render the current drawn picture image for human """
        if mode == 'human':
            if self.viewer is None:
                from gym.envs.classic_control import rendering
                self.viewer = rendering.SimpleImageViewer()
            self.viewer.imshow(self._get_rgb_array())
        elif mode == 'rgb_array':
            return self._get_rgb_array()
        else:
            raise NotImplementedError

    def _get_rgb_array(self, cut=True):
        """ render the current canvas as a rgb array (BGR -> RGB, uint8) """
        rgb_array = np.zeros((self.batch_size, self.imsize, self.imsize, 3), dtype=np.uint8)
        for i in range(self.batch_size):
            shape = self.original_images[i].shape
            # Images are padded into the top-left corner of the imsize canvas.
            rgb_array[i, :shape[0], :shape[1], :] = \
                (self.editted_images[i][:, :, ::-1] * 255).astype(np.uint8)
        return rgb_array

    def calc_mse(self):
        """Per-image mean squared error between original and edited batches."""
        return ((np.array(self.original_images) - np.array(self.editted_images)) ** 2).mean(axis=(1, 2, 3))

    def close(self):
        if self.viewer is not None:
            self.viewer.close()
            self.viewer = None

    def seed(self, seed=None):
        # TODO: implement here
        pass
class PhotoEnhancementEnvTest(PhotoEnhancementEnv):
    """Evaluation variant: walks the test split in order and, once per
    episode, writes full-resolution edited photos into RESULT_DIR."""

    def __init__(self,
                 batch_size,
                 logger=None,
                 imsize=512,
                 max_episode_steps=1):
        # Pre-populate file_names so the parent __init__ keeps the test split
        # instead of loading the training list.
        with open(os.path.join(DATASET_DIR, "test.txt")) as f:
            s = f.read()
        self.file_names = s.split("\n")[:-1]
        self.file_names = \
            list(map(lambda x: os.path.join(DATASET_DIR, ORIGINAL_DIR, x), self.file_names))
        # Fix: give RESULT_DIR a defined default so _is_done raises its
        # intended AssertionError (not AttributeError) when it was never set
        # via set_result_dir().
        self.RESULT_DIR = ""
        super().__init__(batch_size=batch_size,
                         logger=logger,
                         imsize=imsize,
                         max_episode_steps=max_episode_steps)

    def set_result_dir(self, result_dir):
        """Directory where full-resolution edited photos are written."""
        self.RESULT_DIR = result_dir

    def reset(self):
        """Deterministically load the next batch from the test split."""
        self.original_images = []
        self.editted_images = []
        # Full-resolution copies, kept so final outputs are not 64x64.
        self.original_original_images = []
        self.original_editted_images = []
        for i in range(self.batch_size):
            original_image = cv2.imread(self.file_names[i])
            self.original_original_images.append(original_image.copy() / 255.0)
            original_image = cv2.resize(original_image, (64, 64)) / 255.0
            editted_image = original_image.copy()
            self.original_images.append(original_image)
            self.editted_images.append(editted_image)
        self.done = False
        self.steps = 0
        ob = {
            'images': self._get_rgb_array()
        }
        return ob

    def step(self, action):
        """Apply the action to both the 64x64 and full-resolution images."""
        parameters_space = self.action_space.spaces['parameters']
        clipped_action = np.clip(action['parameters'] / 1.0, parameters_space.low, parameters_space.high)
        for i in range(self.batch_size):
            self.editted_images[i] = self.photo_editor(self.original_images[i].copy(), clipped_action[i])
            self.original_editted_images.append(self.photo_editor(self.original_original_images[i].copy(), clipped_action[i]))
        self.steps += 1
        done = self._is_done()
        reward = 0.0
        ob = {
            'images': self._get_rgb_array()
        }
        return ob, reward, done, {}

    def _is_done(self):
        """Episodes last one step; on completion, dump results and advance
        the file list past the consumed batch."""
        if self.steps >= 1:
            assert self.RESULT_DIR != "", "error: specify the result directory"
            if not os.path.exists(self.RESULT_DIR):
                os.mkdir(self.RESULT_DIR)
            for i in range(self.batch_size):
                cv2.imwrite(os.path.join(self.RESULT_DIR, os.path.basename(self.file_names[i])),
                            (self.original_editted_images[i] * 255).astype(np.uint8))
            self.file_names = self.file_names[self.batch_size:]
            return True
        else:
            return False
class PhotoEnhancementEnvDemo(PhotoEnhancementEnvTest):
    # Single-photo demo: applies the action to one full-resolution image and
    # writes the edited result into the current working directory.
    def __init__(self,
                 batch_size=1,
                 logger=None,
                 imsize=512,
                 max_episode_steps=1,
                 file_name=None):
        # The single photo to demo on; replaces the test-split file list.
        self.file_names = [file_name]
        # NOTE(review): deliberately skips PhotoEnhancementEnvTest.__init__
        # (which would reload the test split) by invoking the grandparent
        # PhotoEnhancementEnv.__init__ directly.
        super(PhotoEnhancementEnvTest, self).__init__(batch_size=batch_size,
                                                      logger=logger,
                                                      imsize=imsize,
                                                      max_episode_steps=max_episode_steps)
    def step(self, action):
        # Clip the action into the declared parameter box before editing.
        parameters_space = self.action_space.spaces['parameters']
        clipped_action = np.clip(action['parameters'] / 1.0, parameters_space.low, parameters_space.high)
        # 64x64 preview editing is skipped in demo mode; only the
        # full-resolution output below is produced.
        # for i in range(self.batch_size):
        #     self.editted_images[i] = self.photo_editor(self.original_images[i].copy(), clipped_action[i])
        #     self.original_editted_images.append(self.photo_editor(self.original_original_images[i].copy(), clipped_action[i]))
        self.steps += 1
        done = self._is_done()
        reward = 0.0
        if done:
            for i in range(self.batch_size):
                # edit_demo(self.original_original_images[i] * 255, clipped_action[i])
                # Edit the full-resolution image and save it beside the CWD,
                # named after the source file.
                editted_image = self.photo_editor(self.original_original_images[i].copy(), clipped_action[i])
                cv2.imwrite(os.path.basename(self.file_names[i]), (editted_image * 255).astype(np.uint8))
        ob = {
            'images': self._get_rgb_array()
        }
        return ob, reward, done, {}
    def _is_done(self):
        # Demo episodes always last exactly one step (no result-dir dumping).
        if self.steps >= 1:
            return True
        else:
            return False
|
#
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import argparse
from common.base_model_init import BaseModelInitializer, set_env_var
class ModelInitializer(BaseModelInitializer):
    """Initializes and runs R-FCN inference (performance or accuracy mode)."""

    command = []
    RFCN_PERF_SCRIPT = "run_rfcn_inference.py"
    RFCN_ACCURACY_SCRIPT = "coco_mAP.sh"
    perf_script_path = ""
    accuracy_script_path = ""

    def __init__(self, args, custom_args=[], platform_util=None):
        super(ModelInitializer, self).__init__(args, custom_args, platform_util)
        self.perf_script_path = os.path.join(
            self.args.intelai_models, self.args.mode, self.args.precision,
            self.RFCN_PERF_SCRIPT)
        self.accuracy_script_path = os.path.join(
            self.args.intelai_models, self.args.mode, self.args.precision,
            self.RFCN_ACCURACY_SCRIPT)

        # remove intelai models path, so that imports don't conflict
        if "MOUNT_BENCHMARK" in os.environ and \
                os.environ["MOUNT_BENCHMARK"] in sys.path:
            sys.path.remove(os.environ["MOUNT_BENCHMARK"])
        if self.args.intelai_models in sys.path:
            sys.path.remove(self.args.intelai_models)

        self.parse_args()

        # Set KMP env vars, if they haven't already been set
        config_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.json")
        self.set_kmp_vars(config_file_path)

        # Set num_inter_threads and num_intra_threads
        self.set_num_inter_intra_threads()

    def parse_args(self):
        """Parse custom args (-x/-v are mutually exclusive); raises if none given."""
        if self.custom_args:
            parser = argparse.ArgumentParser()
            mutex_group = parser.add_mutually_exclusive_group()
            mutex_group.add_argument("-x", "--number_of_steps",
                                     help="Run for n number of steps",
                                     type=int, default=None)
            mutex_group.add_argument(
                "-v", "--visualize",
                help="Whether to visualize the output image",
                action="store_true")
            parser.add_argument(
                "-t", "--timeline",
                help="Output file name for TF timeline",
                type=str, default=None)
            parser.add_argument("-e", "--evaluate_tensor",
                                help="Full tensor name to evaluate",
                                type=str, default=None)
            parser.add_argument("-p", "--print_accuracy",
                                help="Print accuracy results",
                                action="store_true")
            parser.add_argument("-q", "--split",
                                help="Location of accuracy data",
                                type=str, default=None)
            self.args = parser.parse_args(self.custom_args,
                                          namespace=self.args)
            self.validate_args()
        else:
            raise ValueError("Custom parameters are missing...")

    def validate_args(self):
        """Validate batch size, script paths, and the TF models directory."""
        if not (self.args.batch_size == -1 or self.args.batch_size == 1):
            raise ValueError(
                "Batch Size specified: {}. R-FCN inference only supports "
                "batch size = 1".format(self.args.batch_size))
        if not os.path.exists(self.perf_script_path):
            raise ValueError("Unable to locate the R-FCN perf script: {}".
                             format(self.perf_script_path))
        if not os.path.exists(self.accuracy_script_path):
            raise ValueError("Unable to locate the R-FCN accuracy script: "
                             "{}".format(self.accuracy_script_path))
        if not self.args.model_source_dir or not os.path.isdir(
                self.args.model_source_dir):
            raise ValueError("Unable to locate TensorFlow models at {}".
                             format(self.args.model_source_dir))

    def run_perf_command(self):
        """Build and run the performance command (optionally under numactl)."""
        # Get the command prefix, but numactl is added later in run_perf_command()
        self.command.append(self.get_command_prefix(self.args.socket_id, numactl=False))
        num_cores = str(self.platform_util.num_cores_per_socket)
        if self.args.num_cores != -1:
            num_cores = str(self.args.num_cores)
        set_env_var("OMP_NUM_THREADS", num_cores)

        if self.args.socket_id != -1:
            self.command.append("numactl")
            if self.args.socket_id:
                socket_id = self.args.socket_id
            else:
                socket_id = "0"
            if self.args.num_cores != -1:
                self.command.append("-C")
                self.command.append("+0")
                # Enumerate cores 1..num_cores-1 ("+0,1,2,...").
                # Fixed: was "i += i", which doubled i (1, 2, 4, ...) and
                # skipped most cores instead of listing them all.
                i = 1
                while i < self.args.num_cores:
                    self.command.append(",{}".format(i))
                    i += 1
            self.command.append("-N")
            self.command.append("{}".format(socket_id))
            self.command.append("-m")
            self.command.append("{}".format(socket_id))

        self.command += (self.python_exe, self.perf_script_path)
        self.command += ("-m", self.args.model_source_dir)
        self.command += ("-g", self.args.input_graph)
        self.command += ("--num-intra-threads", str(self.args.num_intra_threads))
        self.command += ("--num-inter-threads", str(self.args.num_inter_threads))
        if self.args.number_of_steps:
            self.command += ("-x", "{}".format(self.args.number_of_steps))
        if self.args.visualize:
            # Fixed: `self.command += ("-v")` extended the list with the
            # *characters* '-' and 'v' because ("-v") is a plain string.
            self.command.append("-v")
        if self.args.timeline:
            self.command += ("-t", self.args.timeline)
        if self.args.data_location:
            self.command += ("-d", self.args.data_location)
        if self.args.evaluate_tensor:
            self.command += ("-e", self.args.evaluate_tensor)
        if self.args.print_accuracy:
            # Fixed: same string-extend bug as "-v" above.
            self.command.append("-p")
        self.run_command(" ".join(self.command))

    def run_accuracy_command(self):
        """Build and run the accuracy shell command (env vars + script)."""
        # already validated by parent
        self.command = self.get_command_prefix(self.args.socket_id, numactl=False)
        self.command += "FROZEN_GRAPH=" + self.args.input_graph

        if self.args.data_location and os.path.exists(
                self.args.data_location):
            self.command += " TF_RECORD_FILE=" + self.args.data_location
        else:
            # Fixed: error message previously referenced
            # self.args.tf_record_file although data_location was checked.
            raise ValueError(
                "Unable to locate the coco data record file at {}".format(
                    self.args.data_location))

        if self.args.split:
            self.command += " SPLIT=" + self.args.split
        else:
            raise ValueError("Must specify SPLIT parameter")

        self.command += " TF_MODELS_ROOT={}".format(
            self.args.model_source_dir)

        self.command += " " + self.accuracy_script_path
        self.run_command(self.command)

    def run(self):
        """Dispatch to accuracy or perf mode from the TF models research dir."""
        # Run script from the tensorflow models research directory
        original_dir = os.getcwd()
        os.chdir(os.path.join(self.args.model_source_dir, "research"))

        if self.args.accuracy_only:
            self.run_accuracy_command()
        else:
            self.run_perf_command()

        os.chdir(original_dir)
|
from afqueue.common.encoding_utilities import cast_list_of_strings, cast_bytes
from afqueue.messages.base_message import BaseMessage #@UnresolvedImport
from afqueue.common.exception_formatter import ExceptionFormatter #@UnresolvedImport
from afqueue.messages import message_types #@UnresolvedImport
from afqueue.data_objects.exchange_wrapper import ExchangeWrapper #@UnresolvedImport
from afqueue.data_objects.data_queue_wrapper import DataQueueWrapper #@UnresolvedImport
import json, bson #@UnresolvedImport
class CommandSetMasterRequestMessage(BaseMessage):
    """Request that the QM identified by qm_id_string become master."""

    def __init__(self, qm_id_string, notification=""):
        # Build base.
        super(CommandSetMasterRequestMessage, self).__init__(message_types.COMMAND_SET_MASTER_REQUEST)
        # Transmitted data.
        self.qm_id_string = qm_id_string
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.qm_id_string, self.notification)
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandSetMasterRequestMessage(raw_message[1], raw_message[2])
        except Exception:
            return None
class CommandFreezeQueueRequestMessage(BaseMessage):
    """Request to freeze pushing and/or pulling on a named queue."""

    def __init__(self, queue_name, freeze_push=False, freeze_pull=False, notification=""):
        # Build base.
        super(CommandFreezeQueueRequestMessage, self).__init__(message_types.COMMAND_FREEZE_QUEUE_REQUEST)
        # Transmitted data.
        self.queue_name = queue_name
        self.freeze_push = freeze_push
        self.freeze_pull = freeze_pull
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.queue_name, str(self.freeze_push), str(self.freeze_pull), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandFreezeQueueRequestMessage(raw_message[1], BaseMessage.bool_from_string(raw_message[2]), BaseMessage.bool_from_string(raw_message[3]), raw_message[4])
        except Exception:
            return None
class CommandDeleteQueuesRequestMessage(BaseMessage):
    """Request to delete the named queues (list serialized as JSON)."""

    def __init__(self, queue_name_list, notification=""):
        # Build base.
        super(CommandDeleteQueuesRequestMessage, self).__init__(message_types.COMMAND_DELETE_QUEUES_REQUEST)
        # Transmitted data.
        self.queue_name_list = queue_name_list
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, json.dumps(self.queue_name_list), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandDeleteQueuesRequestMessage(json.loads(raw_message[1]), raw_message[2])
        except Exception:
            return None
class CommandAddWorkersRequestMessage(BaseMessage):
    """Request to spawn *count* additional worker processes/threads."""

    def __init__(self, count, notification=""):
        # Build base.
        super(CommandAddWorkersRequestMessage, self).__init__(message_types.COMMAND_ADD_WORKERS_REQUEST)
        # Transmitted data.
        self.count = count
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, str(self.count), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandAddWorkersRequestMessage(int(raw_message[1]), raw_message[2])
        except Exception:
            return None
class CommandGetPeckingOrderRequestMessage(BaseMessage):
    """Request for the current QM pecking order."""

    def __init__(self, notification=""):
        # Build base.
        super(CommandGetPeckingOrderRequestMessage, self).__init__(message_types.COMMAND_GET_PECKING_ORDER_REQUEST)
        # Transmitted data.
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandGetPeckingOrderRequestMessage(raw_message[1])
        except Exception:
            return None
class CommandGetPeckingOrderReplyMessage(BaseMessage):
    """Reply carrying the QM pecking order (list serialized as JSON)."""

    def __init__(self, pecking_order_list, notification=""):
        # Build base.
        super(CommandGetPeckingOrderReplyMessage, self).__init__(message_types.COMMAND_GET_PECKING_ORDER_REPLY)
        # Transmitted data.
        self.pecking_order_list = pecking_order_list
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, json.dumps(self.pecking_order_list), self.notification)
        except Exception:
            # Consistency fix: every sibling send() raises get_full_exception();
            # this one raised get_message().
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        """
        try:
            return CommandGetPeckingOrderReplyMessage(json.loads(raw_message[1]), raw_message[2])
        except Exception:
            raise ExceptionFormatter.get_message()
class CommandGetStatisticsRequestMessage(BaseMessage):
    """Request for server statistics of the given stat_type."""

    def __init__(self, stat_type, notification=""):
        # Build base.
        super(CommandGetStatisticsRequestMessage, self).__init__(message_types.COMMAND_GET_STATISTICS_REQUEST)
        # Transmitted data.
        self.type = stat_type
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.type, self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandGetStatisticsRequestMessage(raw_message[1], raw_message[2])
        except Exception:
            return None
class CommandGetStatisticsReplyMessage(BaseMessage):
    """Reply carrying thread and network statistics (BSON-serialized dicts)."""

    def __init__(self, response_code, thread_dict, net_stat_dict, notification=""):
        # Build base.
        super(CommandGetStatisticsReplyMessage, self).__init__(message_types.COMMAND_GET_STATISTICS_REPLY)
        # Transmitted data.
        self.response_code = response_code
        self.thread_dict = thread_dict
        self.net_stat_dict = net_stat_dict
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.response_code, bson.dumps(self.thread_dict), bson.dumps(self.net_stat_dict), self.notification)
        except Exception:
            # Consistency fix: every sibling send() raises get_full_exception();
            # this one raised get_message().
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        """
        try:
            return CommandGetStatisticsReplyMessage(raw_message[1], bson.loads(raw_message[2]), bson.loads(raw_message[3]), raw_message[4])
        except Exception:
            raise ExceptionFormatter.get_message()
class CommandGetQueueSizeRequestMessage(BaseMessage):
    """Request for the size of a named queue."""

    def __init__(self, queue_name, notification=""):
        # Build base.
        super(CommandGetQueueSizeRequestMessage, self).__init__(message_types.COMMAND_GET_QUEUE_SIZE_REQUEST)
        # Transmitted data.
        self.queue_name = queue_name
        self.notification = notification
        # Internal data (filled in by the handler, never transmitted).
        self.queue_size = None

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.queue_name, self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandGetQueueSizeRequestMessage(raw_message[1], raw_message[2])
        except Exception:
            return None
class CommandGetQueueSizeReplyMessage(BaseMessage):
    """Reply carrying the size of a named queue."""

    def __init__(self, queue_name, queue_size, notification=""):
        # Build base.
        super(CommandGetQueueSizeReplyMessage, self).__init__(message_types.COMMAND_GET_QUEUE_SIZE_REPLY_MESSAGE)
        # Transmitted data.
        self.queue_name = queue_name
        self.queue_size = queue_size
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.queue_name, str(self.queue_size), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        """
        try:
            return CommandGetQueueSizeReplyMessage(raw_message[1], int(raw_message[2]), raw_message[3])
        except Exception:
            raise ExceptionFormatter.get_message()
class CommandGetSetupDataRequestMessage(BaseMessage):
    """Request for the current exchange/queue setup data."""

    def __init__(self, notification=""):
        # Build base.
        super(CommandGetSetupDataRequestMessage, self).__init__(message_types.COMMAND_GET_SETUP_DATA_REQUEST)
        # Transmitted data.
        self.notification = notification
        # Internal data (filled in by the handler, never transmitted).
        self.queue_size = None

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandGetSetupDataRequestMessage(raw_message[1])
        except Exception:
            return None
class CommandGetSetupDataReplyMessage(BaseMessage):
    """Reply carrying exchange/queue wrapper lists, BSON-serialized via dump/load."""

    def __init__(self, response_code, exchange_wrapper_list, queue_wrapper_list, notification=""):
        # Build base.
        super(CommandGetSetupDataReplyMessage, self).__init__(message_types.COMMAND_GET_SETUP_DATA_REPLY_MESSAGE)
        # Transmitted data.
        self.response_code = response_code
        self.exchange_wrapper_list = exchange_wrapper_list
        self.queue_wrapper_list = queue_wrapper_list
        self.notification = notification

    def dump(self):
        """
        Dumps the message into a format in which it can be recreated via the "load" method.
        """
        try:
            dump_dict = dict()
            dump_dict["ewl"] = [ew.dump() for ew in self.exchange_wrapper_list]
            dump_dict["qwl"] = [qw.dump() for qw in self.queue_wrapper_list]
            dump_dict["n"] = self.notification
            return bson.dumps(dump_dict)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def load(dumped_string):
        """
        Returns an instance object of this class built from data which was created in the "dump" method.
        Note: response_code is not part of the dump; the caller sets it separately.
        """
        dump_dict = bson.loads(dumped_string)
        exchange_wrapper_list = [ExchangeWrapper.load(dew) for dew in dump_dict["ewl"]]
        queue_wrapper_list = [DataQueueWrapper.load(dqw) for dqw in dump_dict["qwl"]]
        notification = dump_dict["n"]
        return CommandGetSetupDataReplyMessage("", exchange_wrapper_list, queue_wrapper_list, notification)

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.response_code, self.dump())
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        """
        try:
            message = CommandGetSetupDataReplyMessage.load(raw_message[2])
            message.response_code = raw_message[1]
            return message
        except Exception:
            raise ExceptionFormatter.get_full_exception()
class CommandListQueuesRequestMessage(BaseMessage):
    """Request to list queue sizes, optionally from all servers.

    Note: queue names are joined with ';' on the wire, so names must not
    contain semicolons.
    """

    def __init__(self, queue_name_list, from_all_servers_flag, notification=""):
        # Build base.
        super(CommandListQueuesRequestMessage, self).__init__(message_types.COMMAND_LIST_QUEUES_REQUEST)
        # Transmitted data.
        self.queue_name_list = queue_name_list
        self.from_all_servers_flag = from_all_servers_flag
        self.notification = notification
        # Internal data (filled in by the handler, never transmitted).
        self.queue_size = None

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, ";".join(self.queue_name_list), str(self.from_all_servers_flag), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandListQueuesRequestMessage(raw_message[1].split(";"), BaseMessage.bool_from_string(raw_message[2]), raw_message[3])
        except Exception:
            return None
class CommandListQueuesReplyMessage(BaseMessage):
    """Reply carrying a queue-name -> size dict (BSON-serialized)."""

    def __init__(self, response_code, queue_size_dict, notification=""):
        # Build base.
        super(CommandListQueuesReplyMessage, self).__init__(message_types.COMMAND_LIST_QUEUES_REPLY)
        # Transmitted data.
        self.response_code = response_code
        self.queue_size_dict = queue_size_dict
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.response_code, bson.dumps(self.queue_size_dict), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        """
        try:
            # bson.loads requires bytes; the wire frame may arrive as str.
            raw_message[2] = cast_bytes(raw_message[2])
            return CommandListQueuesReplyMessage(raw_message[1], bson.loads(raw_message[2]), raw_message[3])
        except Exception:
            raise ExceptionFormatter.get_message()
class CommandPurgeQueuesRequestMessage(BaseMessage):
    """Request to purge the named queues, optionally on all servers."""

    def __init__(self, queue_name_list, from_all_servers_flag, notification=""):
        # Build base.
        super(CommandPurgeQueuesRequestMessage, self).__init__(message_types.COMMAND_PURGE_QUEUES_REQUEST)
        # Transmitted data.
        self.queue_name_list = queue_name_list
        self.from_all_servers_flag = from_all_servers_flag
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, json.dumps(self.queue_name_list), str(self.from_all_servers_flag), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandPurgeQueuesRequestMessage(json.loads(raw_message[1]), BaseMessage.bool_from_string(raw_message[2]), raw_message[3])
        except Exception:
            return None
class CommandRemoteConnectRequestMessage(BaseMessage):
    """Request to open *count* connections to a remote afqueue peer."""

    def __init__(self, remote_ip_address, remote_port, count, notification=""):
        # Build base.
        super(CommandRemoteConnectRequestMessage, self).__init__(message_types.COMMAND_REMOTE_CONNECT_REQUEST)
        # Transmitted data.
        self.remote_ip_address = remote_ip_address
        self.remote_port = remote_port
        self.count = count
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.remote_ip_address, str(self.remote_port), str(self.count), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandRemoteConnectRequestMessage(raw_message[1], int(raw_message[2]), int(raw_message[3]), raw_message[4])
        except Exception:
            return None
class CommandRemoteConnectPikaRequestMessage(BaseMessage):
    """Request to open *count* Pika (AMQP) connections using a connection string."""

    def __init__(self, connection_string, queue_mode, count, notification=""):
        # Build base.
        super(CommandRemoteConnectPikaRequestMessage, self).__init__(message_types.COMMAND_REMOTE_CONNECT_PIKA_REQUEST)
        # Transmitted data.
        self.connection_string = connection_string
        self.queue_mode = queue_mode
        self.count = count
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.connection_string, self.queue_mode, str(self.count), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandRemoteConnectPikaRequestMessage(raw_message[1], raw_message[2], int(raw_message[3]), raw_message[4])
        except Exception:
            return None
class CommandRemoteDisconnectRequestMessage(BaseMessage):
    """Request to close *count* connections to a remote afqueue peer."""

    def __init__(self, remote_ip_address, remote_port, count, notification=""):
        # Build base.
        super(CommandRemoteDisconnectRequestMessage, self).__init__(message_types.COMMAND_REMOTE_DISCONNECT_REQUEST)
        # Transmitted data.
        self.remote_ip_address = remote_ip_address
        self.remote_port = remote_port
        self.count = count
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.remote_ip_address, str(self.remote_port), str(self.count), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandRemoteDisconnectRequestMessage(raw_message[1], int(raw_message[2]), int(raw_message[3]), raw_message[4])
        except Exception:
            return None
class CommandRemoteDisconnectPikaRequestMessage(BaseMessage):
    """Request to close *count* Pika (AMQP) connections for a connection string."""

    def __init__(self, connection_string, count, notification=""):
        # Build base.
        super(CommandRemoteDisconnectPikaRequestMessage, self).__init__(message_types.COMMAND_REMOTE_DISCONNECT_PIKA_REQUEST)
        # Transmitted data.
        self.connection_string = connection_string
        self.count = count
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.connection_string, str(self.count), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandRemoteDisconnectPikaRequestMessage(raw_message[1], int(raw_message[2]), raw_message[3])
        except Exception:
            return None
class CommandRemoveWorkersRequestMessage(BaseMessage):
    """Request to remove *count* worker processes/threads."""

    def __init__(self, count, notification=""):
        # Build base.
        super(CommandRemoveWorkersRequestMessage, self).__init__(message_types.COMMAND_REMOVE_WORKERS_REQUEST)
        # Transmitted data.
        self.count = count
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, str(self.count), self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            return CommandRemoveWorkersRequestMessage(int(raw_message[1]), raw_message[2])
        except Exception:
            return None
class CommandReplyMessage(BaseMessage):
    """Generic command reply carrying a response code and notification."""

    def __init__(self, response_code, notification=""):
        # Build base.
        super(CommandReplyMessage, self).__init__(message_types.COMMAND_REPLY)
        # Store data.
        self.response_code = response_code
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.response_code, self.notification)
        except Exception:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        """
        try:
            return CommandReplyMessage(raw_message[1], raw_message[2])
        except Exception:
            raise ExceptionFormatter.get_message()
class CommandShutDownMessage(BaseMessage):
    """Request that the receiver shut down."""

    def __init__(self, notification=""):
        # Initialize the base with our message type, then record the payload.
        super(CommandShutDownMessage, self).__init__(
            message_types.COMMAND_SHUT_DOWN_REQUEST)
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            BaseMessage._send(self, socket, self.notification)
        except:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            note = raw_message[1]
        except:
            return None
        return CommandShutDownMessage(note)
class CommandUnlockQueueRequestMessage(BaseMessage):
    """Request that the named queue be unlocked."""

    def __init__(self, queue_name, notification=""):
        # Initialize the base with our message type, then record the payload.
        super(CommandUnlockQueueRequestMessage, self).__init__(
            message_types.COMMAND_UNLOCK_QUEUE_REQUEST)
        self.queue_name = queue_name
        self.notification = notification

    def send(self, socket):
        """
        Sends the message over the socket.
        """
        try:
            payload = (self.queue_name, self.notification)
            BaseMessage._send(self, socket, *payload)
        except:
            raise ExceptionFormatter.get_full_exception()

    @staticmethod
    def create_from_received(raw_message):
        """
        Returns a new message of this type from the raw message data.
        Returns None if the message fails to create.
        """
        try:
            queue, note = raw_message[1], raw_message[2]
        except:
            return None
        return CommandUnlockQueueRequestMessage(queue, note)
|
<filename>pype/modules/ftrack/event_handlers_user/action_create_project_structure.py
import os
import re
from pype.modules.ftrack.lib import BaseAction, statics_icon
from pype.api import Anatomy, get_project_settings
class CreateProjectFolders(BaseAction):
    """Action create folder structure and may create hierarchy in Ftrack.

    Creation of folder structure and hierarchy in Ftrack is based on presets.
    These presets are located in:
    `~/pype-config/presets/tools/project_folder_structure.json`

    Example of content:
    ```json
    {
        "__project_root__": {
            "prod": {},
            "resources": {
                "footage": {
                    "plates": {},
                    "offline": {}
                },
                "audio": {},
                "art_dept": {}
            },
            "editorial": {},
            "assets[ftrack.Library]": {
                "characters[ftrack]": {},
                "locations[ftrack]": {}
            },
            "shots[ftrack.Sequence]": {
                "scripts": {},
                "editorial[ftrack.Folder]": {}
            }
        }
    }
    ```

    Key "__project_root__" indicates root folder (or entity). Each key in
    dictionary represents folder name. Value may contain another dictionary
    with subfolders.

    Identifier `[ftrack]` in name says that this should be also created in
    Ftrack hierarchy. It is possible to specify entity type of item with "." .
    If key is `assets[ftrack.Library]` then in ftrack will be created entity
    with name "assets" and entity type "Library". It is expected Library entity
    type exist in Ftrack.
    """

    identifier = "create.project.structure"
    label = "Create Project Structure"
    description = "Creates folder structure"
    role_list = ["Pypeclub", "Administrator", "Project Manager"]
    icon = statics_icon("ftrack", "action_icons", "CreateProjectFolders.svg")

    # Matches a "[...]" suffix in a folder name, e.g. "assets[ftrack.Library]".
    pattern_array = re.compile(r"\[.*\]")
    # Matches names whose bracket suffix flags them for Ftrack creation.
    pattern_ftrack = re.compile(r".*\[[.]*ftrack[.]*")
    # Extracts the "ftrack.<EntityType>" token from the bracket suffix.
    pattern_ent_ftrack = re.compile(r"ftrack\.[^.,\],\s,]*")
    project_root_key = "__project_root__"

    def discover(self, session, entities, event):
        """Show the action only when exactly one Project entity is selected."""
        if len(entities) != 1:
            return False

        if entities[0].entity_type.lower() != "project":
            return False

        return True

    def launch(self, session, entities, event):
        """Create folders (and optionally Ftrack entities) for the project.

        Returns True on success, or an ftrack error payload dict on failure.
        """
        # Get project entity
        project_entity = self.get_project_from_entity(entities[0])
        # Load settings for project
        project_name = project_entity["full_name"]
        project_settings = get_project_settings(project_name)
        project_folder_structure = (
            project_settings["global"]["project_folder_structure"]
        )
        if not project_folder_structure:
            return {
                "success": False,
                "message": "Project structure is not set."
            }

        try:
            # Get paths based on presets
            basic_paths = self.get_path_items(project_folder_structure)
            self.create_folders(basic_paths, project_entity)
            self.create_ftrack_entities(basic_paths, project_entity)
        except Exception as exc:
            self.log.warning("Creating of structure crashed.", exc_info=True)
            # Discard any partially created ftrack entities.
            session.rollback()
            return {
                "success": False,
                "message": str(exc)
            }

        return True

    def get_ftrack_paths(self, paths_items):
        """Reduce each key path to its Ftrack-flagged tail.

        Walks each path from the leaf upwards; once the deepest
        "[ftrack...]"-flagged item is found, that item and everything above it
        (except the root key) is kept.
        """
        all_ftrack_paths = []
        for path_items in paths_items:
            ftrack_path_items = []
            is_ftrack = False
            for item in reversed(path_items):
                if item == self.project_root_key:
                    continue
                if is_ftrack:
                    ftrack_path_items.append(item)
                elif re.match(self.pattern_ftrack, item):
                    ftrack_path_items.append(item)
                    is_ftrack = True
            # Restore top-down order.
            ftrack_path_items = list(reversed(ftrack_path_items))
            if ftrack_path_items:
                all_ftrack_paths.append(ftrack_path_items)
        return all_ftrack_paths

    def compute_ftrack_items(self, in_list, keys):
        """Merge the key chain `keys` into the nested list-of-dicts tree `in_list`."""
        if len(keys) == 0:
            return in_list
        key = keys[0]
        exist = None
        # Reuse an existing node for this key if one is already in the tree.
        for index, subdict in enumerate(in_list):
            if key in subdict:
                exist = index
                break
        if exist is not None:
            in_list[exist][key] = self.compute_ftrack_items(
                in_list[exist][key], keys[1:]
            )
        else:
            in_list.append({key: self.compute_ftrack_items([], keys[1:])})
        return in_list

    def translate_ftrack_items(self, paths_items):
        """Fold flat key paths into a single nested tree structure."""
        main = []
        for path_items in paths_items:
            main = self.compute_ftrack_items(main, path_items)
        return main

    def create_ftrack_entities(self, basic_paths, project_ent):
        """Create the Ftrack-flagged part of the structure under the project."""
        only_ftrack_items = self.get_ftrack_paths(basic_paths)
        ftrack_paths = self.translate_ftrack_items(only_ftrack_items)

        for separation in ftrack_paths:
            parent = project_ent
            self.trigger_creation(separation, parent)

    def trigger_creation(self, separation, parent):
        """Recursively create entities for one subtree of the structure.

        Entity type defaults to "Folder" unless the bracket suffix specifies
        one, e.g. "[ftrack.Sequence]".
        """
        for item, subvalues in separation.items():
            matches = re.findall(self.pattern_array, item)
            ent_type = "Folder"
            if len(matches) == 0:
                name = item
            else:
                match = matches[0]
                # Strip the "[...]" suffix from the display name.
                name = item.replace(match, "")
                ent_type_match = re.findall(self.pattern_ent_ftrack, match)
                if len(ent_type_match) > 0:
                    ent_type_split = ent_type_match[0].split(".")
                    if len(ent_type_split) == 2:
                        ent_type = ent_type_split[1]
            new_parent = self.create_ftrack_entity(name, ent_type, parent)
            if subvalues:
                for subvalue in subvalues:
                    self.trigger_creation(subvalue, new_parent)

    def create_ftrack_entity(self, name, ent_type, parent):
        """Return the child entity `name` under `parent`, creating it if needed."""
        # Reuse an already-loaded child when available.
        for children in parent["children"]:
            if children["name"] == name:
                return children
        data = {
            "name": name,
            "parent_id": parent["id"]
        }
        if parent.entity_type.lower() == "project":
            data["project_id"] = parent["id"]
        else:
            data["project_id"] = parent["project"]["id"]

        # Double-check against the server before creating, to avoid duplicates
        # that are not present in the cached "children" collection.
        existing_entity = self.session.query((
            "TypedContext where name is \"{}\" and "
            "parent_id is \"{}\" and project_id is \"{}\""
        ).format(name, data["parent_id"], data["project_id"])).first()
        if existing_entity:
            return existing_entity

        new_ent = self.session.create(ent_type, data)
        self.session.commit()
        return new_ent

    def get_path_items(self, in_dict):
        """Flatten the nested preset dict into lists of keys (leaf paths)."""
        output = []
        for key, value in in_dict.items():
            if not value:
                # Leaf: contributes a bare key.
                output.append(key)
            else:
                paths = self.get_path_items(value)
                for path in paths:
                    if not isinstance(path, (list, tuple)):
                        path = [path]
                    output.append([key, *path])
        return output

    def compute_paths(self, basic_paths_items, project_root):
        """Convert key paths to normalized filesystem paths.

        Bracket suffixes are stripped and the root key is replaced with
        `project_root`.
        """
        output = []
        for path_items in basic_paths_items:
            clean_items = []
            for path_item in path_items:
                matches = re.findall(self.pattern_array, path_item)
                if len(matches) > 0:
                    path_item = path_item.replace(matches[0], "")
                if path_item == self.project_root_key:
                    path_item = project_root
                clean_items.append(path_item)
            output.append(os.path.normpath(os.path.sep.join(clean_items)))
        return output

    def create_folders(self, basic_paths, project):
        """Create the folder structure under every configured anatomy root."""
        anatomy = Anatomy(project["full_name"])
        # Anatomy roots may be a single root or a mapping of named roots.
        roots_paths = []
        if isinstance(anatomy.roots, dict):
            for root in anatomy.roots.values():
                roots_paths.append(root.value)
        else:
            roots_paths.append(anatomy.roots.value)

        for root_path in roots_paths:
            project_root = os.path.join(root_path, project["full_name"])
            full_paths = self.compute_paths(basic_paths, project_root)
            # Create folders
            for path in full_paths:
                full_path = path.format(project_root=project_root)
                if os.path.exists(full_path):
                    self.log.debug(
                        "Folder already exists: {}".format(full_path)
                    )
                else:
                    self.log.debug("Creating folder: {}".format(full_path))
                    os.makedirs(full_path)
def register(session):
    """Register the action with the ftrack session (ftrack plugin entry point)."""
    CreateProjectFolders(session).register()
|
<reponame>davidanastasiu/antibiofilm
# AntiBiofilm Peptide Research
# Department of Computer Science and Engineering, Santa Clara University
# Author: <NAME>
# If SVM model decides peptide <=64uM then the SVR model is used to predict MBIC
# Hyperparameters for both models have already been tuned on the training set using cross-validation
# ------------------------------------------------------------------------------
# Libraries
# ------------------------------------------------------------------------------
import pandas as pd
import numpy as np
from sklearn import preprocessing
from sklearn.svm import SVR
from sklearn.svm import SVC
import json
from sklearn.decomposition import PCA
# ------------------------------------------------------------------------------
# Functions
# ------------------------------------------------------------------------------
def seperatePeptides(peptides, threshold):
    """Partition peptides by MBIC.

    Returns (rows with MBIC <= threshold, rows with MBIC > threshold).
    """
    mbic_cols = ['MBIC']
    at_or_below = (peptides[mbic_cols] <= threshold).all(axis=1)
    lower_peptides = peptides[at_or_below]
    strictly_above = (peptides[mbic_cols] > threshold).all(axis=1)
    upper_peptides = peptides[strictly_above]
    return lower_peptides, upper_peptides
# ------------------------------------------------------------------------------
# Variables
# ------------------------------------------------------------------------------
# Input/output file locations (paths are relative to this script's directory).
training_filename = '../../data/mbic_training_data.csv'
test_filename = '../../data/test_peptide_data.csv'
svm_features_filename = 'mbic_svm_forward_selection_features.json'
svr_features_filename = 'mbic_svr_forward_selection_features.json'
pred_filename = './mbic_predictions.csv'
split = 0.760 # Where to break peptides into class 0 and class 1
# NOTE(review): `split` is not referenced anywhere in this script; the actual
# class boundary used below is the hard-coded 64uM threshold — confirm intent.
# Hyperparameters
svm_num_feats = 9
svm_pca_comp = 6
svm_c = 10
svm_g = 1000
svr_num_feats = 9
svr_pca_comp = 8
svr_c = 45
svr_g = 40
# ------------------------------------------------------------------------------
# Main
# ------------------------------------------------------------------------------
def main():
    """Train an SVM classifier and an SVR regressor on the training set, then
    predict MBIC values for test peptides the SVM classifies as active
    (MBIC <= 64uM)."""
    # Prepare training peptides for SVM
    with open(svm_features_filename) as f:
        svm_feat_dict = json.load(f)
    # Keep only the top-ranked forward-selection features.
    svm_feat_dict = svm_feat_dict[0:svm_num_feats]
    peptides_svm = pd.read_csv(training_filename)
    # Binarize MBIC: > 64uM -> class 0, remaining non-zero values -> class 1.
    peptides_svm.loc[(peptides_svm['MBIC'] > 64), 'MBIC'] = 0
    peptides_svm.loc[(peptides_svm['MBIC'] != 0), 'MBIC'] = 1
    # Filter out columns based on feat list
    labels = peptides_svm.columns.values.tolist()
    for l in labels:
        if l == 'MBIC':
            continue
        if l not in svm_feat_dict:
            peptides_svm = peptides_svm.drop(columns=[l])
    y_svm = peptides_svm['MBIC'].to_numpy()
    peptides_svm = peptides_svm.drop(columns=['MBIC'])

    # Prepare training peptides for SVM model: scale to [0, 1], then PCA.
    min_max_scaler_svm = preprocessing.MinMaxScaler()
    X_norm_svm = min_max_scaler_svm.fit_transform(peptides_svm)
    pca_svm = PCA(n_components=svm_pca_comp)
    X_trans_svm = pca_svm.fit_transform(X_norm_svm)
    SVC_rbf = SVC(kernel='rbf', C=svm_c, gamma=svm_g)
    print('Training SVM Peptides Shape: ', peptides_svm.shape)
    svm_fit = SVC_rbf.fit(X_trans_svm, y_svm)  # Train SVM model

    # Prepare peptides for SVR
    with open(svr_features_filename) as f:
        svr_feat_dict = json.load(f)
    svr_feat_dict = svr_feat_dict[0:svr_num_feats]
    peptides_svr = pd.read_csv(training_filename)
    # The regressor is trained only on active peptides (MBIC <= 64uM).
    peptides_svr, _ = seperatePeptides(peptides_svr, 64)
    # Filter out columns based on feat list
    labels = peptides_svr.columns.values.tolist()
    for l in labels:
        if l == 'MBIC':
            continue
        if l not in svr_feat_dict:
            peptides_svr = peptides_svr.drop(columns=[l])
    y_svr = peptides_svr['MBIC'].to_numpy()
    peptides_svr = peptides_svr.drop(columns=['MBIC'])
    # Same scale-then-PCA pipeline, fitted independently for the SVR features.
    min_max_scaler_svr = preprocessing.MinMaxScaler()
    X_norm_svr = min_max_scaler_svr.fit_transform(peptides_svr)
    pca_svr = PCA(n_components=svr_pca_comp)
    X_trans_svr = pca_svr.fit_transform(X_norm_svr)
    SVR_rbf = SVR(kernel='rbf', C=svr_c, gamma=svr_g)
    print('Training SVR Peptides Shape: ', peptides_svr.shape)
    svr_fit = SVR_rbf.fit(X_trans_svr, y_svr)  # Train SVR model

    # Test peptides for SVM
    test_peptides_svm = pd.read_csv(test_filename)
    # Filter out columns based on popular feature list
    labels = test_peptides_svm.columns.values.tolist()
    for l in labels:
        if l not in svm_feat_dict:
            test_peptides_svm = test_peptides_svm.drop(columns=[l])
    # Prepare test peptides (use the scaler/PCA fitted on training data).
    print('Test SVM Peptides Shape: ', test_peptides_svm.shape)
    X_norm_test_svm = min_max_scaler_svm.transform(test_peptides_svm)
    X_trans_tp_svm = pca_svm.transform(X_norm_test_svm)

    # Test peptides for SVR
    test_peptides_svr = pd.read_csv(test_filename)
    names = test_peptides_svr['Name'].tolist()
    dec_fuc = test_peptides_svr['Decision Fn'].tolist()
    # Filter out columns based on popular feature list
    labels = test_peptides_svr.columns.values.tolist()
    for l in labels:
        if l not in svr_feat_dict:
            test_peptides_svr = test_peptides_svr.drop(columns=[l])
    # Prepare test peptides
    print('Test SVR Peptides Shape: ', test_peptides_svr.shape)
    X_norm_test_svr = min_max_scaler_svr.transform(test_peptides_svr)
    X_trans_tp_svr = pca_svr.transform(X_norm_test_svr)

    # Predict which bucket each test peptide falls into (class 1 = active).
    test_peptide_classes = svm_fit.predict(X_trans_tp_svm)
    bucket0 = []
    for i,c in enumerate(test_peptide_classes):
        if c==1:
            bucket0.append(i)
    # Run the regressor only on the peptides classified as active.
    X_trans_tp_svr_small = X_trans_tp_svr[bucket0]
    test_peptide_mbic = svr_fit.predict(X_trans_tp_svr_small)
    mbic_names = []
    dec_funcs = []
    for j in bucket0:
        mbic_names.append(names[j])
        dec_funcs.append(dec_fuc[j])

    # Save MBIC predictions
    pred_results = list(zip(mbic_names, dec_funcs, test_peptide_mbic))
    df_pred_results = pd.DataFrame(pred_results, columns = ['Names', 'Decision Fn', 'Predicted MBIC Value'])
    df_pred_results.to_csv(pred_filename, sep=',',index=False)
# Run the full train/predict pipeline when executed as a script.
if __name__ == "__main__":
    main()
<filename>pandas_ta/momentum/cci.py
# -*- coding: utf-8 -*-
from ..overlap.hlc3 import hlc3
from ..overlap.sma import sma
from ..statistics.mad import mad
from ..utils import get_offset, verify_series
def cci(high, low, close, length=None, c=None, offset=None, **kwargs):
    """Indicator: Commodity Channel Index (CCI)

    CCI = (tp - SMA(tp, length)) / (c * MAD(tp, length)), tp = (H + L + C) / 3

    Args:
        high (pd.Series): Series of 'high's
        low (pd.Series): Series of 'low's
        close (pd.Series): Series of 'close's
        length (int): Lookback period. Default: 20
        c (float): Scaling constant. Default: 0.015
        offset (int): Periods to shift the result. Default: 0

    Kwargs:
        min_periods (int): Minimum periods for the rolling calculations.
        fillna (value): Value for pd.Series.fillna.
        fill_method (str): Fill method for pd.Series.fillna.

    Returns:
        pd.Series: CCI values, named "CCI_{length}_{c}".
    """
    # Validate Arguments
    high = verify_series(high)
    low = verify_series(low)
    close = verify_series(close)
    # Fix: the fallback was 21, contradicting this module's own documentation
    # ("Default Inputs: length=20"); standard CCI uses a 20-period default.
    length = int(length) if length and length > 0 else 20
    c = float(c) if c and c > 0 else 0.015
    min_periods = int(kwargs['min_periods']) if 'min_periods' in kwargs and kwargs['min_periods'] is not None else length
    # NOTE(review): min_periods is computed but not forwarded to sma/mad —
    # confirm whether those helpers should receive it.
    offset = get_offset(offset)

    # Calculate Result
    typical_price = hlc3(high=high, low=low, close=close)
    mean_typical_price = sma(typical_price, length=length)
    mad_typical_price = mad(typical_price, length=length)
    cci = typical_price - mean_typical_price
    cci /= c * mad_typical_price

    # Offset
    if offset != 0:
        cci = cci.shift(offset)

    # Handle fills
    if 'fillna' in kwargs:
        cci.fillna(kwargs['fillna'], inplace=True)
    if 'fill_method' in kwargs:
        cci.fillna(method=kwargs['fill_method'], inplace=True)

    # Name and Categorize it
    cci.name = f"CCI_{length}_{c}"
    cci.category = 'momentum'

    return cci
cci.__doc__ = \
"""Commodity Channel Index (CCI)
Commodity Channel Index is a momentum oscillator used to primarily identify
overbought and oversold levels relative to a mean.
Sources:
https://www.tradingview.com/wiki/Commodity_Channel_Index_(CCI)
Calculation:
Default Inputs:
length=20, c=0.015
SMA = Simple Moving Average
MAD = Mean Absolute Deviation
tp = typical_price = hlc3 = (high + low + close) / 3
mean_tp = SMA(tp, length)
mad_tp = MAD(tp, length)
CCI = (tp - mean_tp) / (c * mad_tp)
Args:
high (pd.Series): Series of 'high's
low (pd.Series): Series of 'low's
close (pd.Series): Series of 'close's
length (int): It's period. Default: 20
c (float): Scaling Constant. Default: 0.015
offset (int): How many periods to offset the result. Default: 0
Kwargs:
fillna (value, optional): pd.DataFrame.fillna(value)
fill_method (value, optional): Type of fill method
Returns:
pd.Series: New feature generated.
"""
from pandas import DataFrame
def stoch_cci(high, low, close, length=None, smoothK=None, smoothD=None, offset=None, **kwargs):
    """Indicator: Stochastic CCI (Stoch CCI)

    Applies the stochastic formula to CCI instead of price:
        stoch = 100 * (CCI - min(CCI, length)) / (max(CCI, length) - min(CCI, length))
    %K is an SMA(smoothK) of that value; %D is an SMA(smoothD) of %K.

    Args:
        high, low, close (pd.Series): input price series (forwarded to cci).
        length (int): CCI and stochastic lookback. Default: 14
        smoothK (int): %K smoothing period. Default: 3
        smoothD (int): %D smoothing period. Default: 3
        offset (int): periods to shift the underlying CCI. Default: 0

    Returns:
        pd.DataFrame: %K and %D columns.
    """
    # Validate arguments
    close = verify_series(close)
    length = int(length) if length and length > 0 else 14
    smoothK = int(smoothK) if smoothK and smoothK > 0 else 3
    smoothD = int(smoothD) if smoothD and smoothD > 0 else 3

    # Calculate CCI
    data_cci = cci(high, low, close, length=length, offset=offset, **kwargs)

    # Calculate Result.  Locals renamed: the originals shadowed the builtins
    # min/max and the enclosing function's own name.
    lowest_cci = data_cci.rolling(length).min()
    highest_cci = data_cci.rolling(length).max()
    stochastic = 100 * (data_cci - lowest_cci) / (highest_cci - lowest_cci)
    stoch_cci_k = sma(stochastic, smoothK)
    stoch_cci_d = sma(stoch_cci_k, smoothD)

    stoch_cci_k.name = f"STOCH_CCI_K_{smoothK}_{smoothD}_{length}"
    stoch_cci_d.name = f"STOCH_CCI_D_{smoothK}_{smoothD}_{length}"
    stoch_cci_k.category = stoch_cci_d.category = 'momentum'

    # Prepare DataFrame to return
    data = {stoch_cci_k.name: stoch_cci_k, stoch_cci_d.name: stoch_cci_d}
    stoch_ccif = DataFrame(data)
    stoch_ccif.name = f"Stoch_CCI_{smoothK}_{smoothD}_{length}"
    stoch_ccif.category = 'momentum'

    return stoch_ccif
# Fixed: the previous text was copy-pasted from Stoch RSI and documented the
# wrong arguments (close only, length default 1, a nonexistent `drift`).
stoch_cci.__doc__ = \
"""Stochastic CCI (Stoch CCI)

The Stochastic CCI indicator (Stoch CCI) is a variation of the Stochastic RSI
that applies the stochastic calculation to the CCI indicator instead of RSI.
It is a measure of CCI relative to its own high/low range over a user defined
period of time.

Sources:
    https://www.tradingview.com/wiki/Stochastic_RSI_(STOCH_RSI)

Calculation:
    Default Inputs:
        length=14, smoothK=3, smoothD=3
    SMA = Simple Moving Average
    Stoch CCI = 100 * (CCI - Lowest Low CCI) / (Highest High CCI - Lowest Low CCI)
    STOCH_CCI_K = SMA(Stoch CCI, smoothK)
    STOCH_CCI_D = SMA(STOCH_CCI_K, smoothD)

Args:
    high (pd.Series): Series of 'high's
    low (pd.Series): Series of 'low's
    close (pd.Series): Series of 'close's
    length (int): It's period. Default: 14
    smoothK (int): %K smoothing period. Default: 3
    smoothD (int): %D smoothing period. Default: 3
    offset (int): How many periods to offset the result. Default: 0

Kwargs:
    fillna (value, optional): pd.DataFrame.fillna(value)
    fill_method (value, optional): Type of fill method

Returns:
    pd.DataFrame: %K and %D columns.
"""
"""
day14a - https://adventofcode.com/2020/day/14
--- Day 14: Docking Data ---
* Part 1
The initialization program (your puzzle input) can either update the bitmask or write a value to memory.
Values and memory addresses are both 36-bit unsigned integers.
For example, ignoring bitmasks for a moment, a line like mem[8] = 11 would
write the value 11 to memory address 8.
The bitmask is always given as a string of 36 bits, written with the most significant bit (representing 2^35)
on the left and the least significant bit (2^0, that is, the 1s bit) on the right.
The current bitmask is applied to values immediately before they are written to memory: a 0 or 1 overwrites
the corresponding bit in the value, while an X leaves the bit in the value unchanged.
The entire 36-bit address space begins initialized to the value 0 at every address.
Example: Two values in memory are not zero - 101 (at address 7) and 64 (at address 8),
producing a sum of 165.
Execute the initialization program.
What is the sum of all values left in memory after it completes?
15172047086292
* Part 2
A version 2 decoder chip doesn't modify the values being written at all.
Instead, it acts as a memory address decoder.
Immediately before a value is written to memory, each bit in the bitmask modifies the
corresponding bit of the destination memory address in the following way:
- If the bitmask bit is 0, the corresponding memory address bit is unchanged.
- If the bitmask bit is 1, the corresponding memory address bit is overwritten with 1.
- If the bitmask bit is X, the corresponding memory address bit is floating.
A floating bit is not connected to anything and instead fluctuates unpredictably.
In practice, this means the floating bits will take on all possible values,
potentially causing many memory addresses to be written all at once!
Example (different example data than part1!): The sum is 208
Execute the initialization program using an emulator for a version 2 decoder chip.
What is the sum of all values left in memory after it completes?
4197941339968
"""
def load_data():
    """Read the puzzle input file and return its lines, stripped of whitespace.

    Returns:
        list[str]: one entry per line of 'input-day14'.
    """
    datafile = 'input-day14'
    # The original bound the file handle to the name 'input', shadowing the
    # builtin; a comprehension also replaces the manual append loop.
    with open(datafile, 'r') as infile:
        return [line.strip() for line in infile]
def process_data(instructions, part):
    """Execute the docking program and return the sum of all memory values."""
    mem = {}
    for instruction in instructions:
        if instruction[:4] == "mask":
            mask = instruction.split(" = ")[1]
        elif instruction[:3] == "mem":
            lhs, rhs = instruction.split(" = ")
            mem_address = int(lhs[4:-1])
            value = int(rhs)
            if part == 1:
                # Version 1 decoder: mask the value, address is unchanged.
                mem[mem_address] = int(apply_mask(value, mask, ["0", "1"]), base=2)
            elif part == 2:
                # Version 2 decoder: mask the address; X bits float over 0/1.
                floating_address = apply_mask(mem_address, mask, ["X", "1"])
                for address in assemble_addresses_internal_results(floating_address, "", []):
                    mem[address] = value
    return sum(mem.values())
def apply_mask(value, mask, maskbits):
    """Return value as a 36-character binary string with the mask applied.

    Mask characters listed in `maskbits` overwrite the corresponding value
    bit; every other mask character leaves the value bit unchanged.
    """
    # Zero-padded 36-bit binary representation of value.
    padded_binval = format(value, '036b')
    masked = [
        m if m in maskbits else bit
        for bit, m in zip(padded_binval, mask)
    ]
    return "".join(masked)
class MyResults(object):
    """Mutable container threaded through the external-results recursion."""
    def __init__(self):
        # Accumulated decoded memory addresses (ints).
        self.results = []
def assemble_addresses_external_results(address, s, r):
    """Expand every floating 'X' bit in `address` and collect the resulting
    integers into r.results.

    First variant: accumulates into an external results object `r`.
    Kept for future reference alongside the self-contained version.
    """
    for pos, ch in enumerate(address):
        if ch == "X":
            # Branch on both possible bit values, then stop: the early return
            # discards incomplete prefixes (< 36 characters).
            tail = address[pos + 1:]
            for bit in ("0", "1"):
                assemble_addresses_external_results(tail, s + bit, r)
            return
        s += ch
    r.results.append(int(s, base=2))
def assemble_addresses_internal_results(address, s, results):
    """Expand every floating 'X' bit in `address` and return the resulting
    integers appended to `results` (self-contained variant).
    """
    for pos, ch in enumerate(address):
        if ch == "X":
            # Branch on both possible bit values; returning here discards
            # incomplete prefixes (< 36 characters).
            tail = address[pos + 1:]
            results = assemble_addresses_internal_results(tail, s + "0", results)
            results = assemble_addresses_internal_results(tail, s + "1", results)
            return results
        s += ch
    results.append(int(s, base=2))
    return results
# Solve both puzzle parts against the same input.
if __name__ == '__main__':
    data = load_data()
    #print(f"{data} \n")
    results = process_data(data, 1)
    print(f"Part 1 - {results}")
    results = process_data(data, 2)
    print(f"Part 2 - {results}\n")
|
<filename>stacks/XIAOMATECH/1.0/services/YARN/package/scripts/yarn.py
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
XmlConfig
Ambari Agent
"""
# Python Imports
import os
# Ambari Common and Resource Management Imports
from resource_management.libraries.script.script import Script
from resource_management.libraries.functions.format import format
from resource_management.libraries.functions.generate_logfeeder_input_config import generate_logfeeder_input_config
from resource_management.libraries.functions.is_empty import is_empty
from resource_management.core.resources.system import Directory, Execute
from resource_management.core.resources.system import File
from resource_management.libraries.resources.xml_config import XmlConfig
from resource_management.core.source import InlineTemplate, Template
from resource_management.libraries.functions.mounted_dirs_helper import handle_mounted_dirs
from resource_management.core.resources.system import Execute
from resource_management.libraries.functions.default import default
# Base URL for downloading Hadoop tarballs/jars; overridable via cluster-env.
download_url_base = default("/configurations/cluster-env/download_url_base",
                            'http://assets.example.com/')
def install_hadoop_share_lib():
    """Download the Hadoop native libraries and any configured shared jars
    into /usr/share/java/hadoop/ (idempotent: existing files are skipped)."""
    import params
    share_dir = '/usr/share/java/hadoop/'
    Directory(
        share_dir,
        owner=params.hdfs_user,
        group=params.user_group,
        create_parents=True,
        mode=0755)
    # Presence of this static lib marks the native bundle as already installed.
    hadoop_native_so_file = share_dir + '/native/libgplcompression.a'
    if not os.path.exists(hadoop_native_so_file):
        Execute('mkdir -p ' + share_dir + '/native/')
        Execute(
            'wget ' + download_url_base +
            '/share/hadoop/hadoopnative.tar.gz -O /tmp/hadoopnative.tar.gz',
            user=params.hdfs_user)
        Execute('tar -zxvf /tmp/hadoopnative.tar.gz -C ' + share_dir +
                '/native/')
    # Optional extra jars, comma-separated in hadoop-env/share_jars.
    share_jar_files_conf = default("/configurations/hadoop-env/share_jars", '').strip()
    if share_jar_files_conf != '':
        share_jar_files = share_jar_files_conf.split(',')
        for jar_file in share_jar_files:
            jar_file_path = share_dir + jar_file.strip()
            if not os.path.exists(jar_file_path):
                Execute('wget ' + download_url_base + '/share/hadoop/' + jar_file + ' -O ' + jar_file_path,
                        user=params.hdfs_user)
def install_yarn():
    """Create YARN log/cgroup/conf directories and, if the target version is
    not yet present, download and unpack the Hadoop tarball, link configs,
    and fix ownership/permissions."""
    import params
    Directory(
        params.yarn_log_dir,
        owner=params.yarn_user,
        group=params.user_group,
        create_parents=True,
        mode=0755)
    # cgroup hierarchies used by the NodeManager for resource isolation.
    Directory([
        '/sys/fs/cgroup/cpu/yarn', '/sys/fs/cgroup/memory/yarn',
        '/sys/fs/cgroup/blkio/yarn', '/sys/fs/cgroup/net_cls/yarn'
    ],
              owner=params.yarn_user,
              group=params.user_group,
              create_parents=True,
              mode=0755)
    Directory(
        params.limits_conf_dir,
        create_parents=True,
        owner='root',
        group='root')
    Directory(
        params.hadoop_conf_dir,
        create_parents=True,
        owner='root',
        group='root')
    install_hadoop_share_lib()
    # (Re)install only when the versioned dir or the install symlink is missing.
    if not os.path.exists(Script.get_stack_root() + '/' + params.version_dir) or not os.path.exists(
            params.install_dir):
        Execute('rm -rf %s' % Script.get_stack_root() + '/' + params.version_dir)
        Execute('rm -rf %s' % params.install_dir)
        Execute('/bin/rm -f /tmp/' + params.filename)
        Execute(
            'wget ' + params.download_url + ' -O /tmp/' + params.filename,
            user=params.hdfs_user)
        Execute('tar -zxf /tmp/' + params.filename + ' -C ' + Script.get_stack_root())
        # Stable install path -> versioned directory; configs symlinked in.
        Execute('ln -s ' + Script.get_stack_root() + '/' + params.version_dir + ' ' + params.install_dir)
        Execute(' rm -rf ' + params.install_dir + '/etc/hadoop')
        Execute('ln -s ' + params.hadoop_conf_dir + ' ' + params.install_dir +
                '/etc/hadoop')
        Execute('mkdir ' + params.install_dir + '/logs && chmod 777 ' +
                params.install_dir + '/logs')
        Execute("echo 'export PATH=%s/bin:$PATH'>/etc/profile.d/hadoop.sh" %
                params.install_dir)
        Execute('chown -R %s:%s %s/%s' %
                (params.hdfs_user, params.user_group, Script.get_stack_root(), params.version_dir))
        Execute('chown -R %s:%s %s' % (params.hdfs_user, params.user_group,
                                       params.install_dir))
        Execute('chmod -R 755 %s/%s' % (Script.get_stack_root(), params.version_dir))
        # container-executor must be root-owned for the secure NodeManager.
        Execute('chown root:%s %s/bin/container-executor' %
                (params.user_group, params.install_dir))
        Execute('/bin/rm -f /tmp/' + params.filename)
def yarn(name=None, config_dir=None):
    """
    :param name: Component name, apptimelinereader, apptimelineserver, nodemanager, resourcemanager, or None (defaults for client)
    :param config_dir: Which config directory to write configs to, which could be different during rolling upgrade.
    """
    import params
    if config_dir is None:
        config_dir = params.hadoop_conf_dir

    # Log/pid directories for the YARN and MapReduce daemons.
    Directory(
        [params.yarn_log_dir_prefix],
        owner=params.yarn_user,
        group=params.user_group,
        create_parents=True,
        ignore_failures=True,
        cd_access='a',
        mode=0775,
    )
    Directory(
        [params.yarn_pid_dir_prefix, params.yarn_pid_dir, params.yarn_log_dir],
        owner=params.yarn_user,
        group=params.user_group,
        create_parents=True,
        cd_access='a',
    )
    Directory(
        [
            params.mapred_pid_dir_prefix, params.mapred_pid_dir,
            params.mapred_log_dir_prefix, params.mapred_log_dir
        ],
        owner=params.mapred_user,
        group=params.user_group,
        create_parents=True,
        cd_access='a',
    )

    # Some of these function calls depend on the directories above being created first.
    if name == 'resourcemanager':
        setup_resourcemanager()
    elif name == 'nodemanager':
        setup_nodemanager()
    elif name == 'apptimelineserver':
        setup_ats()
    elif name == 'historyserver':
        setup_historyserver()

    generate_logfeeder_input_config(
        'yarn', Template("input.config-yarn.json.j2", extra_imports=[default]))

    # if there is the viewFS mount table content, create separate xml config and include in in the core-site
    # else just create core-site
    if params.mount_table_content:
        XmlConfig(
            "core-site.xml",
            conf_dir=config_dir,
            configurations=params.config['configurations']['core-site'],
            configuration_attributes=params.config['configurationAttributes']['core-site'],
            owner=params.hdfs_user,
            group=params.user_group,
            mode=0644,
            xml_include_file=os.path.join(config_dir,
                                          params.xml_inclusion_file_name))
        File(
            os.path.join(config_dir, params.xml_inclusion_file_name),
            owner=params.hdfs_user,
            group=params.user_group,
            content=params.mount_table_content,
            mode=0644)
    else:
        XmlConfig(
            "core-site.xml",
            conf_dir=config_dir,
            configurations=params.config['configurations']['core-site'],
            configuration_attributes=params.config['configurationAttributes']['core-site'],
            owner=params.hdfs_user,
            group=params.user_group,
            mode=0644)

    # During RU, Core Masters and Slaves need hdfs-site.xml
    XmlConfig(
        "hdfs-site.xml",
        conf_dir=config_dir,
        configurations=params.config['configurations']['hdfs-site'],
        configuration_attributes=params.config['configurationAttributes']['hdfs-site'],
        owner=params.hdfs_user,
        group=params.user_group,
        mode=0644)

    XmlConfig(
        "mapred-site.xml",
        conf_dir=config_dir,
        configurations=params.config['configurations']['mapred-site'],
        configuration_attributes=params.config['configurationAttributes']['mapred-site'],
        owner=params.yarn_user,
        group=params.user_group,
        mode=0644)

    # yarn-site gets the registry DNS port merged in from yarn-env.
    configs = {}
    configs.update(params.config['configurations']['yarn-site'])
    configs["hadoop.registry.dns.bind-port"] = params.config['configurations'][
        'yarn-env']['registry.dns.bind-port']
    XmlConfig(
        "yarn-site.xml",
        conf_dir=config_dir,
        configurations=configs,
        configuration_attributes=params.config['configurationAttributes']['yarn-site'],
        owner=params.yarn_user,
        group=params.user_group,
        mode=0644)

    XmlConfig(
        "capacity-scheduler.xml",
        conf_dir=config_dir,
        configurations=params.config['configurations']['capacity-scheduler'],
        configuration_attributes=params.config['configurationAttributes']['capacity-scheduler'],
        owner=params.yarn_user,
        group=params.user_group,
        mode=0644)

    XmlConfig(
        "resource-types.xml",
        conf_dir=config_dir,
        configurations=params.config['configurations']['resource-types'],
        configuration_attributes=params.config['configurationAttributes']['resource-types'],
        owner=params.yarn_user,
        group=params.user_group,
        mode=0644)

    # ulimit configuration for the yarn and mapreduce users.
    File(
        format("{limits_conf_dir}/yarn.conf"),
        mode=0644,
        content=Template('yarn.conf.j2'))
    File(
        format("{limits_conf_dir}/mapreduce.conf"),
        mode=0644,
        content=Template('mapreduce.conf.j2'))

    File(
        os.path.join(config_dir, "yarn-env.sh"),
        owner=params.yarn_user,
        group=params.user_group,
        mode=0755,
        content=InlineTemplate(params.yarn_env_sh_template))

    File(
        format("{yarn_bin}/container-executor"),
        group=params.yarn_executor_container_group,
        mode=params.container_executor_mode)

    File(
        os.path.join(config_dir, "container-executor.cfg"),
        group=params.user_group,
        mode=0644,
        content=InlineTemplate(params.container_executor_cfg_template))

    Directory(
        params.cgroups_dir,
        group=params.user_group,
        create_parents=True,
        mode=0755,
        cd_access="a")

    File(
        os.path.join(config_dir, "mapred-env.sh"),
        owner=params.tc_owner,
        mode=0755,
        content=InlineTemplate(params.mapred_env_sh_template))

    if params.yarn_nodemanager_recovery_dir:
        Directory(
            InlineTemplate(params.yarn_nodemanager_recovery_dir).get_content(),
            owner=params.yarn_user,
            group=params.user_group,
            create_parents=True,
            mode=0755,
            cd_access='a',
        )

    if params.security_enabled:
        # Secure cluster: setuid task-controller plus JAAS configs per daemon.
        File(
            os.path.join(params.hadoop_bin, "task-controller"),
            owner="root",
            group=params.mapred_tt_group,
            mode=06050)
        File(
            os.path.join(config_dir, 'taskcontroller.cfg'),
            owner=params.tc_owner,
            mode=params.tc_mode,
            group=params.mapred_tt_group,
            content=Template("taskcontroller.cfg.j2"))
        File(
            os.path.join(config_dir, 'yarn_jaas.conf'),
            owner=params.yarn_user,
            group=params.user_group,
            content=Template("yarn_jaas.conf.j2"))
        if params.has_ats:
            File(
                os.path.join(config_dir, 'yarn_ats_jaas.conf'),
                owner=params.yarn_user,
                group=params.user_group,
                content=Template("yarn_ats_jaas.conf.j2"))
        if params.has_registry_dns:
            File(
                os.path.join(config_dir, 'yarn_registry_dns_jaas.conf'),
                owner=params.yarn_user,
                group=params.user_group,
                content=Template("yarn_registry_dns_jaas.conf.j2"))
        File(
            os.path.join(config_dir, 'yarn_nm_jaas.conf'),
            owner=params.yarn_user,
            group=params.user_group,
            content=Template("yarn_nm_jaas.conf.j2"))
        if params.has_hs:
            File(
                os.path.join(config_dir, 'mapred_jaas.conf'),
                owner=params.mapred_user,
                group=params.user_group,
                content=Template("mapred_jaas.conf.j2"))
    else:
        File(
            os.path.join(config_dir, 'taskcontroller.cfg'),
            owner=params.tc_owner,
            content=Template("taskcontroller.cfg.j2"))

    XmlConfig(
        "mapred-site.xml",
        conf_dir=config_dir,
        configurations=params.config['configurations']['mapred-site'],
        configuration_attributes=params.config['configurationAttributes']['mapred-site'],
        owner=params.mapred_user,
        group=params.user_group)

    XmlConfig(
        "capacity-scheduler.xml",
        conf_dir=config_dir,
        configurations=params.config['configurations']['capacity-scheduler'],
        configuration_attributes=params.config['configurationAttributes']['capacity-scheduler'],
        owner=params.hdfs_user,
        group=params.user_group)

    if "ssl-client" in params.config['configurations']:
        XmlConfig(
            "ssl-client.xml",
            conf_dir=config_dir,
            configurations=params.config['configurations']['ssl-client'],
            configuration_attributes=params.config['configurationAttributes']['ssl-client'],
            owner=params.hdfs_user,
            group=params.user_group)

        Directory(
            params.hadoop_conf_secure_dir,
            create_parents=True,
            owner='root',
            group=params.user_group,
            cd_access='a',
        )

        XmlConfig(
            "ssl-client.xml",
            conf_dir=params.hadoop_conf_secure_dir,
            configurations=params.config['configurations']['ssl-client'],
            configuration_attributes=params.config['configurationAttributes']['ssl-client'],
            owner=params.hdfs_user,
            group=params.user_group)

    if "ssl-server" in params.config['configurations']:
        XmlConfig(
            "ssl-server.xml",
            conf_dir=config_dir,
            configurations=params.config['configurations']['ssl-server'],
            configuration_attributes=params.config['configurationAttributes']['ssl-server'],
            owner=params.hdfs_user,
            group=params.user_group)

    # Fix ownership of optional example/scheduler files if present.
    if os.path.exists(os.path.join(config_dir, 'fair-scheduler.xml')):
        File(
            os.path.join(config_dir, 'fair-scheduler.xml'),
            owner=params.mapred_user,
            group=params.user_group)

    if os.path.exists(os.path.join(config_dir, 'ssl-client.xml.example')):
        File(
            os.path.join(config_dir, 'ssl-client.xml.example'),
            owner=params.mapred_user,
            group=params.user_group)

    if os.path.exists(os.path.join(config_dir, 'ssl-server.xml.example')):
        File(
            os.path.join(config_dir, 'ssl-server.xml.example'),
            owner=params.mapred_user,
            group=params.user_group)
def setup_historyserver():
    """Create the HDFS directories required by the MapReduce JobHistory
    Server, plus the YARN log-aggregation and system-service directories.

    All ``params.HdfsResource`` declarations use
    ``action="create_on_execute"``; the final
    ``params.HdfsResource(None, action="execute")`` call triggers the
    batched execution of the pending resource definitions.
    """
    import params

    # Remote application-log dir for YARN log aggregation
    # (mode 01777: world-writable with the sticky bit, like /tmp).
    if params.yarn_log_aggregation_enabled:
        params.HdfsResource(
            params.yarn_nm_app_log_dir,
            action="create_on_execute",
            type="directory",
            owner=params.yarn_user,
            group=params.user_group,
            mode=01777,
            recursive_chmod=True)

    # create the /tmp folder with proper permissions if it doesn't exist yet
    if params.entity_file_history_directory.startswith('/tmp'):
        params.HdfsResource(
            params.hdfs_tmp_dir,
            action="create_on_execute",
            type="directory",
            owner=params.hdfs_user,
            mode=0777,
        )

    # YARN system-service directories ('async' and 'sync' subfolders).
    params.HdfsResource(
        params.yarn_system_service_dir + '/async',
        action="create_on_execute",
        type="directory",
        owner=params.yarn_user,
        group=params.user_group)
    params.HdfsResource(
        params.yarn_system_service_dir + '/sync',
        action="create_on_execute",
        type="directory",
        owner=params.yarn_user,
        group=params.user_group)

    # Directory holding entity/timeline history files.
    params.HdfsResource(
        params.entity_file_history_directory,
        action="create_on_execute",
        type="directory",
        owner=params.yarn_user,
        group=params.user_group)

    # Classic MapReduce system dirs.
    params.HdfsResource(
        "/mapred",
        type="directory",
        action="create_on_execute",
        owner=params.mapred_user)
    params.HdfsResource(
        "/mapred/system",
        type="directory",
        action="create_on_execute",
        owner=params.hdfs_user)

    # JobHistory "done" dir; parents also get their permissions adjusted.
    params.HdfsResource(
        params.mapreduce_jobhistory_done_dir,
        type="directory",
        action="create_on_execute",
        owner=params.mapred_user,
        group=params.user_group,
        change_permissions_for_parents=True,
        mode=0777)

    # Execute all queued HDFS resource operations in one batch.
    params.HdfsResource(None, action="execute")

    # Local leveldb state store for the JobHistory Server.
    Directory(
        params.jhs_leveldb_state_store_dir,
        owner=params.mapred_user,
        group=params.user_group,
        create_parents=True,
        cd_access="a",
        recursive_ownership=True,
    )

    # LogFeeder (LogSearch) input config for mapreduce2 logs.
    generate_logfeeder_input_config(
        'mapreduce2',
        Template("input.config-mapreduce2.json.j2", extra_imports=[default]))
def setup_nodemanager():
    """Prepare the YARN NodeManager local/log directories.

    On the first start after security was toggled, the old local and log
    dirs are deleted and a marker file is written (secure mode) or removed
    (non-secure mode) to track that state.
    """
    import params

    # First start after enabling/disabling security
    if params.toggle_nm_security:

        # Wipe NM local and log dirs so they are recreated with the
        # permissions appropriate for the new security mode.
        Directory(
            params.nm_local_dirs_list + params.nm_log_dirs_list,
            action='delete')

        # If yarn.nodemanager.recovery.dir exists, remove this dir
        if params.yarn_nodemanager_recovery_dir:
            Directory(
                InlineTemplate(
                    params.yarn_nodemanager_recovery_dir).get_content(),
                action='delete')

        # Setting NM marker file
        if params.security_enabled:
            Directory(params.nm_security_marker_dir)
            File(
                params.nm_security_marker,
                content=
                "Marker file to track first start after enabling/disabling security. "
                "During first start yarn local, log dirs are removed and recreated"
            )
        elif not params.security_enabled:
            File(params.nm_security_marker, action="delete")

    # (Re)create dirs on every mounted volume and persist the dir->mount
    # mapping so later restarts can detect unmounted drives.
    if not params.security_enabled or params.toggle_nm_security:
        nm_log_dir_to_mount_file_content = handle_mounted_dirs(
            create_log_dir, params.nm_log_dirs,
            params.nm_log_dir_to_mount_file, params)
        # create a history file used by handle_mounted_dirs
        File(
            params.nm_log_dir_to_mount_file,
            owner=params.hdfs_user,
            group=params.user_group,
            mode=0644,
            content=nm_log_dir_to_mount_file_content)
        nm_local_dir_to_mount_file_content = handle_mounted_dirs(
            create_local_dir, params.nm_local_dirs,
            params.nm_local_dir_to_mount_file, params)
        File(
            params.nm_local_dir_to_mount_file,
            owner=params.hdfs_user,
            group=params.user_group,
            mode=0644,
            content=nm_local_dir_to_mount_file_content)
def setup_resourcemanager():
    """Create the files and directories used by the YARN ResourceManager:
    exclude/include host lists, the job summary log, and (optionally) the
    HDFS node-labels directory.
    """
    import params

    # Directory and file holding the list of decommissioned (excluded) hosts.
    Directory(
        params.rm_nodes_exclude_dir,
        mode=0755,
        create_parents=True,
        cd_access='a',
    )
    File(
        params.exclude_file_path,
        content=Template("exclude_hosts_list.j2"),
        owner=params.yarn_user,
        group=params.user_group)

    # Optional include-hosts list (only when explicitly configured).
    if params.include_hosts:
        Directory(
            params.rm_nodes_include_dir,
            mode=0755,
            create_parents=True,
            cd_access='a',
        )
        File(
            params.include_file_path,
            content=Template("include_hosts_list.j2"),
            owner=params.yarn_user,
            group=params.user_group)

    # This depends on the parent directory already existing.
    File(
        params.yarn_job_summary_log,
        owner=params.yarn_user,
        group=params.user_group)

    # Create the HDFS node-labels dir when node_label_enable is explicitly
    # true, OR when it is unset but a node_labels_dir is configured.
    # (The mixed and/or below relies on 'and' binding tighter than 'or'.)
    if not is_empty(
            params.node_label_enable) and params.node_label_enable or is_empty(
                params.node_label_enable) and params.node_labels_dir:
        params.HdfsResource(
            params.node_labels_dir,
            type="directory",
            action="create_on_execute",
            owner=params.yarn_user,
            group=params.user_group,
            mode=0700)
        # Execute the queued HDFS resource operation.
        params.HdfsResource(None, action="execute")
def setup_ats():
    """Create the local leveldb directories and (for ATS 1.5) the HDFS
    entity-group-fs store directories for the YARN Application Timeline
    Server.
    """
    import params

    # Local leveldb dir for timeline entities.
    Directory(
        params.ats_leveldb_dir,
        owner=params.yarn_user,
        group=params.user_group,
        create_parents=True,
        cd_access="a",
    )

    # Separate leveldb state store (only on stacks that support it).
    if params.stack_supports_timeline_state_store:
        Directory(
            params.ats_leveldb_state_store_dir,
            owner=params.yarn_user,
            group=params.user_group,
            create_parents=True,
            cd_access="a",
        )

    # app timeline server 1.5 directories
    if not is_empty(params.entity_groupfs_store_dir):
        # Parent is created first (mode 0755, fixing parents too), then the
        # store dir itself with its configured mode.
        parent_path = os.path.dirname(params.entity_groupfs_store_dir)
        params.HdfsResource(
            parent_path,
            type="directory",
            action="create_on_execute",
            change_permissions_for_parents=True,
            owner=params.yarn_user,
            group=params.user_group,
            mode=0755)
        params.HdfsResource(
            params.entity_groupfs_store_dir,
            type="directory",
            action="create_on_execute",
            owner=params.yarn_user,
            group=params.user_group,
            mode=params.entity_groupfs_store_dir_mode)
    if not is_empty(params.entity_groupfs_active_dir):
        # Same two-step creation for the "active" dir.
        parent_path = os.path.dirname(params.entity_groupfs_active_dir)
        params.HdfsResource(
            parent_path,
            type="directory",
            action="create_on_execute",
            change_permissions_for_parents=True,
            owner=params.yarn_user,
            group=params.user_group,
            mode=0755)
        params.HdfsResource(
            params.entity_groupfs_active_dir,
            type="directory",
            action="create_on_execute",
            owner=params.yarn_user,
            group=params.user_group,
            mode=params.entity_groupfs_active_dir_mode)

    # Execute all queued HDFS resource operations in one batch.
    params.HdfsResource(None, action="execute")
def create_log_dir(dir_name):
    """Create one NodeManager log directory (callback for
    handle_mounted_dirs).

    Failures are ignored so a single bad mount does not abort the whole
    directory setup.
    """
    import params
    Directory(
        dir_name,
        create_parents=True,
        cd_access="a",
        mode=0775,
        owner=params.yarn_user,
        group=params.user_group,
        ignore_failures=True,
    )
def create_local_dir(dir_name):
    """Create one NodeManager local directory (callback for
    handle_mounted_dirs).

    When security was just toggled, existing contents also get their modes
    recursively relaxed (files a+rw, dirs a+rwx).
    """
    import params
    directory_args = {}
    if params.toggle_nm_security:
        directory_args["recursive_mode_flags"] = {'f': 'a+rw', 'd': 'a+rwx'}
    Directory(
        dir_name,
        create_parents=True,
        cd_access="a",
        mode=0755,
        owner=params.yarn_user,
        group=params.user_group,
        ignore_failures=True,
        **directory_args)
|
"""Case_Attribute TQL Filter"""
# standard library
from enum import Enum
# first-party
from tcex.api.tc.v3.api_endpoints import ApiEndpoints
from tcex.api.tc.v3.filter_abc import FilterABC
from tcex.api.tc.v3.tql.tql import Tql
from tcex.api.tc.v3.tql.tql_operator import TqlOperator
from tcex.api.tc.v3.tql.tql_type import TqlType
class CaseAttributeFilter(FilterABC):
    """Filter Object for CaseAttributes.

    Each method appends one condition to the underlying TQL query; the
    **bold** keyword in each docstring is the TQL field name sent to the API.
    """

    @property
    def _api_endpoint(self) -> str:
        """Return the API endpoint."""
        return ApiEndpoints.CASE_ATTRIBUTES.value

    def case_id(self, operator: Enum, case_id: int) -> None:
        """Filter Case ID based on **caseId** keyword.

        Args:
            operator: The operator enum for the filter.
            case_id: The ID of the case the attribute is applied to.
        """
        self._tql.add_filter('caseId', operator, case_id, TqlType.INTEGER)

    def date_added(self, operator: Enum, date_added: str) -> None:
        """Filter Date Added based on **dateAdded** keyword.

        Args:
            operator: The operator enum for the filter.
            date_added: The date the attribute was added to the system.
        """
        # Normalize any accepted date input to the API's expected format.
        date_added = self.utils.any_to_datetime(date_added).strftime('%Y-%m-%dT%H:%M:%S')
        self._tql.add_filter('dateAdded', operator, date_added, TqlType.STRING)

    def date_val(self, operator: Enum, date_val: str) -> None:
        """Filter Date based on **dateVal** keyword.

        Args:
            operator: The operator enum for the filter.
            date_val: The date value of the attribute (only applies to certain types).
        """
        # Normalize any accepted date input to the API's expected format.
        date_val = self.utils.any_to_datetime(date_val).strftime('%Y-%m-%dT%H:%M:%S')
        self._tql.add_filter('dateVal', operator, date_val, TqlType.STRING)

    def displayed(self, operator: Enum, displayed: bool) -> None:
        """Filter Displayed based on **displayed** keyword.

        Args:
            operator: The operator enum for the filter.
            displayed: Whether or not the attribute is displayed on the item.
        """
        self._tql.add_filter('displayed', operator, displayed, TqlType.BOOLEAN)

    @property
    def has_case(self):
        """Return **CaseFilter** for further filtering.

        The returned filter is attached as a **hasCase** sub-query; any
        conditions added to it constrain the associated case.
        """
        # first-party
        from tcex.api.tc.v3.cases.case_filter import CaseFilter

        cases = CaseFilter(Tql())
        self._tql.add_filter('hasCase', TqlOperator.EQ, cases, TqlType.SUB_QUERY)
        return cases

    def id(self, operator: Enum, id: int) -> None:  # pylint: disable=redefined-builtin
        """Filter ID based on **id** keyword.

        Args:
            operator: The operator enum for the filter.
            id: The ID of the attribute.
        """
        self._tql.add_filter('id', operator, id, TqlType.INTEGER)

    def int_val(self, operator: Enum, int_val: int) -> None:
        """Filter Integer Value based on **intVal** keyword.

        Args:
            operator: The operator enum for the filter.
            int_val: The integer value of the attribute (only applies to certain types).
        """
        self._tql.add_filter('intVal', operator, int_val, TqlType.INTEGER)

    def last_modified(self, operator: Enum, last_modified: str) -> None:
        """Filter Last Modified based on **lastModified** keyword.

        Args:
            operator: The operator enum for the filter.
            last_modified: The date the attribute was last modified in the system.
        """
        # Normalize any accepted date input to the API's expected format.
        last_modified = self.utils.any_to_datetime(last_modified).strftime('%Y-%m-%dT%H:%M:%S')
        self._tql.add_filter('lastModified', operator, last_modified, TqlType.STRING)

    def max_size(self, operator: Enum, max_size: int) -> None:
        """Filter Max Size based on **maxSize** keyword.

        Args:
            operator: The operator enum for the filter.
            max_size: The max length of the attribute text.
        """
        self._tql.add_filter('maxSize', operator, max_size, TqlType.INTEGER)

    def owner(self, operator: Enum, owner: int) -> None:
        """Filter Owner ID based on **owner** keyword.

        Args:
            operator: The operator enum for the filter.
            owner: The owner ID of the attribute.
        """
        self._tql.add_filter('owner', operator, owner, TqlType.INTEGER)

    def owner_name(self, operator: Enum, owner_name: str) -> None:
        """Filter Owner Name based on **ownerName** keyword.

        Args:
            operator: The operator enum for the filter.
            owner_name: The owner name of the attribute.
        """
        self._tql.add_filter('ownerName', operator, owner_name, TqlType.STRING)

    def source(self, operator: Enum, source: str) -> None:
        """Filter Source based on **source** keyword.

        Args:
            operator: The operator enum for the filter.
            source: The source text of the attribute.
        """
        self._tql.add_filter('source', operator, source, TqlType.STRING)

    def text(self, operator: Enum, text: str) -> None:
        """Filter Text based on **text** keyword.

        Args:
            operator: The operator enum for the filter.
            text: The text of the attribute (only applies to certain types).
        """
        self._tql.add_filter('text', operator, text, TqlType.STRING)

    def type(self, operator: Enum, type: int) -> None:  # pylint: disable=redefined-builtin
        """Filter Type ID based on **type** keyword.

        Args:
            operator: The operator enum for the filter.
            type: The ID of the attribute type.
        """
        self._tql.add_filter('type', operator, type, TqlType.INTEGER)

    def type_name(self, operator: Enum, type_name: str) -> None:
        """Filter Type Name based on **typeName** keyword.

        Args:
            operator: The operator enum for the filter.
            type_name: The name of the attribute type.
        """
        self._tql.add_filter('typeName', operator, type_name, TqlType.STRING)

    def user(self, operator: Enum, user: str) -> None:
        """Filter User based on **user** keyword.

        Args:
            operator: The operator enum for the filter.
            user: The user who created the attribute.
        """
        self._tql.add_filter('user', operator, user, TqlType.STRING)
|
<reponame>bm16ton/yoga-c630-linux-kernel
# SPDX-License-Identifier: GPL-2.0
#
# Runs UML kernel, collects output, and handles errors.
#
# Copyright (C) 2019, Google LLC.
# Author: <NAME> <<EMAIL>>
# Author: <NAME> <<EMAIL>>
import logging
import subprocess
import os
import shutil
import signal
from typing import Iterator
from contextlib import ExitStack
import kunit_config
import kunit_parser
KCONFIG_PATH = '.config'
KUNITCONFIG_PATH = '.kunitconfig'
DEFAULT_KUNITCONFIG_PATH = 'arch/um/configs/kunit_defconfig'
BROKEN_ALLCONFIG_PATH = 'tools/testing/kunit/configs/broken_on_uml.config'
OUTFILE_PATH = 'test.log'
def get_file_path(build_dir, default):
    """Return *default*, prefixed with *build_dir* when one is given."""
    if not build_dir:
        return default
    return os.path.join(build_dir, default)
class ConfigError(Exception):
    """Represents an error trying to configure the Linux kernel (.config
    generation or validation failed)."""
class BuildError(Exception):
    """Represents an error trying to build the Linux kernel (make failed)."""
class LinuxSourceTreeOperations(object):
	"""An abstraction over command line operations performed on a source tree.

	All methods shell out to ``make`` (ARCH=um) or run the built UML binary;
	configuration failures raise ConfigError and build failures raise
	BuildError.
	"""

	def make_mrproper(self) -> None:
		"""Run ``make mrproper`` to wipe generated files and configs."""
		try:
			subprocess.check_output(['make', 'mrproper'], stderr=subprocess.STDOUT)
		except OSError as e:
			raise ConfigError('Could not call make command: ' + str(e))
		except subprocess.CalledProcessError as e:
			raise ConfigError(e.output.decode())

	def make_olddefconfig(self, build_dir, make_options) -> None:
		"""Run ``make ARCH=um olddefconfig`` (optionally with O=build_dir)."""
		command = ['make', 'ARCH=um', 'olddefconfig']
		if make_options:
			command.extend(make_options)
		if build_dir:
			command += ['O=' + build_dir]
		try:
			subprocess.check_output(command, stderr=subprocess.STDOUT)
		except OSError as e:
			raise ConfigError('Could not call make command: ' + str(e))
		except subprocess.CalledProcessError as e:
			raise ConfigError(e.output.decode())

	def make_allyesconfig(self, build_dir, make_options) -> None:
		"""Generate an allyesconfig for UML, then append the known-broken
		config overrides from BROKEN_ALLCONFIG_PATH so KUnit can still run.
		"""
		kunit_parser.print_with_timestamp(
			'Enabling all CONFIGs for UML...')
		command = ['make', 'ARCH=um', 'allyesconfig']
		if make_options:
			command.extend(make_options)
		if build_dir:
			command += ['O=' + build_dir]
		process = subprocess.Popen(
			command,
			stdout=subprocess.DEVNULL,
			stderr=subprocess.STDOUT)
		process.wait()
		kunit_parser.print_with_timestamp(
			'Disabling broken configs to run KUnit tests...')
		# Fix: previously both files were opened without ever being closed
		# (an ExitStack was created but never used), leaking the handles and
		# risking an unflushed .config append. Context managers guarantee
		# flush+close.
		with open(get_kconfig_path(build_dir), 'a') as config, \
				open(BROKEN_ALLCONFIG_PATH, 'r') as broken:
			config.write(broken.read())
		kunit_parser.print_with_timestamp(
			'Starting Kernel with all configs takes a few minutes...')

	def make(self, jobs, build_dir, make_options) -> None:
		"""Build the kernel with ``make ARCH=um --jobs=<jobs>``.

		Raises BuildError on a non-zero exit; build warnings on stderr are
		printed but tolerated.
		"""
		command = ['make', 'ARCH=um', '--jobs=' + str(jobs)]
		if make_options:
			command.extend(make_options)
		if build_dir:
			command += ['O=' + build_dir]
		try:
			proc = subprocess.Popen(command,
						stderr=subprocess.PIPE,
						stdout=subprocess.DEVNULL)
		except OSError as e:
			raise BuildError('Could not call make command: ' + str(e))
		_, stderr = proc.communicate()
		if proc.returncode != 0:
			raise BuildError(stderr.decode())
		if stderr:  # likely only due to build warnings
			print(stderr.decode())

	def linux_bin(self, params, timeout, build_dir) -> None:
		"""Runs the Linux UML binary. Must be named 'linux'.

		Console output is captured into the OUTFILE_PATH file under
		build_dir; waits at most *timeout* seconds for the kernel to exit.
		"""
		linux_bin = get_file_path(build_dir, 'linux')
		outfile = get_outfile_path(build_dir)
		with open(outfile, 'w') as output:
			process = subprocess.Popen([linux_bin] + params,
						   stdout=output,
						   stderr=subprocess.STDOUT)
			process.wait(timeout)
def get_kconfig_path(build_dir) -> str:
    """Return the path of the generated .config inside *build_dir*."""
    return get_file_path(build_dir, KCONFIG_PATH)
def get_kunitconfig_path(build_dir) -> str:
    """Return the path of the .kunitconfig inside *build_dir*."""
    return get_file_path(build_dir, KUNITCONFIG_PATH)
def get_outfile_path(build_dir) -> str:
    """Return the path of the captured kernel console log inside *build_dir*."""
    return get_file_path(build_dir, OUTFILE_PATH)
class LinuxSourceTree(object):
	"""Represents a Linux kernel source tree with KUnit tests."""

	def __init__(self, build_dir: str, load_config=True, kunitconfig_path='') -> None:
		"""Load the .kunitconfig (copying the default one if absent) and
		install a SIGINT handler that restores the console.
		"""
		signal.signal(signal.SIGINT, self.signal_handler)

		self._ops = LinuxSourceTreeOperations()

		if not load_config:
			return

		if kunitconfig_path:
			if not os.path.exists(kunitconfig_path):
				raise ConfigError(f'Specified kunitconfig ({kunitconfig_path}) does not exist')
		else:
			kunitconfig_path = get_kunitconfig_path(build_dir)
			if not os.path.exists(kunitconfig_path):
				shutil.copyfile(DEFAULT_KUNITCONFIG_PATH, kunitconfig_path)

		self._kconfig = kunit_config.Kconfig()
		self._kconfig.read_from_file(kunitconfig_path)

	def clean(self) -> bool:
		"""Run ``make mrproper``; return False (and log) on failure."""
		try:
			self._ops.make_mrproper()
		except ConfigError as e:
			logging.error(e)
			return False
		return True

	def validate_config(self, build_dir) -> bool:
		"""Check that every entry of the .kunitconfig made it into the
		generated .config; log the missing entries otherwise.
		"""
		kconfig_path = get_kconfig_path(build_dir)
		validated_kconfig = kunit_config.Kconfig()
		validated_kconfig.read_from_file(kconfig_path)
		if not self._kconfig.is_subset_of(validated_kconfig):
			invalid = self._kconfig.entries() - validated_kconfig.entries()
			message = 'Provided Kconfig is not contained in validated .config. Following fields found in kunitconfig, ' \
					  'but not in .config: %s' % (
					', '.join([str(e) for e in invalid])
			)
			logging.error(message)
			return False
		return True

	def build_config(self, build_dir, make_options) -> bool:
		"""Write the .kunitconfig into .config and run olddefconfig."""
		kconfig_path = get_kconfig_path(build_dir)
		if build_dir and not os.path.exists(build_dir):
			os.mkdir(build_dir)
		self._kconfig.write_to_file(kconfig_path)
		try:
			self._ops.make_olddefconfig(build_dir, make_options)
		except ConfigError as e:
			logging.error(e)
			return False
		return self.validate_config(build_dir)

	def build_reconfig(self, build_dir, make_options) -> bool:
		"""Creates a new .config if it is not a subset of the .kunitconfig."""
		kconfig_path = get_kconfig_path(build_dir)
		if os.path.exists(kconfig_path):
			existing_kconfig = kunit_config.Kconfig()
			existing_kconfig.read_from_file(kconfig_path)
			if not self._kconfig.is_subset_of(existing_kconfig):
				print('Regenerating .config ...')
				os.remove(kconfig_path)
				return self.build_config(build_dir, make_options)
			else:
				return True
		else:
			print('Generating .config ...')
			return self.build_config(build_dir, make_options)

	def build_um_kernel(self, alltests, jobs, build_dir, make_options) -> bool:
		"""Build the UML kernel (optionally as allyesconfig); return False
		(and log) on any config or build error.
		"""
		try:
			if alltests:
				self._ops.make_allyesconfig(build_dir, make_options)
			self._ops.make_olddefconfig(build_dir, make_options)
			self._ops.make(jobs, build_dir, make_options)
		except (ConfigError, BuildError) as e:
			logging.error(e)
			return False
		return self.validate_config(build_dir)

	def run_kernel(self, args=None, build_dir='', filter_glob='', timeout=None) -> Iterator[str]:
		"""Boot the built UML kernel and yield its console output line by line.

		Fix: work on a copy of *args* — previously a caller-supplied list was
		extended in place as a side effect.
		"""
		args = [] if args is None else list(args)
		args.extend(['mem=1G', 'console=tty'])
		if filter_glob:
			args.append('kunit.filter_glob='+filter_glob)
		self._ops.linux_bin(args, timeout, build_dir)
		outfile = get_outfile_path(build_dir)
		# The UML binary can leave the terminal in a raw state; reset it.
		subprocess.call(['stty', 'sane'])
		with open(outfile, 'r') as file:
			for line in file:
				yield line

	def signal_handler(self, sig, frame) -> None:
		"""SIGINT handler: restore terminal settings left raw by UML."""
		logging.error('Build interruption occurred. Cleaning console.')
		subprocess.call(['stty', 'sane'])
|
<reponame>behavioral-data/multiverse
import click
import os
import numpy as np
from transformers import (BartForConditionalGeneration, DataCollator,
RobertaTokenizerFast, Trainer, TrainingArguments,
BartTokenizerFast, BartConfig, BartForSequenceClassification,
AutoTokenizer, BertForTokenClassification, BertConfig)
from src.models.CORAL_BART.trainer import CORALBARTTrainer, CORALBARTTrainerClassification, CORALBARTTrainerSeq2Seq, CORALBARTMultiTaskTrainer
from src.models.CORAL_BART.dataset import (KaggleDiffsDataset, KaggleDiffsDatasetClassification, KaggleDiffsReader,
DynamicPaddingCollator, DynamicPaddingCollatorSeq2Seq, CoralKaggleDiffsDataset, CoralDiffsReader)
from src.models.CORAL_BART.utils import count_parameters, block_shuffle, has_internet
from src.models.CORAL_BART.metrics import get_seq2seq_eval, classification_eval, get_multitask_eval
from src.models.CORAL_BART.models import MultiTaskBart
from scipy.special import softmax
@click.command()
@click.argument("path_to_dataset", type=click.Path())
@click.argument("path_to_class_model", type=click.Path())
@click.argument("path_to_gen_model", type=click.Path())
@click.option("--path_to_tokenizer", type=click.Path(), default='./tokenizer/')
def main(path_to_dataset,
         path_to_class_model,
         path_to_gen_model,
         path_to_tokenizer='./tokenizer/'):
    """Two-stage evaluation: run a token-classification model over the
    held-out Kaggle-diffs split to predict edit spans, inject those
    predictions into the dataset, then evaluate the span-aware
    multi-task BART generator on it.
    """
    # Rebuild the BART tokenizer from the on-disk vocab/merges files.
    vocab_path = os.path.join(path_to_tokenizer, "vocab.json")
    merges_path = os.path.join(path_to_tokenizer, "merges.txt")
    tokenizer = BartTokenizerFast(vocab_path, merges_path)
    collator = DynamicPaddingCollatorSeq2Seq(tokenizer)

    base_dataset = KaggleDiffsDataset
    dataset_args = {"predict_spans": True,
                    "replace_inserted_tokens_in_output": False}

    # Shuffle by competition name (fixed seed) and keep the last 5% as the
    # evaluation split.
    data_reader = KaggleDiffsReader(path_to_dataset)
    data_reader.diffs = block_shuffle(
        data_reader.diffs, key_fn=lambda x: x["metadata"]["comp_name"],
        seed=421994)
    n_examples = len(data_reader)
    split_point = int(n_examples * (1 - 0.05))
    eval_dataset = base_dataset(
        data_reader.diffs[split_point:], tokenizer, max_length=128, **dataset_args)

    # Stage 1: token classification (eval only; training knobs left
    # commented out on purpose).
    class_training_args = TrainingArguments(
        output_dir="./results/",
        overwrite_output_dir=False,
        # num_train_epochs=num_train_epochs,
        # per_device_train_batch_size=per_device_train_batch_size,
        per_device_eval_batch_size=150,
        # warmup_steps=warmup_steps,
        # save_steps=save_steps,
        # weight_decay=weight_decay,
        # logging_dir=logging_dir,
        # logging_steps=logging_steps,
        # eval_steps=eval_steps,
        # logging_first_step=True,
        # learning_rate=learning_rate,
        # save_total_limit=2,
        # evaluate_during_training=not dont_evaluate
    )
    class_model = BertForTokenClassification.from_pretrained(path_to_class_model)
    class_trainer = CORALBARTTrainerClassification( model=class_model,
        args=class_training_args,
        eval_dataset=eval_dataset,
        prediction_loss_only=False,
        compute_metrics=classification_eval,
        data_collator=collator,
        tokenizer=tokenizer,
        save_eval=True)

    # Predict span labels for every eval example and write them back into
    # the dataset so the generator can use them for span-aware decoding.
    eval_data_loader = class_trainer.get_eval_dataloader()
    output = class_trainer._prediction_loop(eval_data_loader , description="Evaluation")
    for i,(logits,labels) in enumerate(zip(output.predictions, output.label_ids)):
        labels = np.array(labels)
        logits = np.array(logits)
        # -100 marks padded/ignored positions; drop them before scoring.
        mask = labels != -100
        labels= labels[mask]
        logits = logits[mask]
        # Positive-class probability, thresholded at 0.15 (same threshold
        # used below for the generator metrics/config).
        score = softmax(logits,axis=1)[:,-1]
        pred_labels = score > 0.15
        eval_dataset.diffs[i]["input_labels"] = pred_labels.tolist()

    # Stage 2: span-aware multi-task BART generation over the same split.
    gen_metrics = get_multitask_eval(
        tokenizer, wandb=False, threshold=0.15)

    # NOTE(review): vocab_size is the tokenizer vocab plus 4 — presumably
    # the added special tokens; confirm against training config.
    gen_config = BartConfig(
        vocab_size = tokenizer.vocab_size+4,
        hidden_dropout_prob=0.0,
        num_labels=2,
        span_aware_decoding=True,
        classification_threshold=0.15,
        d_model=128
    )
    gen_model = MultiTaskBart.from_pretrained(path_to_gen_model, config=gen_config)

    gen_training_args = TrainingArguments(
        output_dir="./results/",
        overwrite_output_dir=False,
        # num_train_epochs=num_train_epochs,
        # per_device_train_batch_size=per_device_train_batch_size,
        per_device_eval_batch_size=60,
        # warmup_steps=warmup_steps,
        # save_steps=save_steps,
        # weight_decay=weight_decay,
        # logging_dir=logging_dir,
        # logging_steps=logging_steps,
        # eval_steps=eval_steps,
        # logging_first_step=True,
        # learning_rate=learning_rate,
        # save_total_limit=2,
        # evaluate_during_training=not dont_evaluate
    )
    gen_trainer = CORALBARTMultiTaskTrainer(model=gen_model,
        args=gen_training_args,
        eval_dataset=eval_dataset,
        prediction_loss_only=False,
        data_collator=collator,
        tokenizer=tokenizer,
        save_eval=True,
        compute_metrics = gen_metrics,
        oracle_span_aware_decoder=True)
    gen_trainer.evaluate()
if __name__ == "__main__":
main() |
<gh_stars>1-10
#!/usr/bin/env python3
# -+-coding: utf-8 -+-
"""
"""
#--------------------------------------------
# Authors: <NAME> <<EMAIL>>
#
#--------------------------------------------
# Date: 12.12.19
#--------------------------------------------
# License: BSD (3-clause)
#--------------------------------------------
# Updates
#--------------------------------------------
import mne
import numpy as np
from jumeg.base.jumeg_base import jumeg_base as jb
from jumeg.base import jumeg_logger
logger = jumeg_logger.get_logger()
__version__= "2020.05.05.001"
class JuMEG_MNE_FILTER(object):
    """
    Wrapper class around mne.filter (MNE version 19.2) for juMEG.

    Calls the MNE filter, e.g.:
     raw.filter(l_freq=flow, h_freq=fhigh, picks=picks)
    then saves and renames the filtered raw file.

    Parameters (settable via kwargs):
     raw      : <None>  raw obj
     flow     : <None>  mne <l_freq>
     fhigh    : <None>  mne <h_freq>
     picks    : <None>  => if None then exclude channels from <stim> group
     save     : <False> / True
     dcoffset : <False> => if True apply DC offset correction, subtract mean
     overwrite: <False> if save, overwrite existing filtered file
     verbose  : <False> tell me more
     debug    : <False>
     annotations: <None> mne annotations re-applied after filtering/reload

    Returns:
    --------
    filename of filtered raw
    !!! raw is filtered in place !!!

    Example:
    --------
    from jumeg.base.jumeg_base import jumeg_base as jb
    from jumeg.filter.jumeg_mne_filter import JuMEG_MNE_FILTER
    #--- load raw
    raw = jb.get_raw_obj(fname, raw=None)
    #--- init MNE_Filter class
    jfi = JuMEG_MNE_FILTER()
    #--- filter inplace
    fname_filtered_raw = jfi.apply(raw=raw, flow=0.1, fhigh=45.0, picks=None,
                                   save=True, verbose=True, overwrite=True)
    """
    __slots__ = ["raw","flow","fhigh","picks","save","overwrite","dcoffset","verbose","debug","_is_filtered","_is_reloaded","_fname_orig","annotations"]

    def __init__(self,**kwargs):
        #super().__init__()
        self.clear()
        self._update_from_kwargs(**kwargs)

    @property
    def fname_orig(self):
        """Filename of the raw before the last filter/rename."""
        return self._fname_orig

    @property
    def fname(self):
        """Current filename stored in the raw object."""
        return jb.get_raw_filename(self.raw,index=0)

    @property
    def isFiltered(self):
        """True once apply() has filtered (or reloaded) the raw."""
        return self._is_filtered

    @property
    def isReloaded(self):
        """True when the filtered raw was reloaded from disk instead of computed."""
        return self._is_reloaded

    @property
    def postfix(self):
        """Filter filename postfix, e.g. 'fibp0.10-45.0' (recomputed)."""
        return self._update_postfix()

    def clear(self):
        """Reset every slot attribute to None."""
        for k in self.__slots__:
            self.__setattr__(k,None)

    def _update_from_kwargs(self,**kwargs):
        """Update any slot attribute whose name appears in kwargs."""
        for k in self.__slots__:
            self.__setattr__(k,kwargs.get(k,self.__getattribute__(k)))

    def _update_postfix(self,**kwargs):
        """return filter extention

        fibp<flow>-<fhigh> (bandpass), fihp<flow> (highpass),
        filp<fhigh> (lowpass); None when neither edge is set.
        """
        self._update_from_kwargs(**kwargs)
        fi_fix = None
        if self.flow and self.fhigh:
            fi_fix = "fibp"
            fi_fix += "%0.2f-%0.1f" % (self.flow,self.fhigh)
        elif self.flow:
            fi_fix = "fihp"
            fi_fix += "%0.2f" % self.flow
        elif self.fhigh:
            fi_fix = "filp"
            fi_fix += "%0.2f" % (self.fhigh)
        return fi_fix

    def get_filter_filename(self,raw=None):
        """Return the filename for the filtered raw.

        The filter postfix (and 'dc' when dcoffset is set) is inserted
        before the final '-<ext>' part of the current raw filename.

        Parameters
        ----------
        raw : raw obj, optional; defaults to self.raw

        Returns
        -------
        fname : filename of filtered raw
        """
        self._update_postfix()
        if raw:
            fname = jb.get_raw_filename(raw,index=0)
        else:
            fname = self.fname
        fname,ext = fname.rsplit('-',1)
        fname += "," + self.postfix
        if self.dcoffset:
            fname += "dc"
        fname += "-" + ext
        return fname

    def apply_dcoffset(self,raw=None,picks=None):
        '''
        apply dc offset to data, works in place
        subtract the channel-wise data mean

        Parameters
        ----------
        raw   : raw obj, optional; defaults to self.raw
        picks : channel indices (list/ndarray), optional; defaults to self.picks

        Returns
        -------
        data mean (per picked channel)
        '''
        # Fix: use explicit None checks. The previous 'if not picks:' raised
        # "ValueError: truth value of an array is ambiguous" whenever picks
        # was a numpy array (the type produced by jb.picks.exclude_trigger).
        if picks is None:
            picks = self.picks
        if raw is None:
            raw = self.raw
        # NOTE(review): if self.picks is also None, raw._data[None,:] does
        # not select channels as intended — callers should set picks first.
        dm = raw._data[picks,:].mean(axis=-1)
        raw._data[picks,:] -= dm[:, np.newaxis]
        return dm

    def apply(self,**kwargs):
        """
        wrapper function for MNE filter cls
        raw is filtered with MNE filter function inplace
        data in raw-obj will be overwritten
        filename is updated in raw-obj

        call MNE filter e.g.:
         raw.filter(l_freq=flow,h_freq=fhigh,picks=picks)

        Unless overwrite is set, an existing filtered file on disk is
        reloaded instead of filtering again.

        :param kwargs:
         flow,fhigh,raw,picks,save,overwrite,dcoffset,verbose,annotations

        Example
        --------
        -> filter all channels 0.1 - 45.0 Hz except STIM
        from jumeg.base.jumeg_base import jumeg_base as jb
        from jumeg.filter.jumeg_mne_filter import JuMEG_MNE_FILTER
        jFI = JuMEG_MNE_FILTER()
        fname = jFI.apply(
            flow  = 0.1,
            fhigh = 45.0,
            save  = True,
            raw   = raw,
            picks = jb.picks.exclude_trigger(raw) )

        :return:
         fname
        """
        self._update_from_kwargs(**kwargs)
        self._is_filtered = False
        self._is_reloaded = False
        # NOTE(review): jb.verbose is set here but never restored (the notch
        # subclass does restore it) — left unchanged to preserve behavior.
        jb.verbose = self.verbose

        logger.info("Filter start: {}".format(self.fname))
        fname = self.get_filter_filename()

        #--- ck if load from disk
        if not self.overwrite:
            if jb.isFile(fname):
                logger.debug("Filtered RAW reloading from disk ...")
                self.raw,fname = jb.get_raw_obj(fname,None)
                self._fname_orig = fname
                if self.annotations:
                    self.raw.set_annotations(self.annotations)
                self._is_filtered = True
                self._is_reloaded = True

        if not self._is_filtered:
            logger.info("Filter start MNE filter ...")
            if isinstance(self.picks,(list,np.ndarray)):
                picks = self.picks
            else:
                logger.warning("WARNING: picks not defined : excluding channel group <stim> and <resp>")
                picks = jb.picks.exclude_trigger(self.raw)
            if self.dcoffset:
                self.apply_dcoffset()
            self.raw.filter(l_freq=self.flow,h_freq=self.fhigh,picks=picks)
            self._fname_orig = jb.get_raw_filename(self.raw)
            self._is_filtered = True
            if self.annotations:
                self.raw.set_annotations( self.annotations.copy() )

        fname,_ = jb.update_and_save_raw(self.raw,fout=fname,save=self.save,overwrite=True,update_raw_filename=True)

        if self.verbose:
            self.GetInfo()
        return fname

    def GetInfo(self,msg=None):
        """
        Collect a human-readable status summary of the filter state.

        :param msg: optional list; when given, the summary lines are
                    appended to it and the list is returned. Otherwise the
                    summary is logged.
        :return: the extended msg list, or None when logging
        """
        _msg = ["Filter : {}".format(self.isFiltered),
                "  --> raw filtered: {}".format(self.fname),
                "  ->  postfix : {}".format(self.postfix),
                "  ->  flow    : {}".format(self.flow),
                "  ->  fhigh   : {}".format(self.fhigh),
                "  -> dcoffset: {}".format(self.dcoffset),
                "  -> save    : {}".format(self.save)
                ]
        # Raw may be unset; fall back to None instead of crashing.
        try:
            annota =self.raw.annotations
        except Exception:
            annota = None
        _msg.append("  -> mne.annotations in RAW:\n  -> {}".format(annota))

        if self.debug:
            _msg.extend(["-"*20,
                         "-> MNE version: {}".format(mne.__version__),
                         "-> version: {}".format(__version__) ])
        if msg:
            msg.extend(_msg)
            return msg
        else:
            logger.info("\n".join(_msg))

    def info(self,msg=None):
        """
        wrapper for GetInfo()

        Parameters
        ----------
        msg : list, optional
              The default is None.

        Returns
        -------
        None.
        """
        self.GetInfo(msg=msg)
class JuMEG_MNE_NOTCH_FILTER(JuMEG_MNE_FILTER):
"""
wrapper cls to wrap mne.notch_filter MNE version 19.2 in juMEG
call MNE notch_filter e.g.:
raw.notch_filter(l_freq=flow,h_freq=fhigh,picks=picks)
save and rename filterd raw file
call MNE <raw.notch_filter>
notch_filter(self,freqs,picks=None,filter_length='auto',notch_widths=None,trans_bandwidth=1.0,n_jobs=1,method='fir',
iir_params=None,mt_bandwidth=None,p_value=0.05,phase='zero',fir_window='hamming',fir_design='firwin',
pad='reflect_limited',verbose=None)[source]
Example
--------
-> notch all chanels 50.0,100.0,150.0 Hz except STIM
from jumeg.base.jumeg_base import jumeg_base as jb
from jumeg.filter.jumeg_mne_filter import JUMEG_NOTCH_FILTER
jNFI = JUMEG_NOTCH_FILTER()
fname = jNFI.apply(
freqs = [50.0,100.0,150.0]
picks = jb.picks.exclude_trigger(raw)
)
"""
__slots__ = ["raw","freqs","picks","filter_length","notch_widths","trans_bandwidth","n_jobs","method",
"iir_params","mt_bandwidth","p_value","phase","fir_window","fir_design","pad","verbose",
"save","overwrite","verbose","debug","_is_filtered","_is_reloaded","_fname_orig"]
def __init__(self,**kwargs):
#super().__init__()
self.clear()
self._update_from_kwargs(**kwargs)
def clear(self):
for k in self.__slots__:
self.__setattr__(k,None)
self.filter_length = 'auto'
self.trans_bandwidth = 1.0
self.n_jobs = 1
self.method = 'fir'
self.p_value = 0.05
self.phase = 'zero'
self.fir_window ='hamming'
self.fir_design ='firwin'
self.pad ='reflect_limited'
def _update_from_kwargs(self,**kwargs):
for k in self.__slots__:
self.__setattr__(k,kwargs.get(k,self.__getattribute__(k)))
def _update_postfix(self,**kwargs):
"""return filter extention """
self._update_from_kwargs(**kwargs)
fi_fix = "fin"
if isinstance(self.freqs,(list,np.ndarray)):
fi_fix += "{%0.2f}x{}".format(self.freqs[0],len(self.freqs))
else:
fi_fix += "{%0.2f}x1".format(self.freqs)
return fi_fix
def apply(self,**kwargs):
    """
    Wrapper around the MNE (0.19.2-era) notch filter.

    The data in the raw-obj is overwritten in place and the filename stored
    in the raw-obj is updated with the filter postfix. Calls MNE
    <raw.notch_filter>:

    notch_filter(self,freqs,picks=None,filter_length='auto',notch_widths=None,trans_bandwidth=1.0,n_jobs=1,method='fir',
                 iir_params=None,mt_bandwidth=None,p_value=0.05,phase='zero',fir_window='hamming',fir_design='firwin',
                 pad='reflect_limited',verbose=None)

    :param kwargs: any slot attribute (freqs, picks, save, overwrite, ...)

    Example
    --------
    notch all channels at 50.0, 100.0, 150.0 Hz except STIM:

        from jumeg.base.jumeg_base import jumeg_base as jb
        from jumeg.filter.jumeg_mne_filter import JUMEG_NOTCH_FILTER
        jNFI  = JUMEG_NOTCH_FILTER()
        fname = jNFI.apply(
                   freqs = [50.0,100.0,150.0],
                   picks = jb.picks.exclude_trigger(raw)
                )

    :return:
     fname  -- filename of the (possibly reloaded or saved) filtered raw
    """
    self._update_from_kwargs(**kwargs)
    self._is_filtered = False
    self._is_reloaded = False
    # remember the global verbosity and temporarily switch to this instance's setting
    v = jb.verbose
    jb.verbose = self.verbose
    logger.info("---> Filter start: {}".format(self.fname))
    # NOTE(review): the return value of _update_postfix() is discarded and
    # self.postfix is read below -- presumably a property keeps them in sync; confirm.
    self._update_postfix()
    # split "<stem>-raw.fif" into stem and extension, then splice in the postfix
    fname,ext = self.fname.rsplit('-',1) #raw.fif'
    fname += "," + self.postfix + "-" + ext
    #--- check if an already-filtered file can be reloaded from disk
    if not self.overwrite:
        if jb.isFile(fname):
            logger.debug("Notch Filtered RAW reloading from disk ...")
            self.raw,fname = jb.get_raw_obj(fname,None)
            self._is_filtered = True
            self._is_reloaded = True
    if not self._is_filtered:
        logger.info("Notch Filter start MNE filter ...")
        if isinstance(self.picks,(list,np.ndarray)):
            picks = self.picks
        else:
            # no explicit picks: filter everything except trigger/response channels
            logger.warning("picks not defined : excluding channel group <stim> and <resp>")
            picks = jb.picks.exclude_trigger(self.raw)
        # in-place notch filtering of the raw data
        self.raw.notch_filter(self.freqs,picks=picks,filter_length=self.filter_length,notch_widths=self.notch_widths,
                              trans_bandwidth=self.trans_bandwidth,n_jobs=self.n_jobs,method=self.method,
                              iir_params=self.iir_params,mt_bandwidth=self.mt_bandwidth,
                              p_value=self.p_value,phase=self.phase,fir_window=self.fir_window,
                              fir_design=self.fir_design,pad=self.pad,verbose=self.verbose)
        # keep the pre-filter filename for bookkeeping
        self._fname_orig = jb.get_raw_filename(self.raw)
        self._is_filtered = True
        if self.save:
            logger.info("Notch Filter saving data")
            fname = jb.apply_save_mne_data(self.raw,fname=fname,overwrite=True)
        else:
            # not saving: still record the new filename inside the raw-obj
            jb.set_raw_filename(self.raw,fname)
    logger.info("Notch Filter done: {}\n".format(self.fname) +
                "  -> reloaded from disk: {}".format(self._is_reloaded)
                )
    # restore the global verbosity
    jb.verbose = v
    return fname
def GetInfo(self, msg=None):
    """Assemble a multi-line summary of the notch-filter state and parameters.

    :param msg: optional list of strings; when truthy, the summary lines are
                appended to it and the extended list is returned.
                NOTE: an empty list is falsy and therefore falls through to
                the logging branch -- pass a non-empty list to collect lines.
    :return: the extended list when *msg* is truthy, otherwise None
             (the summary is written to the module logger).
    """
    _msg = ["Notch Filter: {}".format(self.isFiltered),
            " --> raw filtered: {}".format(self.fname),
            "  -> postfix: {}".format(self.postfix),
            "  -> save   : {}".format(self.save),
            "---> Parameter:",
            "  -> freqs              : {}".format(self.freqs),
            "  -> notch_widths       : {}".format(self.notch_widths),
            "  -> trans_bandwidthfreqs: {}".format(self.trans_bandwidth),
            "  -> method             : {}".format(self.method),
            # fixed: attribute is spelled iir_params (see __slots__);
            # "self.irr_params" raised AttributeError
            "  -> iir_params         : {}".format(self.iir_params),
            "  -> mt_bandwidth       : {}".format(self.mt_bandwidth),
            "  -> phase              : {}".format(self.phase),
            "  -> fir_window         : {}".format(self.fir_window),
            "  -> fir_design         : {}".format(self.fir_design)
            ]
    if self.debug:
        _msg.extend(["-"*20,
                     "-> MNE version: {}".format(mne.__version__),
                     "-> version: {}".format(__version__)])
    if msg:
        msg.extend(_msg)
        return msg
    else:
        # fixed: logging the raw list printed its repr; join into readable lines
        logger.info("\n".join(_msg))
#---
def jumeg_mne_filter(raw=None, fname=None, **kwargs):
    """Convenience wrapper: run JuMEG_MNE_FILTER.apply() and return (raw, fname).

    :param raw: MNE raw object to filter in place
    :param fname: filename associated with the raw object
    :param kwargs: forwarded to JuMEG_MNE_FILTER.apply()
    :return: tuple of (raw, filtered filename)
    """
    filter_obj = JuMEG_MNE_FILTER(raw=raw, fname=fname)
    return raw, filter_obj.apply(**kwargs)
#if cfg.post_filter.run:
# self.PostFilter.apply(
# flow = cfg.post_filter.flow,
# fhigh = cfg.post_filter.fhigh,
# save = cfg.post_filter.save,
# raw = raw_unfiltered_clean, # ????
# picks = jb.picks.exclude_trigger(raw_filtered_clean)
# )
# return self.PostFilter.raw
|
<reponame>manera/legacypipe
from __future__ import print_function
import numpy as np
import fitsio
from astrometry.util.fits import fits_table
from legacypipe.survey import *
from legacypipe.coadds import _resample_one
from legacypipe.cpimage import CP_DQ_BITS
from legacypipe.runbrick import rgbkwargs
import sys
def main():
    """Build a deep coadd for every brick name given on the command line."""
    bricknames = sys.argv[1:]
    for name in bricknames:
        make_coadd(name)
def make_coadd(brickname):
    """Build deep g/r/z coadds for one brick and write FITS images,
    inverse-variance maps, exposure-count maps and JPEG previews under deep/.

    Two coadd sets are produced per band: one from all photometric CCDs and a
    second (files with a "2" suffix) restricted to CCDs with better-than-median
    seeing.
    """
    #brickname = '0362m045'
    #brickname = '0359m047'
    #brickname = '0359m045'
    # brick image geometry: 3600x3600 pixels at 0.262 arcsec/pixel
    W = H = 3600
    pixscale = 0.262
    bands = 'grz'
    lanczos = True
    survey = LegacySurveyData()
    brick = survey.get_brick_by_name(brickname)
    targetwcs = wcs_for_brick(brick, W=W, H=H, pixscale=pixscale)
    pixscale = targetwcs.pixel_scale()
    # closed RA,Dec polygon tracing the brick outline (for overlap tests)
    targetrd = np.array([targetwcs.pixelxy2radec(x,y) for x,y in
                         [(1,1),(W,1),(W,H),(1,H),(1,1)]])
    # where to measure the depth
    probe_ra = brick.ra
    probe_dec = brick.dec
    #ccds = survey.ccds_touching_wcs(targetwcs, ccdrad=None)
    ccds = survey.get_annotated_ccds()
    I = ccds_touching_wcs(targetwcs, ccds)
    ccds.cut(I)
    print(len(ccds), 'CCDs touching target WCS')
    #I = survey.apply_blacklist(ccds)
    #ccds.cut(I)
    #print(len(ccds), 'CCDs not in blacklisted propids (too many exposures!)')
    # Sort images by band -- this also eliminates images whose
    # *image.filter* string is not in *bands*.
    print('Unique filters:', np.unique(ccds.filter))
    ccds.cut(np.hstack([np.flatnonzero(ccds.filter == band) for band in bands]))
    print('Cut on filter:', len(ccds), 'CCDs remain.')
    print('Cutting out non-photometric CCDs...')
    I = survey.photometric_ccds(ccds)
    print(len(I), 'of', len(ccds), 'CCDs are photometric')
    ccds.cut(I)
    # record the CCDs that survived the cuts
    fn = 'deep/coadd-%s-ccds.fits' % brickname
    ccds.writeto(fn)
    print('Wrote', fn)
    # accumulate inverse-variance point-source depth per band at the probe point
    psfdepths = dict([(b,0.) for b in bands])
    ims = []
    for ccd in ccds:
        im = survey.get_image_object(ccd)
        ims.append(im)
        print(im, im.band, 'exptime', im.exptime, 'propid', ccd.propid)
        wcs = survey.get_approx_wcs(ccd)
        if wcs.is_inside(probe_ra, probe_dec):
            # Point-source detection: combine per-CCD detection sigmas in
            # inverse variance
            detsig1 = ccd.sig1 / ccd.psfnorm_mean
            psfdepths[im.band] += (1. / detsig1**2)
    for band in bands:
        sig1 = np.sqrt(1. / psfdepths[band])
        depth = 5. * sig1
        # convert 5-sigma flux to AB magnitude
        # (assumes flux in nanomaggies -- TODO confirm)
        mag = -2.5 * (np.log10(depth) - 9)
        print('PSF 5-sigma depth:', mag)
    coimgs = []
    coimgs2 = []
    for band in bands:
        print('Computing coadd for band', band)
        hdr = fitsio.FITSHDR()
        hdr.add_record(dict(name='FILTER', value=band))
        hdr.add_record(dict(name='BRICK', value=brickname))
        # Plug the WCS header cards into these images
        targetwcs.add_to_header(hdr)
        hdr.delete('IMAGEW')
        hdr.delete('IMAGEH')
        hdr.add_record(dict(name='EQUINOX', value=2000.))
        # coadded weight map (moo)
        cow = np.zeros((H,W), np.float32)
        # coadded weighted image map
        cowimg = np.zeros((H,W), np.float32)
        # unweighted image
        coimg = np.zeros((H,W), np.float32)
        # number of exposures
        con = np.zeros((H,W), np.uint8)
        # second set of accumulators: better-than-median-seeing CCDs only
        # coadded weight map (moo)
        cow2 = np.zeros((H,W), np.float32)
        # coadded weighted image map
        cowimg2 = np.zeros((H,W), np.float32)
        # unweighted image
        coimg2 = np.zeros((H,W), np.float32)
        # number of exposures
        con2 = np.zeros((H,W), np.uint8)
        # tiny weight floor to avoid division by zero in empty pixels
        tinyw = 1e-30
        I = np.flatnonzero(ccds.filter == band)
        medsee = np.median(ccds.seeing[I])
        for ccd in ccds[I]:
            im = survey.get_image_object(ccd)
            tim = im.get_tractor_image(radecpoly=targetrd, splinesky=True, gaussPsf=True)
            if tim is None:
                continue
            print('Reading', tim.name)
            # surface-brightness correction
            tim.sbscale = (targetwcs.pixel_scale() / tim.subwcs.pixel_scale())**2
            R = _resample_one((0, tim, None, lanczos, targetwcs))
            if R is None:
                continue
            # NOTE: this rebinds *im* to the resampled pixel array, shadowing
            # the image object read above
            itim,Yo,Xo,iv,im,mo,dq = R
            goodsee = (ccd.seeing < medsee)
            # invvar-weighted image
            cowimg[Yo,Xo] += iv * im
            cow   [Yo,Xo] += iv
            if goodsee:
                cowimg2[Yo,Xo] += iv * im
                cow2   [Yo,Xo] += iv
            if dq is None:
                goodpix = 1
            else:
                # include BLEED, SATUR, INTERP pixels if no other
                # pixels exists (do this by eliminating all other CP
                # flags)
                badbits = 0
                for bitname in ['badpix', 'cr', 'trans', 'edge', 'edge2']:
                    badbits |= CP_DQ_BITS[bitname]
                goodpix = ((dq & badbits) == 0)
            coimg[Yo,Xo] += goodpix * im
            con  [Yo,Xo] += goodpix
            if goodsee:
                coimg2[Yo,Xo] += goodpix * im
                con2  [Yo,Xo] += goodpix
        # Per-band: normalize accumulators and fall back to the unweighted
        # image where no weight was accumulated
        cowimg /= np.maximum(cow, tinyw)
        coimg  /= np.maximum(con, 1)
        cowimg[cow == 0] = coimg[cow == 0]
        fn = 'deep/coadd-%s-image-%s.fits' % (brickname, band)
        fitsio.write(fn, cowimg, clobber=True, header=hdr)
        print('Wrote', fn)
        fn = 'deep/coadd-%s-invvar-%s.fits' % (brickname, band)
        fitsio.write(fn, cow, clobber=True, header=hdr)
        print('Wrote', fn)
        fn = 'deep/coadd-%s-n-%s.fits' % (brickname, band)
        fitsio.write(fn, con, clobber=True, header=hdr)
        print('Wrote', fn)
        coimgs.append(cowimg)
        # same normalization for the good-seeing-only coadd
        cowimg2 /= np.maximum(cow2, tinyw)
        coimg2  /= np.maximum(con2, 1)
        cowimg2[cow2 == 0] = coimg2[cow2 == 0]
        fn = 'deep/coadd-%s-image2-%s.fits' % (brickname, band)
        fitsio.write(fn, cowimg2, clobber=True, header=hdr)
        print('Wrote', fn)
        fn = 'deep/coadd-%s-invvar2-%s.fits' % (brickname, band)
        fitsio.write(fn, cow2, clobber=True, header=hdr)
        print('Wrote', fn)
        fn = 'deep/coadd-%s-n2-%s.fits' % (brickname, band)
        fitsio.write(fn, con2, clobber=True, header=hdr)
        print('Wrote', fn)
        coimgs2.append(cowimg2)
    # JPEG previews of both coadd sets
    rgb = get_rgb(coimgs, bands, **rgbkwargs)
    kwa = {}
    imsave_jpeg('deep/coadd-%s-image.jpg' % brickname, rgb, origin='lower', **kwa)
    rgb = get_rgb(coimgs2, bands, **rgbkwargs)
    imsave_jpeg('deep/coadd-%s-image2.jpg' % brickname, rgb, origin='lower', **kwa)
# script entry point: brick names are taken from the command line
if __name__ == '__main__':
    main()
|
<filename>training_tf2/lossfuncs.py
"""
Custom Loss functions and metrics for training/analysis
"""
from tf_funcs import *
import tensorflow as tf
# The following loss functions all expect the lpcnet model to output the lpc prediction
# Computing the excitation by subtracting the lpc prediction from the target, followed by minimizing the cross entropy
def res_from_sigloss():
    """Loss factory: sparse cross-entropy on the rounded mu-law excitation.

    y_pred is expected to carry the LPC prediction in channel 0 and the
    categorical distribution over excitation values in the remaining channels.
    """
    def loss(y_true, y_pred):
        prediction = y_pred[:, :, 0:1]
        probs = y_pred[:, :, 1:]
        # ground-truth excitation: subtract the LPC prediction in the linear
        # domain, map back to mu-law, then round to an integer class index
        excitation = tf_l2u(tf_u2l(y_true) - tf_u2l(prediction))
        excitation = tf.cast(tf.round(excitation), 'int32')
        cel = tf.keras.losses.SparseCategoricalCrossentropy(
            reduction=tf.keras.losses.Reduction.NONE)
        return cel(excitation, probs)
    return loss
# Interpolated and Compensated Loss (In case of end to end lpcnet)
# Interpolates between adjacent embeddings based on the fractional value of the excitation computed (similar to the embedding interpolation)
# Also adds a probability compensation (to account for matching cross entropy in the linear domain), weighted by gamma
def interp_mulaw(gamma=1):
    """Loss factory: interpolated cross-entropy plus a probability-compensation
    term (to account for matching cross entropy in the linear domain),
    weighted by *gamma*.
    """
    def loss(y_true, y_pred):
        prediction = y_pred[:, :, 0:1]
        probs = y_pred[:, :, 1:]
        excitation = tf_l2u(tf_u2l(y_true) - tf_u2l(prediction))
        # compensation term, computed from the distance to the mu-law midpoint
        compensation = tf.squeeze((K.abs(excitation - 128)/128.0)*K.log(256.0))
        # fractional part of the excitation drives interpolation between
        # adjacent embedding/class probabilities
        frac = tf.tile(excitation - tf.math.floor(excitation), [1, 1, 256])
        idx = tf.clip_by_value(tf.cast(excitation, 'int32'), 0, 254)
        interp = (1 - frac)*probs + frac*tf.roll(probs, shift=-1, axis=-1)
        cel = tf.keras.losses.SparseCategoricalCrossentropy(
            reduction=tf.keras.losses.Reduction.NONE)(idx, interp)
        return cel + gamma*compensation
    return loss
# Same as above, except a metric
def metric_oginterploss(y_true, y_pred):
    """Metric: interpolated cross-entropy with an unweighted probability
    compensation term (same as interp_mulaw with gamma == 1).
    """
    prediction = y_pred[:, :, 0:1]
    probs = y_pred[:, :, 1:]
    excitation = tf_l2u(tf_u2l(y_true) - tf_u2l(prediction))
    compensation = tf.squeeze((K.abs(excitation - 128)/128.0)*K.log(256.0))
    frac = tf.tile(excitation - tf.math.floor(excitation), [1, 1, 256])
    idx = tf.clip_by_value(tf.cast(excitation, 'int32'), 0, 254)
    interp = (1 - frac)*probs + frac*tf.roll(probs, shift=-1, axis=-1)
    cel = tf.keras.losses.SparseCategoricalCrossentropy(
        reduction=tf.keras.losses.Reduction.NONE)(idx, interp)
    return cel + compensation
# Interpolated cross entropy loss metric
def metric_icel(y_true, y_pred):
    """Metric: interpolated cross-entropy (no compensation term)."""
    prediction = y_pred[:, :, 0:1]
    probs = y_pred[:, :, 1:]
    excitation = tf_l2u(tf_u2l(y_true) - tf_u2l(prediction))
    frac = tf.tile(excitation - tf.math.floor(excitation), [1, 1, 256])
    idx = tf.clip_by_value(tf.cast(excitation, 'int32'), 0, 254)  # Check direction
    interp = (1 - frac)*probs + frac*tf.roll(probs, shift=-1, axis=-1)
    return tf.keras.losses.SparseCategoricalCrossentropy(
        reduction=tf.keras.losses.Reduction.NONE)(idx, interp)
# Non-interpolated (rounded) cross entropy loss metric
def metric_cel(y_true, y_pred):
    """Metric: plain (rounded, non-interpolated) sparse cross-entropy."""
    prediction = y_pred[:, :, 0:1]
    probs = y_pred[:, :, 1:]
    excitation = tf_l2u(tf_u2l(y_true) - tf_u2l(prediction))
    idx = tf.clip_by_value(tf.cast(tf.round(excitation), 'int32'), 0, 255)
    return tf.keras.losses.SparseCategoricalCrossentropy(
        reduction=tf.keras.losses.Reduction.NONE)(idx, probs)
# Variance metric of the output excitation
def metric_exc_sd(y_true, y_pred):
    """Metric: mean squared deviation of the mu-law excitation from level 128."""
    prediction = y_pred[:, :, 0:1]
    excitation = tf_l2u(tf_u2l(y_true) - tf_u2l(prediction))
    mse = tf.keras.losses.MeanSquaredError(
        reduction=tf.keras.losses.Reduction.NONE)
    return mse(excitation, 128)
|
# this program is written for Python2
# imports
import Tkinter
import time
import random
import sys
from copy import copy
# create game window
window = Tkinter.Tk()
# create window size and set no-resize option
window_dimensions = [800, 625]
window.geometry(str(window_dimensions[0]) + "x" + str(window_dimensions[1]))
window.resizable(0, 0)
# set window title
window.title("Pong Game")
# close window when OS close button is clicked
window.protocol("WM_DELETE_WINDOW", sys.exit)
# choose fps for game
frames_per_second = 30
# create game canvas
game_canvas = Tkinter.Canvas(window, width=window_dimensions[0], height=window_dimensions[1], bd=0, highlightthickness=0)
game_canvas.pack()
# create game variables
# paddle sizes (width, height)
paddle_size = [15, 125]
# initial centered Y position for both paddles
# (Python 2 integer division: positions are whole pixels)
initial_y_position = (window_dimensions[1] - paddle_size[1]) / 2
# player variables
player_y_position = initial_y_position
player_y_velocity = 0
# player2 variables
player2_y_position = initial_y_position
player2_y_velocity = 0
# ball variables
ball_diameter = 15
# ball starts right of center, vertically centered, both shifted left/up by
# half the window height
initial_ball_position = [(window_dimensions[0] - 35 - paddle_size[0]) - (int(window_dimensions[1] / 2)), ((window_dimensions[1] - ball_diameter) / 2) - (int(window_dimensions[1] / 2))]
initial_ball_velocity = [12, 12]
# copies so score resets never mutate the initial values
ball_position = copy(initial_ball_position)
ball_velocity = copy(initial_ball_velocity)
# score variable and widget: [player, player2]
score = [0, 0]
# delete useless global variables
del initial_y_position
# display instructions variable
display_instructions = True
# gameloop
def gameloop():
    """Run one frame: draw everything, advance positions, handle collisions.

    Re-schedules itself via window.after, so a single initial call keeps the
    game running.
    """
    # declare use of global variables
    global frames_per_second
    global game_canvas
    global window_dimensions
    global player_y_position
    global paddle_size
    global player2_y_position
    global ball_diameter
    global ball_position
    global ball_velocity
    global player_y_velocity
    global player2_y_velocity
    global display_instructions
    # schedule the next frame (Python 2 integer division: 1000/30 -> 33 ms)
    window.after(1000 / frames_per_second, gameloop)
    # clear canvas
    game_canvas.delete("all")
    # create dark gray background
    game_canvas.create_rectangle(0, 0, window_dimensions[0], window_dimensions[1], fill="#222222", outline="#222222")
    # display player paddle (35 pixels from left)
    game_canvas.create_rectangle(35, player_y_position, 35 + paddle_size[0], player_y_position + paddle_size[1], fill="#ffffff", outline="#ffffff")
    # display player2 paddle (35 pixels from right)
    game_canvas.create_rectangle(window_dimensions[0] - 35, player2_y_position, (window_dimensions[0] - 35) - paddle_size[0], player2_y_position + paddle_size[1], fill="#ffffff", outline="#ffffff")
    # display ball
    game_canvas.create_rectangle(ball_position[0], ball_position[1], ball_position[0] + ball_diameter, ball_position[1] + ball_diameter, fill="#ffffff", outline="#ffffff")
    # display score (centered)
    game_canvas.create_text(window_dimensions[0] / 2, 35, anchor="center", font="Monaco 28 bold", fill="#ffffff", text=str(score[0]) + "     " + str(score[1]))
    # display center separator line
    game_canvas.create_line((window_dimensions[0] / 2) , 0, (window_dimensions[0] / 2), window_dimensions[1], fill="#ffffff", dash=(6, 10), width=3)
    # display instructions until either paddle has moved
    if(display_instructions):
        game_canvas.create_text((window_dimensions[0] / 2) - 30, window_dimensions[1] - 40, anchor="ne", font="Monaco 16 bold", fill="#ffffff", text="Move w/WASD")
        game_canvas.create_text((window_dimensions[0] / 2) + 30, window_dimensions[1] - 40, anchor="nw", font="Monaco 16 bold", fill="#ffffff", text="Move w/Arrows")
    # update player Y position and movement
    player_y_position += player_y_velocity
    # update player2 Y position and movement
    player2_y_position += player2_y_velocity
    # set window boundaries for max and min position for paddles
    # player paddle
    if(player_y_position + paddle_size[1] > window_dimensions[1]):
        player_y_position = window_dimensions[1] - paddle_size[1]
    elif(player_y_position < 0):
        player_y_position = 0
    # player2 paddle
    if(player2_y_position + paddle_size[1] > window_dimensions[1]):
        player2_y_position = window_dimensions[1] - paddle_size[1]
    elif(player2_y_position < 0):
        player2_y_position = 0
    # update ball position
    ball_position[0] += ball_velocity[0]
    ball_position[1] += ball_velocity[1]
    # set window boundaries for ball
    # top and bottom of screen: bounce by flipping the Y velocity
    if(ball_position[1] >= window_dimensions[1] - ball_diameter or ball_position[1] <= 0):
        ball_velocity[1] = -ball_velocity[1]
    # left side and right side of screen --> update score accordingly and reset ball vars
    if(ball_position[0] <= 0):
        # point for player2
        score[1] += 1
        # reset ball vars
        ball_position = copy(initial_ball_position)
        ball_velocity = copy(initial_ball_velocity)
    if(ball_position[0] >= window_dimensions[0] - ball_diameter):
        # point for player
        score[0] += 1
        # reset ball vars
        ball_position = copy(initial_ball_position)
        ball_velocity = copy(initial_ball_velocity)
    # paddle collision: ball overlaps either paddle's rectangle -> flip X velocity
    # (also possibly one of the longest if statements you've seen in your life)
    if(((ball_position[0] >= 35 and ball_position[0] <= 35 + paddle_size[0]) and (ball_position[1] + ball_diameter >= player_y_position and ball_position[1] <= player_y_position + paddle_size[1])) or ((ball_position[0] + ball_diameter <= window_dimensions[0] - 35 and ball_position[0] + ball_diameter >= (window_dimensions[0] - 35) - paddle_size[0]) and (ball_position[1] + ball_diameter >= player2_y_position and ball_position[1] <= player2_y_position + paddle_size[1]))):
        ball_velocity[0] = -ball_velocity[0]
        # switch Y velocity if collision was on top or bottom sides of paddle
        # player paddle
        if(ball_velocity[0] >= 0):
            if((ball_position[1] + ball_diameter <= player_y_position + paddle_size[0] and ball_velocity[1] >= 0) or (ball_position[1] >= player_y_position + paddle_size[1] - paddle_size[0] and ball_velocity[1] <= 0)):
                ball_velocity[1] = -ball_velocity[1]
        # player2 paddle
        if(ball_velocity[0] <= 0):
            if((ball_position[1] + ball_diameter <= player2_y_position + paddle_size[0] and ball_velocity[1] >= 0) or (ball_position[1] >= player2_y_position + paddle_size[1] - paddle_size[0] and ball_velocity[1] <= 0)):
                ball_velocity[1] = -ball_velocity[1]
# handle arrow keys keydown events
def onKeyDown(e):
    """Start paddle movement: w/s drive player 1, Up/Down drive player 2."""
    # declare use of global variable(s)
    global player_y_velocity
    global player2_y_velocity
    global display_instructions
    # remember the velocities so we can detect whether this press changed anything
    previous_p1 = player_y_velocity
    previous_p2 = player2_y_velocity
    key = e.keysym
    # player 1 (WASD)
    if(key == "w"):
        player_y_velocity = -15
    elif(key == "s"):
        player_y_velocity = 15
    # player 2 (arrow keys)
    if(key == "Up"):
        player2_y_velocity = -15
    elif(key == "Down"):
        player2_y_velocity = 15
    # hide the on-screen instructions once either paddle has been moved
    if(previous_p1 != player_y_velocity or previous_p2 != player2_y_velocity):
        display_instructions = False
# handle arrow keys keyup events
def onKeyUp(e):
    """Stop paddle movement when a movement key is released."""
    # declare use of global variable(s)
    global player_y_velocity
    global player2_y_velocity
    key = e.keysym
    # player 1 stops when either of its movement keys is released
    if(key == "w" or key == "s"):
        player_y_velocity = 0
    # player 2 stops when either arrow key is released
    if(key == "Up" or key == "Down"):
        player2_y_velocity = 0
# connect keydown event to function
window.bind("<KeyPress>", onKeyDown)
# connect keyup event to function
window.bind("<KeyRelease>", onKeyUp)
# start the animation: gameloop re-schedules itself via window.after
gameloop()
# display window and mainloop
window.mainloop()
<gh_stars>10-100
from typing import Optional, Callable
import json
from instagram_api import response
from instagram_api.constants import Constants
from instagram_api.response.model import Token
from instagram_api.signatures import Signatures
from instagram_api.exceptions import SettingsException
from .base import CollectionBase
from .metadata import InternalMetadata
class Internal(CollectionBase):
    """Collection of internal Instagram API endpoints (sync, uploads,
    zero-rating tokens, quick promotions, ...).

    Methods whose body is a bare ``...`` are unimplemented stubs kept for
    API parity with the reference client.
    """

    # Number of retries for each video chunk.
    MAX_CHUNK_RETRIES = 5
    # Number of retries for resumable uploader.
    MAX_RESUMABLE_RETRIES = 15
    # Number of retries for each media configuration.
    MAX_CONFIGURE_RETRIES = 5
    # Minimum video chunk size in bytes.
    MIN_CHUNK_SIZE = 204800
    # Maximum video chunk size in bytes.
    MAX_CHUNK_SIZE = 5242880

    def upload_single_photo(self,
                            target_feed: int,
                            photo_filename: str,
                            internal_metadata: InternalMetadata = None,
                            external_metadata: dict = None) -> response.ConfigureResponse:
        ...

    def upload_photo_data(self, target_feed: int, internal_metadata: InternalMetadata):
        ...

    def configure_single_photo(self,
                               target_feed: int,
                               internal_metadata: InternalMetadata,
                               external_metadata: dict = None) -> response.ConfigureResponse:
        ...

    def upload_video(self,
                     target_feed: int,
                     video_filename: str,
                     internal_metadata: InternalMetadata = None):
        ...

    def upload_single_video(self,
                            target_feed: int,
                            video_filename: str,
                            internal_metadata: InternalMetadata = None,
                            external_metadata: dict = None) -> response.ConfigureResponse:
        ...

    def upload_video_thumbnail(self,
                               target_feed: int,
                               internal_metadata: InternalMetadata = None,
                               external_metadata: dict = None):
        ...

    def configure_single_video(self,
                               target_feed: int,
                               internal_metadata: InternalMetadata = None,
                               external_metadata: dict = None) -> response.ConfigureResponse:
        ...

    def configure_timeline_album(self,
                                 media: dict,
                                 internal_metadata: InternalMetadata,
                                 external_metadata: dict = None) -> response.ConfigureResponse:
        ...

    def sync_device_features(self, prelogin: bool = False) -> response.SyncResponse:
        """Sync quick-experiment device features.

        :param prelogin: when True, send the request unauthenticated and omit
                         the account-bound fields.
        """
        request = self._ig.request('qe/sync/').add_headers(**{
            'X-DEVICE_ID': self._ig.uuid,
        }).add_posts(
            id=self._ig.uuid,
            experiments=Constants.LOGIN_EXPERIMENTS,
        )
        if prelogin:
            request.set_needs_auth(False)
        else:
            request.add_posts(
                _uuid=self._ig.uuid,
                _uid=self._ig.account_id,
                _csrftoken=self._ig.client.get_token(),
            )
        return request.get_response(response.SyncResponse)

    def sync_user_features(self) -> response.SyncResponse:
        ...

    def send_launcher_sync(self, prelogin: bool) -> response.LauncherSyncResponse:
        """Sync launcher configs.

        :param prelogin: when True, send the request unauthenticated with the
                         device uuid as id; otherwise with account credentials.
        """
        request = self._ig.request('launcher/sync/').add_posts(
            configs=Constants.LAUNCHER_CONFIGS,
        )
        if prelogin:
            request.set_needs_auth(False).add_posts(
                id=self._ig.uuid,
            )
        else:
            request.add_posts(
                id=self._ig.account_id,
                _uuid=self._ig.uuid,
                _uid=self._ig.account_id,
                _csrftoken=self._ig.client.get_token(),
            )
        return request.get_response(response.LauncherSyncResponse)

    def log_attribution(self) -> response.GenericResponse:
        """Log install attribution using the advertising id (unauthenticated)."""
        return self._ig.request('attribution/log_attribution/').set_needs_auth(False).add_posts(
            adid=self._ig.advertising_id,
        ).get_response(response.GenericResponse)

    def log_resurrect_attribution(self) -> response.GenericResponse:
        ...

    def read_msisdn_header(self, usage: str, subno_key: Optional[str] = None) -> response.MsisdnHeaderResponse:
        """Read the carrier MSISDN header for this device (unauthenticated).

        :param usage: mobile_subno_usage value (e.g. 'default')
        :param subno_key: optional subscriber-number key to include
        """
        request = self._ig.request('accounts/read_msisdn_header/').set_needs_auth(False).add_headers(**{
            'X-DEVICE-ID': self._ig.uuid,
        }).add_posts(
            device_id=self._ig.uuid,
            mobile_subno_usage=usage,
        )
        if subno_key is not None:
            request = request.add_posts(subno_key=subno_key)
        return request.get_response(response.MsisdnHeaderResponse)

    def bootstrap_msisdn_header(self, usage: str = 'ig_select_app') -> response.MsisdnHeaderResponse:
        ...

    def _save_zero_rating_token(self, token: Optional[Token]):
        """Apply and persist a zero-rating token and its URL rewrite rules.

        Persistence failures are deliberately ignored: the in-memory rewrite
        rules have already been applied and a storage error must not abort
        the request flow.
        """
        if token is None:
            return
        rules = {}
        for rule in token.rewrite_rules:
            rules[rule.matcher] = rule.replacer
        # apply the rules to the live client first
        self._ig.client.zero_rating.update(rules)
        try:
            self._ig.settings.set_rewrite_rules(rules)
            self._ig.settings.set('zr_token', token.token_hash)
            self._ig.settings.set('zr_expires', token.expires_at)
        except SettingsException:
            # best-effort persistence (unused exception variable removed)
            pass

    def fetch_zero_rating_token(self, reason: str = 'token_expired') -> response.TokenResultResponse:
        """Fetch a zero-rating token (unauthenticated) and persist it."""
        request = self._ig.request('zr/token/result/').set_needs_auth(False).add_params(
            custom_device_id=self._ig.uuid,
            device_id=self._ig.device_id,
            fetch_reason=reason,
            # TODO: if there is no stored token, should the default be None or ''?
            token_hash=self._ig.settings.get('zr_token', ''),
        )
        result = request.get_response(response.TokenResultResponse)
        self._save_zero_rating_token(result.token)
        return result

    def get_megaphone_log(self) -> response.MegaphoneLogResponse:
        ...

    def get_facebook_hidden_search_entities(self) -> response.FacebookHiddenEntitiesResponse:
        ...

    def get_facebook_ota(self) -> response.FacebookOTAResponse:
        """Fetch Facebook over-the-air update metadata."""
        return self._ig.request('facebook_ota/').add_params(
            fields=Constants.FACEBOOK_OTA_FIELDS,
            custom_user_id=self._ig.account_id,
            signed_body=Signatures.generate_signature('') + '.',
            ig_sig_key_version=Constants.SIG_KEY_VERSION,
            # NOTE(review): "VERSOIN_CODE" looks misspelled, but it must match
            # the name declared in Constants -- confirm before renaming.
            version_code=Constants.VERSOIN_CODE,
            version_name=Constants.IG_VERSION,
            custom_app_id=Constants.FACEBOOK_ORCA_APPLICATION_ID,
            custom_device_id=self._ig.uuid,
        ).get_response(response.FacebookOTAResponse)

    def get_loom_fetch_config(self) -> response.LoomFetchConfigResponse:
        """Fetch the loom (performance logging) configuration."""
        return self._ig.request('loom/fetch_config/').get_response(response.LoomFetchConfigResponse)

    def get_profile_notice(self) -> response.ProfileNoticeResponse:
        """Fetch pending profile notices for the current account."""
        return self._ig.request('users/profile_notice/').get_response(response.ProfileNoticeResponse)

    def get_qp_fetch(self) -> response.FetchQPDataResponse:
        """Batch-fetch quick-promotion data for the standard surfaces.

        The GraphQL-like query below is sent verbatim for each surface id.
        """
        query = (
            'viewer() {eligible_promotions.surface_nux_id(<surface>)'
            '.external_gating_permitted_qps(<external_gating_permitted_qps>)'
            '.supports_client_filters(true) {edges {priority,time_range {start,end},node'
            ' {id,promotion_id,max_impressions,triggers,contextual_filters {clause_type,filters'
            ' {filter_type,unknown_action,value {name,required,bool_value,int_value, string_value},extra_datas'
            ' {name,required,bool_value,int_value, string_value}},clauses {clause_type,filters'
            ' {filter_type,unknown_action,value {name,required,bool_value,int_value, string_value},extra_datas'
            ' {name,required,bool_value,int_value, string_value}},clauses {clause_type,filters'
            ' {filter_type,unknown_action,value {name,required,bool_value,int_value, string_value},extra_datas'
            ' {name,required,bool_value,int_value, string_value}},clauses {clause_type,filters'
            ' {filter_type,unknown_action,value {name,required,bool_value,int_value, string_value},extra_datas'
            ' {name,required,bool_value,int_value, string_value}}}}}},template {name,parameters'
            ' {name,required,bool_value,string_value,color_value,}},creatives {title {text},content'
            ' {text},footer {text},social_context {text},primary_action{title {text},url,limit,dismiss_promotion},'
            'secondary_action{title {text},url,limit,dismiss_promotion},dismiss_action{title'
            ' {text},url,limit,dismiss_promotion},image.scale(<scale>) {uri,width,height}}}}}}'
        )
        return self._ig.request('qp/batch_fetch/').add_posts(
            vc_policy='default',
            _csrftoken=self._ig.client.get_token(),
            _uid=self._ig.account_id,
            _uuid=self._ig.uuid,
            surfaces_to_queries=json.dumps({
                Constants.SURFACE_PARAM[0]: query,
                Constants.SURFACE_PARAM[1]: query,
            }),
            version=1,
            scale=2,
        ).get_response(response.FetchQPDataResponse)

    def get_qp_cooldowns(self) -> response.QPCooldownsResponse:
        ...

    def mark_story_media_seen(self,
                              items: dict,
                              source_id: Optional[str] = None,
                              module: str = 'feed_timeline') -> response.MediaSeenResponse:
        ...

    def configure_with_retries(self, configurator: Callable):
        ...
|
<gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 7 23:49:30 2020
@author: danaukes
"""
import file_sorter.support as fus
import file_sorter.images as fui
import yaml
import os
import shutil
# Photo de-duplication pipeline: compare local files against a remote archive,
# first by file size (cheap), then by sha256 (expensive, only on size matches),
# and copy the genuinely new files into ~/Desktop/new.
#
# Stage 0 (remote size scan) -- should be done in a separate script if on a remote machine
# path1 = r'/home/danaukes/nas/photos/2021'
# path2 = r'/home/danaukes/nas/photos/2020'
# hash1 = fus.scan_list(path1,path2,directories_recursive=True,file_filter=fus.filter_none,hasher=fus.hash_filesize,directory_hashfile_name='hash_filesize.yaml')
# hash1.save(os.path.expanduser('~'),'size_remote.yaml')

# load the remote size scan produced by stage 0
with open(os.path.join(os.path.expanduser('~'),'size_remote.yaml')) as f:
    remote_compare1 = yaml.load(f,Loader=yaml.Loader)

# Stage 1: size scan of the local camera folder
path1 = r'C:\Users\danaukes\Desktop\Camera'
hash1 = fus.scan_list(path1,directories_recursive=True,file_filter=fus.filter_none,hasher=fus.hash_filesize,directory_hashfile_name='hash_filesize.yaml')
hash1.save(os.path.expanduser('~'),'size_local.yaml')
with open(os.path.join(os.path.expanduser('~'),'size_local.yaml')) as f:
    local_compare1 = yaml.load(f,Loader=yaml.Loader)

# files whose size appears on both sides need a real hash to confirm identity;
# sizes only seen locally are definitely new
same_size_hashes = set(local_compare1.hashes).intersection(set(remote_compare1.hashes))
new_local_file_hashes1 = set(local_compare1.hashes).difference(set(same_size_hashes))
new_local_file_names1 = [filename for key in new_local_file_hashes1 for filename in local_compare1.hash_file_dict[key]]
local_files_to_check = [item for key in same_size_hashes for item in local_compare1.hash_file_dict[key]]
with open(os.path.join(os.path.expanduser('~'),'local_files_to_check.yaml'),'w') as f:
    yaml.dump(local_files_to_check,f)

# Stage 2 (remote sha256 scan) -- run on the remote machine, results copied back here
# remote_files_to_check = [item for key in same_size_hashes for item in remote_compare1.hash_file_dict[key]]
# with open(os.path.join(os.path.expanduser('~'),'remote_files_to_check.yaml'),'w') as f:
#     yaml.dump(remote_files_to_check,f)
# with open(os.path.join(os.path.expanduser('~'),'remote_files_to_check.yaml')) as f:
#     remote_files_to_check = yaml.load(f,Loader = yaml.Loader)
# remote_compare2 = fus.scan_list(*remote_files_to_check,directories_recursive=False,file_filter = fus.filter_none,hasher=fus.hash_file)
# remote_compare2.save(os.path.expanduser('~'),'sha256_remote.yaml')

# Stage 3: sha256 scan of the local size-match candidates
with open(os.path.join(os.path.expanduser('~'),'local_files_to_check.yaml')) as f:
    local_files_to_check = yaml.load(f,Loader = yaml.Loader)
local_compare2 = fus.scan_list(*local_files_to_check,directories_recursive=False,file_filter = fus.filter_none,hasher=fus.hash_file)
local_compare2.save(os.path.expanduser('~'),'sha256_local.yaml')
with open(os.path.join(os.path.expanduser('~'),'sha256_local.yaml')) as f:
    local_compare2 = yaml.load(f,Loader=yaml.Loader)
# sha256_remote.yaml must have been produced by stage 2 on the remote machine
with open(os.path.join(os.path.expanduser('~'),'sha256_remote.yaml')) as f:
    remote_compare2 = yaml.load(f,Loader=yaml.Loader)

# Stage 4: combine both rounds of "new" files and copy them to the staging folder
same_file_hashes = set(local_compare2.hashes).intersection(set(remote_compare2.hashes))
same_file_names = [filename for key in same_file_hashes for filename in remote_compare2.hash_file_dict[key]]
new_local_file_hashes2 = set(local_compare2.hashes).difference(set(same_file_hashes))
new_local_file_names2 = [filename for key in new_local_file_hashes2 for filename in local_compare2.hash_file_dict[key]]
new_local_file_names = list(set(new_local_file_names1+new_local_file_names2))
new_path = os.path.join(os.path.expanduser('~'),'Desktop','new')
if not os.path.exists(new_path):
    os.mkdir(new_path)
for filename in new_local_file_names:
    # copy2 preserves timestamps/metadata; originals are left in place
    new_file = os.path.join(new_path,os.path.split(filename)[1])
    shutil.copy2(filename,new_file)
# # os.rename(filename, new_file)
# # os.remove(filename)
|
# -*- coding:utf-8 -*-
#
# Tencent is pleased to support the open source community by making QTA available.
# Copyright (C) 2016THL A29 Limited, a Tencent company. All rights reserved.
# Licensed under the BSD 3-Clause License (the "License"); you may not use this
# file except in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
#
'''pymobiledevice extended tools
'''
from __future__ import absolute_import, print_function
from cmd import Cmd
import os
import re
from subprocess import check_call, STDOUT, CalledProcessError
import tempfile
import time
import six
from pymobiledevice.afc import AFCClient, AFCShell
from pymobiledevice.diagnostics_relay import DIAGClient
from pymobiledevice.lockdown import LockdownClient
from pymobiledevice.screenshotr import screenshotr
from pymobiledevice.usbmux.usbmux import USBMux
from pymobiledevice.syslog import Syslog
TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
def list_devices():
    '''Return the UDIDs of every iOS device currently attached over USB.

    :returns: list - UDID strings, one per connected device
    '''
    mux = USBMux()
    if not mux.devices:
        # Nothing cached yet: run one long poll followed by several short
        # ones so freshly attached devices get picked up.
        mux.process(1)
        for _ in range(5):
            mux.process(0.1)
    return [device.serial for device in mux.devices]
def get_device_info(udid = None, keyname = None):
    '''Read property information from an iOS device.

    :param udid: the device's udid
    :type udid: str
    :param keyname: name of one specific property field to return
    :type keyname: str
    :returns: the named value when *keyname* is given, otherwise the full dict
    '''
    lockdown = LockdownClient(udid)
    # The service handshake is needed before allValues is populated.
    lockdown.startService("com.apple.afc")
    values = lockdown.allValues
    return values.get(keyname) if keyname else values
def get_screenshot(udid = None, filepath = "/tmp/screenshot.png"):
    '''Take a screenshot of an iOS device and convert it to PNG.

    :param udid: the device's udid
    :type udid: str
    :param filepath: where to store the PNG screenshot
    :type filepath: str
    :returns: boolean - whether the screenshot succeeded
    '''
    result = False
    tiff_file = tempfile.NamedTemporaryFile(suffix='.tiff')
    tiff_file_path = tiff_file.name
    lockdown = LockdownClient(udid)
    screenshot = screenshotr(lockdown)
    data = screenshot.take_screenshot()
    with open(tiff_file_path, "wb") as fd:
        fd.write(data)
    screenshot.stop_session()
    try:
        # BUG FIX: pass an argument vector without a shell. The previous
        # `" ".join(args)` + shell=True broke on paths containing spaces and
        # allowed shell injection through *filepath*; "-s format png" is also
        # really three separate arguments to sips.
        args = ["/usr/bin/sips", "-s", "format", "png", tiff_file_path, "--out", filepath]
        check_call(args, stderr=STDOUT)
        result = True
    except CalledProcessError:
        pass  # conversion failed; report False rather than raising
    finally:
        tiff_file.close()  # also deletes the temporary TIFF
    return result
def get_crash_log(procname, device_udid = None, log_path='/tmp'):
    '''Download the crash logs of *procname* to *log_path*, deleting the
    remote copies, and return the newest matching local log (or None).
    '''
    crash_client = AFCCrashLog(device_udid)
    return crash_client.get_crash_log(procname, log_path, True)
def pull_file(bundle_id, remotepath, localpath='/tmp', device_udid = None, is_dir=False, is_delete = True):
    '''Copy file(s) from an app's sandbox on the device to the local machine.

    :param bundle_id: the app's bundle id
    :type bundle_id: str
    :param remotepath: sandbox directory or file, e.g. /Library/Caches/test/
    :type remotepath: str
    :param localpath: local destination directory
    :type localpath: str
    :param device_udid: the device's udid
    :type device_udid: str
    :param is_dir: whether *remotepath* is a directory (default: single file)
    :type is_dir: bool
    :param is_delete: whether to delete the remote copies afterwards
    :type is_delete: bool
    :returns: list or None - local paths of the copied files
    '''
    client = None
    files = None
    try:
        client = SandboxClient(device_udid, bundle_id)
        files = client.copy_to_local(remotepath, localpath, is_dir, is_delete)
    finally:
        # Always release the house_arrest service port.
        if client is not None:
            client.close()
    return files
def push_file(bundle_id, localpath, remotepath, device_udid = None):
    '''Copy a local file into the app's sandbox on the device.

    :param bundle_id: the app's bundle id
    :type bundle_id: str
    :param localpath: path of the local file
    :type localpath: str
    :param remotepath: sandbox directory, e.g. /Library/Caches/test/
    :type remotepath: str
    :param device_udid: the device's udid
    :type device_udid: str
    :returns: boolean - whether the copy actually succeeded
    '''
    afc_client = None
    result = False
    try:
        afc_client = SandboxClient(device_udid, bundle_id)
        # BUG FIX: keep the status reported by copy_to_remote. The original
        # unconditionally overwrote it with True, so failed copies were
        # reported as successes.
        result = afc_client.copy_to_remote(localpath, remotepath)
    finally:
        if afc_client is not None:
            afc_client.close()
    return result
def remove_files(bundle_id, file_path, device_udid = None):
    '''Delete a file or directory inside the given app's sandbox.

    :param bundle_id: the app's bundle id
    :type bundle_id: str
    :param file_path: file or directory to delete, e.g. /Documents/test.log
    :type file_path: str
    :param device_udid: the device's udid
    :type device_udid: str
    '''
    client = None
    try:
        client = SandboxClient(device_udid, bundle_id)
        client.remove(file_path)
    finally:
        # Always release the house_arrest service port.
        if client is not None:
            client.close()
def reboot(device_udid = None):
    '''Reboot the device and wait for it to reappear on USB.

    :param device_udid: the device's udid
    :type device_udid: str
    '''
    lockdown = LockdownClient(udid=device_udid)
    DIAGClient(lockdown).restart()
    time.sleep(10)  # give the device time to actually go down
    # Poll for the device coming back; each list_devices() call itself
    # blocks briefly while the USB mux is processed.
    for _ in range(20):
        if device_udid in list_devices():
            print('reboot successfully')
            break
    else:
        print('reboot error: real device disconect')
def get_syslog(watchtime, logFile, procName = None, device_udid = None):
    '''Capture the device syslog for a fixed amount of time.

    :param watchtime: how long to watch
    :param logFile: destination log file name
    :type logFile: str
    :param procName: optional process-name filter
    :type procName: str
    :param device_udid: the device's udid
    :type device_udid: str
    '''
    lockdown = LockdownClient(udid=device_udid)
    Syslog(lockdown).watch(watchtime, logFile, procName)
class InstallationProxy(object):
    '''Wrapper around the com.apple.mobile.installation_proxy lockdown
    service: install, uninstall and enumerate apps on an iOS device.
    '''

    def __init__(self, udid=None):
        self.udid = udid
        self.lockdown = LockdownClient(udid)
        self.service = self.lockdown.startService("com.apple.mobile.installation_proxy")

    def wait_completion(self, handler=None, *args):
        '''Poll the service until the current command finishes.

        :param handler: optional progress callback, invoked as
                        handler(percent_complete, *args)
        :returns: tuple - (success, error_description)
        '''
        while True:
            z = self.service.recvPlist()
            if not z:
                break
            completion = z.get("PercentComplete")
            if completion:
                if handler:
                    handler(completion, *args)
            else:
                if z.get("Status") == "Complete" or ("Status" not in z and "CFBundleIdentifier" in z):
                    return (True, "")
                else:
                    return (False, z.get("ErrorDescription"))

    def send_cmd_for_bid(self, bundle_id, cmd="Archive", options=None, handler=None, *args):
        '''Send a bundle-id based command (e.g. Archive/Uninstall) and wait
        for it to complete.

        :returns: tuple - (success, error_description)
        '''
        cmd = {"Command": cmd, "ApplicationIdentifier": bundle_id}
        if options:
            cmd.update(options)
        self.service.sendPlist(cmd)
        return self.wait_completion(handler, *args)

    def install(self, ipa_path, options=None, handler=None, *args):
        '''Install an application package.

        :param ipa_path: local path of the .ipa package
        :type ipa_path: str
        :return: tuple - (success, error_description)
        '''
        print("上传安装包...")
        afc_client = AFCClient(self.lockdown)
        tmp_ipa = "t%d.ipa" % time.time()
        with open(ipa_path, "rb") as f:
            ipa_content = f.read()
        afc_client.set_file_contents("/" + tmp_ipa, ipa_content)
        print("上传完毕")
        print("开始安装")
        cmd = {"Command": "Install", "PackagePath": tmp_ipa}
        if options:
            cmd.update(options)
        # Reconnect the installation service after the AFC upload.
        self.lockdown = LockdownClient(self.udid)
        self.service = self.lockdown.startService("com.apple.mobile.installation_proxy")
        self.service.sendPlist(cmd)
        # BUG FIX: forward the extra args unpacked; previously the whole
        # tuple was passed as a single positional argument to the handler.
        ret = self.wait_completion(handler, *args)
        return ret

    def uninstall(self, bundle_id, options=None, handler=None, *args):
        '''Uninstall an application.

        :param bundle_id: the app's bundle id
        :type bundle_id: str
        :return: tuple - (success, error_description)
        '''
        # BUG FIX: forward the extra args unpacked (was passed as one tuple).
        return self.send_cmd_for_bid(bundle_id, "Uninstall", options, handler, *args)

    def apps_info(self):
        '''Return the raw "Lookup" result describing installed apps.'''
        self.service.sendPlist({"Command": "Lookup"})
        return self.service.recvPlist()

    def list_apps(self, app_type='all'):
        '''List installed apps.

        :param app_type: 'all', 'system' or 'user'
        :returns: list - one {bundle_id: bundle_name} dict per app
        '''
        options = {}
        if app_type == 'system':
            options["ApplicationType"] = "System"
        elif app_type == 'user':
            options["ApplicationType"] = "User"
        options["ReturnAttributes"] = ["CFBundleIdentifier",
                                       "CFBundleName", ]
        self.service.sendPlist({"Command": "Browse", "ClientOptions": options})
        apps = []
        while True:
            z = self.service.recvPlist()
            if z.get("Status") == "BrowsingApplications":
                apps.extend(z["CurrentList"])
            elif z.get("Status") == "Complete":
                break
            else:
                raise Exception(z.get("ErrorDescription"))
        return [{app["CFBundleIdentifier"]: app["CFBundleName"]} for app in apps]

    def __del__(self):
        # Close the service if the constructor got far enough to create it.
        if hasattr(self, "service"):
            self.service.close()
class AFCShell2(AFCShell):
    # AFCShell variant that reuses an already-connected AFC client instead of
    # opening its own service connection.
    def __init__(self, afcclient, completekey='tab', stdin=None, stdout=None):
        # NOTE(review): deliberately bypasses AFCShell.__init__ (which would
        # open a new connection) and initialises cmd.Cmd directly.
        Cmd.__init__(self, completekey=completekey, stdin=stdin, stdout=stdout)
        self.afc = afcclient
        self.curdir = '/'
        self.prompt = 'AFC$ ' + self.curdir + ' '
        # Reuse the generic completer for 'cat' and 'ls' (assumes AFCShell
        # provides _complete — TODO confirm).
        self.complete_cat = self._complete
        self.complete_ls = self._complete
    def dir_walk(self,dirname):
        # Return the file-name list of the first walk entry for *dirname*
        # (index [2] mirrors os.walk's (dirpath, dirnames, filenames) shape).
        res = self.afc.dir_walk(dirname)
        file_iter = six.next(res)[2]
        return file_iter
    def set_file_contents(self,filename,data):
        # Write *data* to *filename* on the device; returns the AFC status.
        return self.afc.set_file_contents(filename,data)
    def get_file_contents(self,filename):
        # Read and return the raw bytes of *filename* on the device.
        return self.afc.get_file_contents(filename)
    def do_rm(self, p):
        # Remove the file or directory *p* (relative to the current dir).
        f = self.afc.get_file_info(self.curdir + "/" + p)
        if f is None:
            # Path does not exist; nothing to do.
            return
        elif 'st_ifmt' in f and f['st_ifmt'] == 'S_IFDIR':
            self.afc.remove_directory(self.curdir + "/" + p)
        else:
            self.afc.file_remove(self.curdir + "/" + p)
class AFCCrashLog(AFCClient):
    '''AFC client bound to com.apple.crashreportcopymobile for pulling crash
    logs off a device.
    '''

    def __init__(self, udid=None, lockdown=None):
        self.lockdown = lockdown if lockdown else LockdownClient(udid)
        self.service = None
        retry = 5
        while retry > 0:
            try:
                self.service = self.lockdown.startService("com.apple.crashreportcopymobile")
                break
            except Exception:  # FIX: narrowed from bare except (was catching SystemExit etc.)
                import traceback
                traceback.print_exc()
                retry -= 1
                time.sleep(5)
        if self.service is None:
            raise RuntimeError('Connect to crashreportcopymobile failed')
        self.afc_shell = AFCShell2(afcclient=self)
        super(AFCCrashLog, self).__init__(self.lockdown, service=self.service)

    def _filter_crash_log(self, logs, log_path, procname):
        '''Return the newest crash log of *procname* among *logs*, or None.

        Logs older than 5 minutes are discarded: they are assumed to be
        stale (e.g. fake crashes caused by an Xcode bug).
        '''
        latest_log = None
        latest_timestamp = 0
        # FIX: raw string (the '\d' escapes were relying on Python leaving
        # unknown escapes alone) and re.escape, because log_path/procname are
        # literals, not regex fragments (a '.' in procname must not match
        # any character).
        pattern = r'%s/%s.+(\d{4}-\d{2}-\d{2}-\d{6}).*\.(ips|synced)' % (
            re.escape(log_path), re.escape(procname))
        for log in logs:
            result = re.match(pattern, log)
            if result:
                time_src = result.group(1)
                timestamp = int(time.mktime(time.strptime(time_src, "%Y-%m-%d-%H%M%S")))
                if timestamp > latest_timestamp:
                    latest_log = log
                    latest_timestamp = timestamp
        now = int(time.time())
        if now - latest_timestamp > 300:
            # More than 5 minutes old -> treat as "no recent crash".
            latest_log = None
        return latest_log

    def get_crash_log(self, procname, dest, is_delete = False):
        '''Copy every crash log whose name contains *procname* into *dest*.

        :param procname: process name to look for
        :param dest: local destination directory
        :param is_delete: remove the remote copies after downloading
        :returns: path of the newest matching local crash log, or None
        '''
        local_crashes = []
        remote_crash_path = '/'
        for filename in self.afc_shell.dir_walk(remote_crash_path):
            if procname in filename:
                remote_crash_file = os.path.join(remote_crash_path, filename)
                data = self.afc_shell.get_file_contents(remote_crash_file)
                local_crash_file = os.path.join(dest, filename)
                local_crashes.append(local_crash_file)
                with open(local_crash_file, 'wb') as fp:
                    fp.write(data)
                if is_delete:
                    self.afc_shell.do_rm(remote_crash_file)
        return self._filter_crash_log(local_crashes, dest, procname)

    def close(self):
        if self.service:
            self.service.close()
class SandboxClient(AFCClient):
    '''
    AFC client for accessing an app's sandbox via the house_arrest service.
    '''

    def __init__(self, udid=None, bid='com.tencent.sng.test.gn', sandbox="VendContainer", lockdown=None):
        self.lockdown = lockdown if lockdown else LockdownClient(udid)
        self.bid = bid
        self.service = None
        self.udid = udid
        retry = 5
        while retry > 0:
            try:
                self.service = self.lockdown.startService("com.apple.mobile.house_arrest")
                break
            except Exception:  # FIX: narrowed from bare except
                import traceback
                traceback.print_exc()
                retry -= 1
                time.sleep(2)
                if retry <= 0:
                    raise
        if self.service is None:
            raise RuntimeError('Connect to house_arrest failed')
        self.service.sendPlist({"Command": sandbox, "Identifier": bid})
        status = self.service.recvPlist()
        if 'Error' in status and status['Error'] == "ApplicationLookupFailed":
            raise RuntimeWarning('ApplicationLookupFailed')
        if 'Status' in status and status['Status'] != 'Complete':
            raise RuntimeWarning('House arrest service launch failed')
        super(SandboxClient, self).__init__(self.lockdown, service=self.service)
        self.afc_shell = AFCShell2(self)

    def copy_to_local(self, remotepath, localpath='/tmp', is_dir=False, is_delete = False):
        '''Copy file(s) from the sandbox to the local machine.

        :param remotepath: sandbox directory or file, e.g. /Library/Caches/test/
        :type remotepath: str
        :param localpath: local destination directory
        :type localpath: str
        :param is_dir: whether *remotepath* is a directory (default: single file)
        :type is_dir: bool
        :param is_delete: whether to delete the remote copies afterwards
        :type is_delete: bool
        :return: list - local paths of the copied files
        '''
        local_files = []
        if is_dir:
            remotefiles = self.afc_shell.dir_walk(remotepath)
        else:
            filepath, filename = os.path.split(remotepath)
            remotefiles = [filename]
            remotepath = filepath
        for f in remotefiles:
            src = os.path.join(remotepath, f)
            content = self.afc_shell.get_file_contents(src)
            # BUG FIX: default to the original name; previously new_f was
            # only assigned inside the udid branch, raising NameError for
            # clients constructed without a udid.
            new_f = f
            if self.udid:
                # Tag the file with the device udid to keep multi-device
                # pulls apart.
                (short_name, extension) = os.path.splitext(f)
                new_f = '%s-%s%s' % (short_name, self.udid, extension)
            if content is None:
                continue
            dst = os.path.join(localpath, new_f)
            local_files.append(dst)
            with open(dst, 'wb') as fd:
                fd.write(content)
            if is_delete:
                self.afc_shell.do_rm(src)
        return local_files

    def copy_to_remote(self, localfile, remotepath):
        '''Copy a single local file into the sandbox directory *remotepath*.

        :param localfile: local file path (files only, not directories)
        :type localfile: str
        :param remotepath: sandbox directory, e.g. /Library/Caches/
        :type remotepath: str
        :return: bool - True when the AFC write reported status 0
        '''
        with open(localfile, 'rb') as fd:
            data = fd.read()
        remotefile = os.path.join(remotepath, os.path.basename(localfile))
        status = self.afc_shell.set_file_contents(remotefile, data)
        return status == 0

    def remove(self, files):
        '''Delete a file or directory inside the sandbox.'''
        self.afc_shell.do_rm(files)

    def close(self):
        '''Must be called explicitly, otherwise the service port leaks.'''
        if self.service:
            self.service.close()
"""
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Ontology Engineering Group
http://www.oeg-upm.net/
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Copyright (C) 2016 Ontology Engineering Group.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
"""
import logging
from rdflib import URIRef, BNode, Graph
from rdflib.namespace import RDFS, OWL, NamespaceManager
from agora.engine.utils.cache import Cache, ContextGraph, cached
__author__ = '<NAME>'
# Module-level logger for the fountain path engine.
log = logging.getLogger('agora.engine.fountain.path')
def __flat_slice(iterable):
    # type: (iter) -> set
    """Flatten *iterable* depth-first (in place) and return the surviving
    non-falsy, non-None elements as a set. Strings count as scalars."""
    # Drop falsy entries up front (Python 2: filter returns a list here).
    lst = filter(lambda x: x, list(iterable))
    for i, _ in enumerate(lst):
        # Splice nested iterables into the list until position i holds a
        # scalar; the enumerate iterator sees the list grow.
        while hasattr(lst[i], "__iter__") and not isinstance(lst[i], basestring):
            lst[i:i + 1] = lst[i]
    return set(filter(lambda x: x is not None, lst))
def __q_name(ns, term):
    # type: (NamespaceManager, any) -> any
    """Render *term* as an N3/prefixed name when it supports n3();
    otherwise return it unchanged."""
    renderer = getattr(term, "n3", None)
    return term.n3(ns) if callable(renderer) else term
def __query(graph, q):
    # type: (Graph, str) -> set
    """Run SPARQL query *q* against *graph* and return the flattened result
    terms, rendered as prefixed names, as a set."""
    # Re-bind the core prefixes so the q-name rendering below is stable.
    graph.namespace_manager.bind('rdfs', RDFS, replace=True, override=True)
    graph.namespace_manager.bind('owl', OWL, replace=True, override=True)
    result = graph.query(q)
    return set([__q_name(graph.namespace_manager, x) for x in __flat_slice(result)])
def __extend_prefixed(ns, pu):
    # type: (dict, str) -> URIRef
    """Expand the prefixed name *pu* ('prefix:local') into a URIRef using the
    *ns* prefix map. A bare name gets the default ('') prefix; an unknown
    prefix falls back to a BNode."""
    pieces = pu.split(':')
    if len(pieces) == 1:
        pieces = ('', pieces[0])
    try:
        return URIRef(ns[pieces[0]] + pieces[1])
    except KeyError:
        return BNode(pu)
def __extend_with(f, graph, *args):
    # type: (callable, Graph, iter) -> iter
    """Union the flattened *args* with the flattened results of applying *f*
    (a (graph, term) callable) to each of them."""
    flat_args = __flat_slice(args)
    extension = __flat_slice([f(graph, term) for term in flat_args])
    return set.union(flat_args, extension)
def _contexts(graph):
    # type: (ContextGraph) -> list
    """List the identifier of every context in *graph* as a string."""
    return [str(ctx.identifier) for ctx in graph.contexts()]
def _update_context(graph, vid, g):
    # type: (ContextGraph, str, Graph) -> None
    """Replace the stored context *vid* of *graph* with the triples of *g*."""
    old = graph.get_context(vid)
    # Unwrap SchemaGraph proxies before handing the context to the store.
    if isinstance(old, SchemaGraph):
        old = old.g
    graph.remove_context(old)
    _add_context(graph, vid, g)
def _remove_context(graph, vid):
    # type: (ContextGraph, str) -> None
    """Drop the named context *vid* from *graph*, unwrapping SchemaGraph
    proxies first."""
    ctx = graph.get_context(vid)
    if isinstance(ctx, SchemaGraph):
        ctx = ctx.g
    graph.remove_context(ctx)
def _get_context(graph, vid):
    # type: (ContextGraph, str) -> Graph
    """Return the named context *vid* of *graph*."""
    return graph.get_context(vid)
def _add_context(graph, vid, g):
    # type: (ContextGraph, str, Graph) -> None
    """Copy all triples of *g* (except ontology declarations) into the
    context *vid* of *graph*, binding in the target context any of g's
    named prefixes whose namespace occurs in a copied term."""
    def match_ns(term):
        # Find a pending namespace whose URI is a substring of *term*.
        filter_ns = [ns for ns in rev_ns if ns in term]
        if filter_ns:
            ns = filter_ns.pop()
            if rev_ns[ns]:
                vid_context.bind(rev_ns[ns], ns)
            # Consume the entry so each namespace is bound at most once.
            del rev_ns[ns]
    # namespace URI -> prefix, skipping rdflib's auto-generated 'ns*' prefixes
    rev_ns = {ns: prefix for prefix, ns in g.namespaces() if not prefix.startswith('ns')}
    vid_context = graph.get_context(vid)
    for s, p, o in g.triples((None, None, None)):
        if o != OWL.Ontology:
            match_ns(s)
            match_ns(p)
            match_ns(o)
            vid_context.add((s, p, o))
def _prefixes(graph):
    # type: (Graph) -> dict
    """Return the graph's prefix -> namespace bindings as a plain dict."""
    return dict(graph.namespaces())
def _get_types(graph):
    # type: (Graph) -> set
    """Return every URI used as a type/class anywhere in the schema graph:
    property domains/ranges, declared classes, subclass-axiom ends and
    restriction targets."""
    return __query(graph,
                   """SELECT DISTINCT ?c WHERE {
                        {
                            ?p a owl:ObjectProperty .
                            {
                                { ?p rdfs:range ?c }
                                UNION
                                { ?p rdfs:domain ?c }
                            }
                        }
                        UNION
                        {
                            ?p a owl:DatatypeProperty .
                            ?p rdfs:domain ?c .
                        }
                        UNION
                        { ?c a owl:Class }
                        UNION
                        { ?c a rdfs:Class }
                        UNION
                        { [] rdfs:subClassOf ?c }
                        UNION
                        { ?c rdfs:subClassOf [] }
                        UNION
                        {
                            ?r a owl:Restriction ;
                               owl:onProperty ?p .
                            {
                                ?p a owl:ObjectProperty .
                                { ?r owl:allValuesFrom ?c }
                                UNION
                                { ?r owl:someValuesFrom ?c }
                            }
                            UNION
                            { ?r owl:onClass ?c }
                        }
                        FILTER(isURI(?c))
                      }""")
def _get_properties(graph):
    # type: (Graph) -> set
    """Return every URI declared or used as a property in the schema graph
    (rdf:Property, OWL object/datatype properties, restriction targets)."""
    return __query(graph, """SELECT DISTINCT ?p WHERE {
                                { ?p a rdf:Property }
                                UNION
                                { ?p a owl:ObjectProperty }
                                UNION
                                { ?p a owl:DatatypeProperty }
                                UNION
                                {
                                    [] a owl:Restriction ;
                                       owl:onProperty ?p .
                                }
                                FILTER(isURI(?p))
                             }""")
def _is_object_property(graph, prop):
    # type: (Graph, str) -> bool
    """ASK whether *prop* (a prefixed name) is an owl:ObjectProperty, either
    declared directly or implied by a class restriction targeting it."""
    evidence = __query(graph, """ASK {
                                    { %s a owl:ObjectProperty }
                                    UNION
                                    {
                                        ?r owl:onProperty %s .
                                        {
                                            { ?c a owl:Class }
                                            UNION
                                            { ?c rdfs:subClassOf ?r }
                                        }
                                        {
                                            {
                                                ?r owl:onClass ?c .
                                            }
                                            UNION
                                            {
                                                ?r owl:someValuesFrom ?c .
                                            }
                                            UNION
                                            {
                                                ?r owl:allValuesFrom ?c .
                                            }
                                        }
                                    }
                                }""" % (prop, prop))
    # __query wraps the single ASK boolean in a set; an empty set means False.
    return False if not evidence else bool(evidence.pop())
def _get_property_domain(graph, prop):
    # type: (Graph, str) -> set
    """Return the domain types of *prop* — explicit rdfs:domain plus
    restriction-based domains — extended with all their subtypes."""
    all_property_domains = graph.query("""
                               PREFIX owl: <http://www.w3.org/2002/07/owl#>
                               PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
                               SELECT DISTINCT ?p ?c WHERE {
                                   { ?p rdfs:domain ?c }
                                   UNION
                                   { ?c rdfs:subClassOf [ owl:onProperty ?p ] }
                                   FILTER (isURI(?p) && isURI(?c))
                               }""")
    # Python 2: map/filter return lists here, so `dom` is a concrete list.
    dom = map(lambda x: __q_name(graph.namespace_manager, x.c),
              filter(lambda x: __q_name(graph.namespace_manager, x.p) == prop, all_property_domains))
    return __extend_with(_get_subtypes, graph, dom)
def _get_property_range(graph, prop):
    # type: (Graph, str) -> set
    """Return the range types of *prop* — explicit rdfs:range plus
    restriction targets — extended with all their subtypes."""
    all_property_ranges = graph.query("""
                              PREFIX owl: <http://www.w3.org/2002/07/owl#>
                              PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
                              SELECT DISTINCT ?p ?r WHERE {
                                  {?p rdfs:range ?r}
                                  UNION
                                  {
                                      ?d owl:onProperty ?p.
                                      { ?d owl:allValuesFrom ?r }
                                      UNION
                                      { ?d owl:someValuesFrom ?r }
                                      UNION
                                      { ?d owl:onClass ?r }
                                      UNION
                                      { ?d owl:onDataRange ?r }
                                  }
                                  FILTER(isURI(?p) && isURI(?r))
                              }""")
    # Python 2: map/filter return lists here.
    rang = map(lambda x: __q_name(graph.namespace_manager, x.r),
               filter(lambda x: __q_name(graph.namespace_manager, x.p) == prop, all_property_ranges))
    return __extend_with(_get_subtypes, graph, rang)
def _get_property_inverses(graph, prop):
    # type: (Graph, str) -> set
    """Return all properties related to *prop* by owl:inverseOf, matching the
    axiom in either direction."""
    return __query(graph, """SELECT DISTINCT ?i WHERE {
                                {%s owl:inverseOf ?i}
                                UNION
                                {?i owl:inverseOf %s}
                                FILTER(isURI(?i))
                             }""" % (prop, prop))
def _get_property_constraints(graph, prop):
    # type: (Graph, str) -> set
    """Generator yielding (domain, ranges) pairs for those domains of *prop*
    that narrow its range via an owl:Restriction. Only 'strong' constraints
    (owl:allValuesFrom, or ones whose strength can't be determined) are
    yielded."""
    all_property_domains = graph.query("""
                               PREFIX owl: <http://www.w3.org/2002/07/owl#>
                               PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
                               SELECT DISTINCT ?p ?c WHERE {
                                   { ?p rdfs:domain ?c }
                                   UNION
                                   { ?c rdfs:subClassOf [ owl:onProperty ?p ] }
                                   FILTER (isURI(?p) && isURI(?c))
                               }""")
    # Python 2: map/filter return lists here.
    dom = map(lambda x: __q_name(graph.namespace_manager, x.c),
              filter(lambda x: __q_name(graph.namespace_manager, x.p) == prop, all_property_domains))
    dom_supertypes = [_get_supertypes(graph, d) for d in dom]
    for d, s in zip(dom, dom_supertypes):
        # Only domains that are subtypes of another domain can constrain it.
        if set.intersection(set(s), set(dom)):
            cons_range = __query(graph, """SELECT DISTINCT ?r WHERE {
                                            {
                                                %s rdfs:subClassOf ?d .
                                                ?d owl:onProperty %s .
                                                { ?d owl:allValuesFrom ?r }
                                                UNION
                                                { ?d owl:someValuesFrom ?r }
                                                UNION
                                                { ?d owl:onClass ?r }
                                                UNION
                                                { ?d owl:onDataRange ?r }
                                            }
                                            FILTER(isURI(?r))
                                          }""" % (d, prop))
            cons_range = __extend_with(_get_subtypes, graph, cons_range)
            if cons_range:
                try:
                    is_strong_constraint = __query(graph, """ASK {
                                                       %s rdfs:subClassOf ?d .
                                                       ?d owl:onProperty %s .
                                                       ?d owl:allValuesFrom []
                                                   }""" % (d, prop)).pop()
                except KeyError:
                    # Empty ASK result set: default to treating it as strong.
                    is_strong_constraint = True
                if is_strong_constraint:
                    yield (d, list(cons_range))
def _get_supertypes(graph, ty):
    # type: (Graph, str) -> set
    """Return the transitive rdfs:subClassOf ancestors of *ty* as prefixed
    names, excluding *ty* itself and any blank nodes."""
    res = map(lambda x: __q_name(graph.namespace_manager, x), filter(lambda y: isinstance(y, URIRef),
                                                                     graph.transitive_objects(
                                                                         __extend_prefixed(_prefixes(graph), ty),
                                                                         RDFS.subClassOf)))
    return set(filter(lambda x: str(x) != ty, res))
def _get_subtypes(graph, ty):
    # type: (Graph, str) -> set
    """Return the transitive rdfs:subClassOf descendants of *ty* as prefixed
    names, excluding *ty* itself and any blank nodes."""
    res = map(lambda x: __q_name(graph.namespace_manager, x), filter(lambda y: isinstance(y, URIRef),
                                                                     graph.transitive_subjects(RDFS.subClassOf,
                                                                                               __extend_prefixed(
                                                                                                   _prefixes(graph),
                                                                                                   ty))))
    return set(filter(lambda x: str(x) != ty, res))
def _get_type_properties(graph, ty):
    # type: (Graph, str) -> set
    """Return all properties whose domain covers *ty* or any of its
    supertypes (explicit rdfs:domain or restriction-based)."""
    all_class_props = graph.query("""
                          PREFIX owl: <http://www.w3.org/2002/07/owl#>
                          PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
                          SELECT DISTINCT ?c ?p WHERE {
                              {?c rdfs:subClassOf [ owl:onProperty ?p ] }
                              UNION
                              {?p rdfs:domain ?c}
                              FILTER (isURI(?p) && isURI(?c))
                          }""")
    all_types = __extend_with(_get_supertypes, graph, ty)
    return set([__q_name(graph.namespace_manager, r.p) for r in all_class_props if
                __q_name(graph.namespace_manager, r.c) in all_types])
def _get_type_specific_properties(graph, ty):
    # type: (Graph, str) -> set
    """Return the properties whose domain is exactly *ty* (supertypes are not
    considered — contrast with _get_type_properties)."""
    all_class_props = graph.query("""
                          PREFIX owl: <http://www.w3.org/2002/07/owl#>
                          PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
                          SELECT DISTINCT ?c ?p WHERE {
                              {?c rdfs:subClassOf [ owl:onProperty ?p ] }
                              UNION
                              {?p rdfs:domain ?c}
                              FILTER (isURI(?p) && isURI(?c))
                          }""")
    return set([__q_name(graph.namespace_manager, r.p) for r in all_class_props if
                __q_name(graph.namespace_manager, r.c) == ty])
def _get_type_references(graph, ty):
    # type: (Graph, str) -> set
    """Return the properties that point AT *ty* or any of its supertypes,
    via rdfs:range or a restriction target."""
    all_class_props = graph.query("""
                          PREFIX owl: <http://www.w3.org/2002/07/owl#>
                          PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
                          SELECT ?c ?p WHERE {
                              { ?r owl:onProperty ?p .
                                {?r owl:someValuesFrom ?c}
                                UNION
                                {?r owl:allValuesFrom ?c}
                                UNION
                                {?r owl:onClass ?c}
                              }
                              UNION
                              {?p rdfs:range ?c}
                              FILTER (isURI(?p) && isURI(?c))
                          }""")
    all_types = __extend_with(_get_supertypes, graph, ty)
    return set([__q_name(graph.namespace_manager, r.p) for r in all_class_props if
                __q_name(graph.namespace_manager, r.c) in all_types])
def _get_type_specific_references(graph, ty):
    # type: (Graph, str) -> set
    """Return the properties that point at exactly *ty* (supertypes are not
    considered — contrast with _get_type_references)."""
    all_class_props = graph.query("""
                          PREFIX owl: <http://www.w3.org/2002/07/owl#>
                          PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
                          SELECT ?c ?p WHERE {
                              { ?r owl:onProperty ?p .
                                {?r owl:someValuesFrom ?c}
                                UNION
                                {?r owl:allValuesFrom ?c}
                                UNION
                                {?r owl:onClass ?c}
                              }
                              UNION
                              {?p rdfs:range ?c}
                              FILTER (isURI(?p) && isURI(?c))
                          }""")
    return set([__q_name(graph.namespace_manager, r.p) for r in all_class_props if
                __q_name(graph.namespace_manager, r.c) == ty])
def _context(f):
    # type: (callable) -> callable
    """Decorator: route calls to *f* through the owner's cache via `cached`.

    The first positional argument is expected to expose a .cache attribute
    (Schema does). Note: no functools.wraps, so f's metadata is not kept.
    """
    def wrap(self=None, *args, **kwargs):
        return cached(self.cache)(f)(self, *args, **kwargs)
    return wrap
@_context
def _query(graph, q):
    """Cached module-level query helper.

    NOTE(review): through _context, *graph* plays the role of `self`, so
    this requires graph.cache to exist — verify against callers; it appears
    unused within this module.
    """
    return graph.query(q)
class SchemaGraph(object):
    """Proxy around an rdflib Graph that routes query() through a
    caller-supplied (typically cached) query function and otherwise
    delegates attribute access to the wrapped graph."""
    def __init__(self, query_f, g):
        self.g = g              # the wrapped graph
        self.__query = query_f  # callable (graph, q) -> query result
    def __getattr__(self, item):
        # Invoked only when normal lookup fails: fall back to the wrapped
        # graph for anything not defined on the proxy itself.
        try:
            return self.__getattribute__(item)
        except:
            return self.g.__getattribute__(item)
    def query(self, q):
        # Delegate through the injected query function (may be cached).
        return self.__query(self.g, q)
    def get_context(self, c):
        # Wrap contexts too, so their queries stay cached.
        return SchemaGraph(self.__query, self.g.get_context(c))
class Schema(object):
    """Memoizing facade over the schema ContextGraph.

    All read operations are decorated with _context, which caches results
    in self.cache; the graph is always exposed wrapped in a SchemaGraph so
    its query() calls go through the cached __query method too.
    """
    def __init__(self):
        self.__cache = Cache()
        self.__graph = None     # underlying ContextGraph, set via `graph` setter
        self.__namespaces = {}  # namespace URI -> prefix
        self.__prefixes = {}    # prefix -> namespace URI
    @property
    def cache(self):
        # type: () -> Cache
        return self.__cache
    @property
    def graph(self):
        # type: () -> ContextGraph
        # Wrapped so queries are served through the cached __query below.
        return SchemaGraph(self.__query, self.__graph)
    @graph.setter
    def graph(self, g):
        self.__graph = g
        # NOTE(review): disables store graph-awareness — presumably so empty
        # contexts are not tracked as first-class graphs; confirm.
        self.__graph.store.graph_aware = False
        self.update_ns_dicts()
    def update_ns_dicts(self):
        """Rebuild both namespace lookup dicts from the graph and invalidate
        every cached result."""
        self.__namespaces.update([(uri, prefix) for (prefix, uri) in self.__graph.namespaces()])
        self.__prefixes.update([(prefix, uri) for (prefix, uri) in self.__graph.namespaces()])
        self.__cache.clear()
    def add_context(self, id, context):
        # type: (str, Graph) -> None
        """Store *context*'s triples under the identifier *id*."""
        _add_context(self.graph, id, context)
        self.update_ns_dicts()
    def update_context(self, id, context):
        # type: (str, Graph) -> None
        """Replace the stored context *id* with *context*."""
        _update_context(self.graph, id, context)
        self.update_ns_dicts()
    def remove_context(self, id):
        # type: (str) -> None
        """Remove the stored context *id*."""
        _remove_context(self.graph, id)
        self.update_ns_dicts()
        self.__cache.clear()
    @property
    def contexts(self):
        # type: () -> iter
        return _contexts(self.graph)
    def get_context(self, id):
        # type: (str) -> Graph
        return _get_context(self.graph, id)
    @property
    def prefixes(self):
        # type: () -> dict
        return self.__prefixes
    @_context
    def get_types(self, context=None):
        # type: (object) -> iter
        # Accept a context graph, a context id, or None (whole graph).
        if not isinstance(context, ContextGraph):
            context = self.graph.get_context(context) if context is not None else self.graph
        return _get_types(context)
    @_context
    def get_properties(self, context=None):
        # type: (object) -> iter
        # Accept a context graph, a context id, or None (whole graph).
        if not isinstance(context, ContextGraph):
            context = self.graph.get_context(context) if context is not None else self.graph
        return _get_properties(context)
    @_context
    def is_object_property(self, p):
        # type: (str, Graph) -> bool
        return _is_object_property(self.graph, p)
    @_context
    def get_property_domain(self, p):
        # type: (str, Graph) -> iter
        return _get_property_domain(self.graph, p)
    @_context
    def get_property_range(self, p):
        # type: (str, Graph) -> iter
        return _get_property_range(self.graph, p)
    @_context
    def get_property_inverses(self, p):
        # type: (str, Graph) -> iter
        return _get_property_inverses(self.graph, p)
    @_context
    def get_property_constraints(self, p):
        # type: (str, Graph) -> iter
        return _get_property_constraints(self.graph, p)
    @_context
    def get_supertypes(self, t):
        # type: (str, Graph) -> iter
        return _get_supertypes(self.graph, t)
    @_context
    def get_subtypes(self, t):
        # type: (str, Graph) -> iter
        return _get_subtypes(self.graph, t)
    @_context
    def get_type_properties(self, t):
        # type: (str, Graph) -> iter
        return _get_type_properties(self.graph, t)
    @_context
    def get_type_specific_properties(self, t):
        # type: (str, Graph) -> iter
        return _get_type_specific_properties(self.graph, t)
    @_context
    def get_type_references(self, t):
        # type: (str, Graph) -> iter
        return _get_type_references(self.graph, t)
    @_context
    def get_type_specific_references(self, t):
        # type: (str, Graph) -> iter
        return _get_type_specific_references(self.graph, t)
    @_context
    def __query(self, g, q):
        # Cached low-level query hook handed to SchemaGraph (name-mangled
        # to _Schema__query).
        return g.query(q)
|
<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2020) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import mock
import pytest
import yaml
from copy import deepcopy
from ansible_collections.hpe.oneview.tests.unit.utils.hpe_test_utils import OneViewBaseTest
from ansible_collections.hpe.oneview.tests.unit.utils.oneview_module_loader import EnclosureModule
# Shared fixtures for the OneView Enclosure module unit tests.
FAKE_MSG_ERROR = 'Fake message error'
DEFAULT_ENCLOSURE_NAME = 'Test-Enclosure'
PRIMARY_IP_ADDRESS = '172.18.1.13'
STANDBY_IP_ADDRESS = '172.18.1.14'
# Representative enclosure resource as the OneView API returns it.
ENCLOSURE_FROM_ONEVIEW = dict(
    name='Encl1',
    uri='/a/path',
    applianceBayCount=2,
    uidState='Off',
    applianceBays=[
        dict(bayNumber=1, poweredOn=True, bayPowerState='Unknown'),
        dict(bayNumber=2, poweredOn=False, bayPowerState='Unknown')
    ],
    managerBays=[
        dict(bayNumber=1, uidState='On', bayPowerState='Unknown'),
        dict(bayNumber=2, uidState='Off', bayPowerState='Unknown')
    ],
    deviceBays=[
        dict(bayNumber=1, bayPowerState='Unknown'),
        dict(bayNumber=2, bayPowerState='Unknown')
    ],
    interconnectBays=[
        dict(bayNumber=1, bayPowerState='Unknown'),
        dict(bayNumber=2, bayPowerState='Unknown')
    ],
    supportDataCollectionState='Completed',
    activeOaPreferredIP=PRIMARY_IP_ADDRESS,
    standbyOaPreferredIP=STANDBY_IP_ADDRESS
)
# The fixture above plus two unrelated enclosures, for get-all scenarios.
ALL_ENCLOSURES = [dict(name='Encl3', uri='/a/path3', activeOaPreferredIP='172.18.1.3'),
                  dict(name='Encl2', uri='/a/path2', activeOaPreferredIP='172.18.1.2'),
                  ENCLOSURE_FROM_ONEVIEW]
# Ansible module parameter sets, one per tested state/transition.
PARAMS_FOR_PRESENT = dict(
    config='config.json',
    state='present',
    data=dict(name='Encl1',
              hostname=PRIMARY_IP_ADDRESS,
              username='admin',
              password='<PASSWORD>')
)
PARAMS_FOR_PRESENT_NO_HOSTNAME = dict(
    config='config.json',
    state='present',
    data=dict(name='Encl1')
)
PARAMS_WITH_NEW_NAME = dict(
    config='config.json',
    state='present',
    data=dict(name=DEFAULT_ENCLOSURE_NAME,
              newName='OneView-Enclosure')
)
PARAMS_WITH_NEW_RACK_NAME = dict(
    config='config.json',
    state='present',
    data=dict(name='Encl1',
              rackName='Another-Rack-Name')
)
PARAMS_WITH_CALIBRATED_MAX_POWER = dict(
    config='config.json',
    state='present',
    data=dict(name='Encl1',
              calibratedMaxPower=1750)
)
PARAMS_FOR_ABSENT = dict(
    config='config.json',
    state='absent',
    data=dict(name=DEFAULT_ENCLOSURE_NAME)
)
PARAMS_FOR_RECONFIGURED = dict(
    config='config.json',
    state='reconfigured',
    data=dict(name=DEFAULT_ENCLOSURE_NAME)
)
PARAMS_FOR_REFRESH = dict(
    config='config.json',
    state='refreshed',
    data=dict(name=DEFAULT_ENCLOSURE_NAME,
              refreshState='Refreshing')
)
PARAMS_FOR_BAY_POWER_ON = dict(
    config='config.json',
    state='appliance_bays_powered_on',
    data=dict(name=DEFAULT_ENCLOSURE_NAME,
              bayNumber=2)
)
# Certificate signing request scenarios.
PARAMS_FOR_CREATE_CSR = dict(
    config='config.json',
    state='create_certificate_request',
    data=dict(name=DEFAULT_ENCLOSURE_NAME,
              type='CertificateDtoV2',
              organization='HPE',
              organizationalUnit='IT',
              locality='Fort Collins',
              state='Colorado',
              country='US',
              commonName='e10-oa',
              bay_number=1)
)
PARAMS_FOR_GET_CSR = dict(
    config='config.json',
    state='get_certificate_request',
    data=dict(name=DEFAULT_ENCLOSURE_NAME,
              bay_number=1)
)
PARAMS_FOR_IMPORT_CSR = dict(
    config='config.json',
    state='import_certificate_request',
    data=dict(name=DEFAULT_ENCLOSURE_NAME,
              type='CertificateDataV2',
              base64Data='certificate')
)
PARAMS_FOR_DATA_COL_SET = """
config: "{{ config_file_path }}"
state: support_data_collection_set
data:
name: 'Test-Enclosure'
supportDataCollectionState: 'PendingCollection'
"""
PARAMS_FOR_INTERCONNECT_BAY_IPV4_RELEASE = """
config: "{{ config_file_path }}"
state: interconnect_bays_ipv4_removed
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_DEVICE_BAY_IPV4_RELEASE = """
config: "{{ config_file_path }}"
state: device_bays_ipv4_removed
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_UID_ON = """
config: "{{ config_file_path }}"
state: uid_on
data:
name: 'Test-Enclosure'
"""
PARAMS_FOR_UID_OFF = """
config: "{{ config_file_path }}"
state: uid_off
data:
name: 'Test-Enclosure'
"""
PARAMS_FOR_MANAGER_BAY_UID_ON = """
config: "{{ config_file_path }}"
state: manager_bays_uid_on
data:
name: 'Test-Enclosure'
bayNumber: 2
"""
PARAMS_FOR_MANAGER_BAY_UID_OFF = """
config: "{{ config_file_path }}"
state: manager_bays_uid_off
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_MANAGER_BAY_POWER_STATE_E_FUSE = """
config: "{{ config_file_path }}"
state: manager_bays_power_state_e_fuse
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_MANAGER_BAY_POWER_STATE_RESET = """
config: "{{ config_file_path }}"
state: manager_bays_power_state_reset
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_APPLIANCE_BAY_POWER_STATE_E_FUSE = """
config: "{{ config_file_path }}"
state: appliance_bays_power_state_e_fuse
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_DEVICE_BAY_POWER_STATE_E_FUSE = """
config: "{{ config_file_path }}"
state: device_bays_power_state_e_fuse
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_DEVICE_BAY_POWER_STATE_RESET = """
config: "{{ config_file_path }}"
state: device_bays_power_state_reset
data:
name: 'Test-Enclosure'
bayNumber: 1
"""
PARAMS_FOR_INTERCONNECT_BAY_POWER_STATE_E_FUSE = """
config: "{{ config_file_path }}"
state: interconnect_bays_power_state_e_fuse
data:
name: 'Test-Enclosure'
bayNumber: 2
"""
@pytest.mark.resource(TestEnclosureModule='enclosures')
class TestEnclosureModule(OneViewBaseTest):
    """Unit tests for EnclosureModule: create/update/delete, reconfigure,
    refresh, per-bay power/UID/e-fuse operations, IPv4 release, support
    data collection state, scopes and certificate signing requests.

    Fix: the original tests called ``self.resource.patch.not_been_called()``.
    That is NOT a Mock assertion method -- calling it merely creates and
    invokes an auto-generated child mock, so it silently passes and never
    verified that ``patch`` was unused.  Every occurrence is replaced with
    the real assertion ``assert_not_called()``.
    """

    def test_should_create_new_enclosure(self):
        self.resource.get_by_name.return_value = []
        self.resource.get_by_hostname.return_value = []
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.add.return_value = self.resource
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = PARAMS_FOR_PRESENT
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=EnclosureModule.MSG_CREATED,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW)
        )

    def test_should_fail_create_new_enclosure(self):
        self.resource.get_by_name.return_value = []
        self.resource.get_by_hostname.return_value = []
        self.resource.add.return_value = []
        self.mock_ansible_module.params = PARAMS_FOR_PRESENT
        EnclosureModule().run()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_ENCLOSURE_REQUIRED_FIELDS)

    def test_should_not_update_when_no_changes_by_primary_ip_key(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.get_by_hostname.return_value = self.resource
        self.mock_ansible_module.params = PARAMS_FOR_PRESENT
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=EnclosureModule.MSG_ALREADY_PRESENT,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW)
        )

    def test_should_not_update_when_no_changes_by_standby_ip_key(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.get_by_hostname.return_value = self.resource
        params = deepcopy(PARAMS_FOR_PRESENT)
        params['data']['hostname'] = STANDBY_IP_ADDRESS
        self.mock_ansible_module.params = params
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=EnclosureModule.MSG_ALREADY_PRESENT,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW)
        )

    def test_should_not_update_when_no_changes_by_name_key(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.get_by_hostname.return_value = self.resource
        self.mock_ansible_module.params = PARAMS_FOR_PRESENT_NO_HOSTNAME
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=EnclosureModule.MSG_ALREADY_PRESENT,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW)
        )

    def test_update_when_data_has_new_name(self):
        updated_data = ENCLOSURE_FROM_ONEVIEW.copy()
        updated_data['name'] = 'Test-Enclosure-Renamed'
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.get_by_hostname.return_value = self.resource
        updated_obj = self.resource.copy()
        updated_obj.data = updated_data
        self.resource.patch.return_value = updated_obj
        self.mock_ansible_module.params = PARAMS_WITH_NEW_NAME
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=EnclosureModule.MSG_UPDATED,
            ansible_facts=dict(enclosure=self.resource.data)
        )

    def test_update_when_data_has_new_rack_name(self):
        updated_data = ENCLOSURE_FROM_ONEVIEW.copy()
        updated_data['rackName'] = 'Another-Rack-Name'
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        updated_obj = self.resource.copy()
        updated_obj.data = updated_data
        self.resource.patch.return_value = updated_obj
        self.mock_ansible_module.params = PARAMS_WITH_NEW_RACK_NAME
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=EnclosureModule.MSG_UPDATED,
            ansible_facts=dict(enclosure=self.resource.data)
        )

    def test_replace_name_for_new_enclosure(self):
        self.resource.get_by_name.return_value = []
        self.resource.get_by_hostname.return_value = []
        self.resource.get_all.return_value = []
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.add.return_value = self.resource
        self.resource.patch.return_value = []
        params_ansible = deepcopy(PARAMS_FOR_PRESENT)
        params_ansible['data']['name'] = 'Encl1-Renamed'
        self.mock_ansible_module.params = params_ansible
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with("replace", "/name", "Encl1-Renamed")

    def test_replace_name_for_existent_enclosure(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = []
        self.mock_ansible_module.params = PARAMS_WITH_NEW_NAME
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with("replace", "/name", "OneView-Enclosure")

    def test_replace_rack_name_for_new_enclosure(self):
        updated_data = ENCLOSURE_FROM_ONEVIEW.copy()
        updated_data['rackName'] = 'Another-Rack-Name'
        self.resource.get_by_name.return_value = []
        self.resource.get_by_hostname.return_value = []
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.add.return_value = self.resource
        self.resource.patch.return_value = updated_data
        params_ansible = deepcopy(PARAMS_FOR_PRESENT)
        params_ansible['data']['rackName'] = 'Another-Rack-Name'
        self.mock_ansible_module.params = params_ansible
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(
            "replace", "/rackName", "Another-Rack-Name")

    def test_replace_rack_name_for_existent_enclosure(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = []
        self.mock_ansible_module.params = PARAMS_WITH_NEW_RACK_NAME
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(
            "replace", "/rackName", "Another-Rack-Name")

    def test_update_calibrated_max_power_for_existent_enclosure(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = []
        self.mock_ansible_module.params = PARAMS_WITH_CALIBRATED_MAX_POWER
        EnclosureModule().run()
        self.resource.update_environmental_configuration.assert_called_once_with(
            {"calibratedMaxPower": 1750})

    def test_should_remove_enclosure(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.mock_ansible_module.params = PARAMS_FOR_ABSENT
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=EnclosureModule.MSG_DELETED
        )

    def test_should_do_nothing_when_enclosure_not_exist(self):
        self.resource.get_by_name.return_value = []
        self.mock_ansible_module.params = PARAMS_FOR_ABSENT
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=EnclosureModule.MSG_ALREADY_ABSENT
        )

    def test_should_reconfigure_enclosure(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.update_configuration.return_value = ENCLOSURE_FROM_ONEVIEW
        self.mock_ansible_module.params = PARAMS_FOR_RECONFIGURED
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=EnclosureModule.MSG_RECONFIGURED,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW)
        )

    def test_should_fail_when_enclosure_not_exist(self):
        self.resource.get_by_name.return_value = []
        self.mock_ansible_module.params = PARAMS_FOR_RECONFIGURED
        EnclosureModule().run()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_ENCLOSURE_NOT_FOUND)

    def test_should_fail_when_name_is_not_in_data(self):
        self.resource.get_by_name.return_value = []
        params = deepcopy(PARAMS_FOR_RECONFIGURED)
        del params['data']['name']
        self.mock_ansible_module.params = params
        EnclosureModule().run()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=EnclosureModule.MSG_ENCLOSURE_NOT_FOUND)

    def test_should_refresh_enclosure(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.get.return_value = ENCLOSURE_FROM_ONEVIEW
        self.mock_ansible_module.params = PARAMS_FOR_REFRESH
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_REFRESHED
        )

    def test_should_power_on_appliance_bays(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = PARAMS_FOR_BAY_POWER_ON
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/applianceBays/2/power',
                                                    value='On')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_APPLIANCE_BAY_POWERED_ON
        )

    def test_should_not_power_on_when_state_is_already_on(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_do_nothing = deepcopy(PARAMS_FOR_BAY_POWER_ON)
        params_power_on_do_nothing['data']['bayNumber'] = 1
        self.mock_ansible_module.params = params_power_on_do_nothing
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_APPLIANCE_BAY_ALREADY_POWERED_ON
        )

    def test_should_fail_when_appliance_bay_not_found_power_on(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = deepcopy(PARAMS_FOR_BAY_POWER_ON)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_appliance_bays_power_on(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, applianceBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = PARAMS_FOR_BAY_POWER_ON
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_turn_on_uid(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_UID_ON)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/uidState',
                                                    value='On')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_UID_POWERED_ON
        )

    def test_should_not_set_to_on_when_it_is_already_on(self):
        enclosure_uid_on = dict(ENCLOSURE_FROM_ONEVIEW, uidState='On')
        self.resource.data = enclosure_uid_on
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_UID_ON)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosure=enclosure_uid_on),
            msg=EnclosureModule.MSG_UID_ALREADY_POWERED_ON
        )

    def test_should_turn_off_uid(self):
        enclosure_uid_on = dict(ENCLOSURE_FROM_ONEVIEW, uidState='On')
        self.resource.data = enclosure_uid_on
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_UID_OFF)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/uidState',
                                                    value='Off')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=enclosure_uid_on),
            msg=EnclosureModule.MSG_UID_POWERED_OFF
        )

    def test_should_not_set_to_off_when_it_is_already_off(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_UID_OFF)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_UID_ALREADY_POWERED_OFF
        )

    def test_should_turn_on_uid_manager_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_ON)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/managerBays/2/uidState',
                                                    value='On')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_MANAGER_BAY_UID_ON
        )

    def test_should_not_set_to_on_when_state_already_on(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_manager_bay_uid = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_ON)
        params_manager_bay_uid['data']['bayNumber'] = '1'
        self.mock_ansible_module.params = params_manager_bay_uid
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_MANAGER_BAY_UID_ALREADY_ON
        )

    def test_should_fail_when_manager_bay_not_found(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_ON)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_manager_bays_uid_on(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, managerBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_ON)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_turn_off_uid_manager_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_OFF)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/managerBays/1/uidState',
                                                    value='Off')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_MANAGER_BAY_UID_OFF
        )

    def test_should_not_set_to_off_when_state_already_off(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_manager_bay_uid = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_OFF)
        params_manager_bay_uid['data']['bayNumber'] = '2'
        self.mock_ansible_module.params = params_manager_bay_uid
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_MANAGER_BAY_UID_ALREADY_OFF
        )

    def test_should_fail_when_manager_bay_not_found_uid_off(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_OFF)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_manager_bays_uid_off(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, managerBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_UID_OFF)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_perform_an_e_fuse_manager_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        updated_resource = self.resource.copy()
        updated_resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = updated_resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/managerBays/1/bayPowerState',
                                                    value='E-Fuse')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_MANAGER_BAY_POWER_STATE_E_FUSED
        )

    def test_should_fail_when_manager_bay_not_found_e_fuse(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_POWER_STATE_E_FUSE)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_manager_bays_e_fuse(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, managerBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_reset_manager_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_POWER_STATE_RESET)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/managerBays/1/bayPowerState',
                                                    value='Reset')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_MANAGER_BAY_POWER_STATE_RESET
        )

    def test_should_fail_when_manager_bay_not_found_power_reset(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_POWER_STATE_RESET)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_manager_bays_reset(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, managerBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_MANAGER_BAY_POWER_STATE_RESET)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_perform_an_e_fuse_appliance_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_APPLIANCE_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/applianceBays/1/bayPowerState',
                                                    value='E-Fuse')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_APPLIANCE_BAY_POWER_STATE_E_FUSED
        )

    def test_should_fail_when_appliance_bay_not_found_appliance_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_APPLIANCE_BAY_POWER_STATE_E_FUSE)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_appliance_bays_e_fuse(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, applianceBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_APPLIANCE_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_perform_an_e_fuse_device_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/deviceBays/1/bayPowerState',
                                                    value='E-Fuse')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_DEVICE_BAY_POWER_STATE_E_FUSED
        )

    def test_should_fail_when_device_bay_not_found_e_fuse(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_POWER_STATE_E_FUSE)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_device_bays_e_fuse(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, deviceBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_reset_device_bay(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_POWER_STATE_RESET)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/deviceBays/1/bayPowerState',
                                                    value='Reset')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_DEVICE_BAY_POWER_STATE_RESET
        )

    def test_should_fail_when_device_bay_not_found_reset(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_POWER_STATE_RESET)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_device_bays_reset(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, deviceBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_POWER_STATE_RESET)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY,
                                                                   msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_perform_an_e_fuse_interconnect(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_INTERCONNECT_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/interconnectBays/2/bayPowerState',
                                                    value='E-Fuse')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_INTERCONNECT_BAY_POWER_STATE_E_FUSE
        )

    def test_should_fail_when_interconnect_bay_not_found_e_fuse(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_INTERCONNECT_BAY_POWER_STATE_E_FUSE)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_interconnect_bays_e_fuse(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, interconnectBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_INTERCONNECT_BAY_POWER_STATE_E_FUSE)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_remove_ipv4_device_bays(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_IPV4_RELEASE)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='remove',
                                                    path='/deviceBays/1/ipv4Setting',
                                                    value='')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_DEVICE_BAY_IPV4_SETTING_REMOVED
        )

    def test_should_remove_ipv4_interconnect_bays(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_INTERCONNECT_BAY_IPV4_RELEASE)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='remove',
                                                    path='/interconnectBays/1/ipv4Setting',
                                                    value='')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_INTERCONNECT_BAY_IPV4_SETTING_REMOVED
        )

    def test_should_fail_when_device_bay_not_found_ipv4_release(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_IPV4_RELEASE)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_device_bays_ipv4_release(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, deviceBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_IPV4_RELEASE)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_interconnect_bay_not_found_ipv4(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        params_power_on_not_found_bay = yaml.safe_load(PARAMS_FOR_DEVICE_BAY_IPV4_RELEASE)
        params_power_on_not_found_bay['data']['bayNumber'] = 3
        self.mock_ansible_module.params = params_power_on_not_found_bay
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_fail_when_there_are_not_interconnect_bays_ipv4(self):
        enclosure_without_appliance_bays = dict(ENCLOSURE_FROM_ONEVIEW, interconnectBays=[])
        self.resource.data = enclosure_without_appliance_bays
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_INTERCONNECT_BAY_IPV4_RELEASE)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.fail_json.assert_called_once_with(exception=mock.ANY, msg=EnclosureModule.MSG_BAY_NOT_FOUND)

    def test_should_set_state(self):
        self.resource.data = ENCLOSURE_FROM_ONEVIEW
        self.resource.patch.return_value = self.resource
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DATA_COL_SET)
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/supportDataCollectionState',
                                                    value='PendingCollection')
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_SUPPORT_DATA_COLLECTION_STATE_SET
        )

    def test_should_not_set_state_when_it_is_already_on_desired_state(self):
        enclosure_uid_on = dict(ENCLOSURE_FROM_ONEVIEW, supportDataCollectionState='PendingCollection')
        self.resource.data = enclosure_uid_on
        self.mock_ansible_module.params = yaml.safe_load(PARAMS_FOR_DATA_COL_SET)
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosure=enclosure_uid_on),
            msg=EnclosureModule.MSG_SUPPORT_DATA_COLLECTION_STATE_ALREADY_SET
        )

    def test_update_scopes_when_different(self):
        params_to_scope = deepcopy(PARAMS_FOR_PRESENT_NO_HOSTNAME)
        params_to_scope['data']['scopeUris'] = ['test']
        self.mock_ansible_module.params = params_to_scope
        resource_data = deepcopy(PARAMS_FOR_PRESENT_NO_HOSTNAME)['data']
        resource_data['uri'] = 'rest/enclosures/fake'
        resource_data['scopeUris'] = []
        self.resource.data = resource_data
        patch_return = resource_data.copy()
        patch_return['scopeUris'] = ['test']
        patch_return_obj = self.resource.copy()
        patch_return_obj.data = patch_return
        self.resource.patch.return_value = patch_return_obj
        EnclosureModule().run()
        self.resource.patch.assert_called_once_with(operation='replace',
                                                    path='/scopeUris',
                                                    value=['test'])
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=patch_return),
            msg=EnclosureModule.MSG_UPDATED
        )

    def test_should_do_nothing_when_scopes_are_the_same(self):
        params_to_scope = deepcopy(PARAMS_FOR_PRESENT_NO_HOSTNAME)
        params_to_scope['data']['scopeUris'] = ['test']
        self.mock_ansible_module.params = params_to_scope
        self.resource.data = params_to_scope['data']
        EnclosureModule().run()
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(enclosure=params_to_scope['data']),
            msg=EnclosureModule.MSG_ALREADY_PRESENT
        )

    def test_should_create_new_certificate_signing_request(self):
        self.resource.generate_csr.return_value = ENCLOSURE_FROM_ONEVIEW
        self.mock_ansible_module.params = PARAMS_FOR_CREATE_CSR
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_CREATE_CERTIFICATE_REQUEST
        )

    def test_should_get_previous_certificate_signing_request(self):
        self.resource.get_csr.return_value = ENCLOSURE_FROM_ONEVIEW
        self.mock_ansible_module.params = PARAMS_FOR_GET_CSR
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_GET_CERTIFICATE_REQUEST
        )

    def test_should_import_certificate_signing_request(self):
        self.resource.import_certificate.return_value = ENCLOSURE_FROM_ONEVIEW
        self.mock_ansible_module.params = PARAMS_FOR_IMPORT_CSR
        EnclosureModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(enclosure=ENCLOSURE_FROM_ONEVIEW),
            msg=EnclosureModule.MSG_IMPORT_CERTIFICATE_REQUEST
        )
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    pytest.main([__file__])
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Feedbackbot - slack bot for providing teammates with anonymous feedback.
"""
import argparse
import os
import re
import sys
from slacker import Slacker
# Help text shown by the command-line interface.
DESCRIPTION = 'Procedure to provide teammates with anonymous feedback on slack'
# Sentinel returned by get_user_id when the username cannot be resolved.
USER_NOT_FOUND = 'USER_NOT_FOUND'
# Sentinel returned by get_im_channel_id when no IM channel matches.
COULD_NOT_FIND_CHANNEL = 'COULD_NOT_FIND_CHANNEL'
# Verbose-mode pattern (used with re.VERBOSE) matching messages of the
# form "tell <username> <feedback>".
FEEDBACK_REGEX = r"""
^tell\s # 'tell' at the start of the string
(?P<username>\S*)\s # group any non-whitespace as username
(?P<feedback>.*$) # grab any following characters until EOS as feedback
"""
# Template used when relaying feedback to the target user.
FEEDBACK_STRING = 'An anonymous user wanted to tell you: "{}"'
def list_im_channels(client):
    """Return the IM (direct-message) channel records visible to the bot.

    Returns an empty list when the API call fails or when the response
    body carries no 'ims' payload.  (The original built a pointless
    identity comprehension and raised TypeError if 'ims' was missing.)
    """
    response = client.im.list()
    if not response.successful:
        return []
    return list(response.body.get('ims') or [])
def list_text_from_channel(client, channel_id, oldest=0):
    """Return the text of every message in one IM channel.

    Returns an empty list when the history call is unsuccessful.
    """
    history = client.im.history(channel=channel_id, oldest=oldest)
    if not history.successful:
        return []
    return [message.get('text') for message in history.body.get('messages')]
def open_im(client, user_id):
    """Open (or re-open) a direct-message channel with *user_id*.

    Returns True when the API call succeeded.
    """
    response = client.im.open(user=user_id)
    return response.successful
def post_message(client, channel_id, text):
    """Post *text* to *channel_id*, wrapped in the anonymous-feedback template.

    Returns True when the API call succeeded.
    """
    response = client.chat.post_message(
        channel=channel_id,
        text=FEEDBACK_STRING.format(text),
        as_user=True)
    return response.successful
def get_feedback_ims(client):
    """Scan every IM channel and parse each message for feedback commands.

    Returns a list of {'username', 'feedback'} dicts; messages that do
    not match the feedback pattern are skipped.
    """
    collected = []
    for channel in list_im_channels(client):
        for text in list_text_from_channel(client, channel.get('id')):
            parsed = process_feedback(text)
            if parsed:
                collected.append(parsed)
    return collected
def list_users(client):
    """Return all workspace members, or an empty list on failure."""
    response = client.users.list()
    if not response.successful:
        return []
    return response.body.get('members')
def get_im_channel_id(client, user_id):
    """Map *user_id* to its IM channel id, or COULD_NOT_FIND_CHANNEL."""
    return next(
        (channel.get('id')
         for channel in list_im_channels(client)
         if channel.get('user') == user_id),
        COULD_NOT_FIND_CHANNEL)
def get_user_id(client, username):
    """Map a display *username* to the Slack user id, or USER_NOT_FOUND."""
    return next(
        (user.get('id')
         for user in list_users(client)
         if user.get('name') == username),
        USER_NOT_FOUND)
def process_feedback(feedback_string):
    """Parse a 'tell <user> <feedback>' message.

    Returns a dict with 'username' and 'feedback' keys, or an empty dict
    when the message does not match FEEDBACK_REGEX.
    """
    match = re.search(FEEDBACK_REGEX, feedback_string, re.VERBOSE)
    if match is None:
        return {}
    return {'username': match.group('username'),
            'feedback': match.group('feedback')}
def send_feedback(client, user_id, feedback):
    """Deliver *feedback* to *user_id* through their IM channel."""
    open_im(client, user_id)
    post_message(client, get_im_channel_id(client, user_id), feedback)
def process(client):
    """Forward every pending feedback message to its addressee."""
    for message in get_feedback_ims(client):
        recipient = get_user_id(client, message.get('username'))
        send_feedback(client, recipient, message.get('feedback'))
def get_token(parser):
    """Resolve the Slack API token.

    The SLACK_TOKEN environment variable takes precedence over the
    --token command-line argument (preserving the original lookup order).

    Raises:
        Exception: when no token can be found from either source.
    """
    results = parser.parse_args()
    # getattr avoids the original latent NameError: if the parser had no
    # 'token' attribute AND SLACK_TOKEN was unset, `token` was never bound
    # before the isinstance check.
    token = getattr(results, 'token', None)
    # Environment variable overrides the CLI value when present.
    token = os.environ.get('SLACK_TOKEN', token)
    if not isinstance(token, str):
        raise Exception('No token found for Slack API.')
    return token
def main():
    """Entry point: build the CLI, authenticate, and forward feedback."""
    parser = argparse.ArgumentParser(description=DESCRIPTION)
    parser.add_argument('--token', action='store', dest='token')
    client = Slacker(get_token(parser))
    process(client)
# Propagate main()'s return value as the process exit status.
if __name__ == "__main__":
    sys.exit(main())
|
#
# General-purpose Photovoltaic Device Model - a drift diffusion base/Shockley-Read-Hall
# model for 1st, 2nd and 3rd generation solar cells.
# Copyright (C) 2008-2022 <NAME> r.c.i.mackenzie at googlemail.com
#
# https://www.gpvdm.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License v2.0, as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
## @package snapshot_slider
# A slider to scroll through simulation snapshots.
#
import os
from tab import tab_class
#qt
from PyQt5.QtCore import QSize, Qt
from PyQt5.QtWidgets import QWidget,QVBoxLayout,QToolBar,QSizePolicy,QAction,QTabWidget,QSlider,QHBoxLayout,QLabel,QComboBox,QAbstractItemView
from PyQt5.QtGui import QPainter,QIcon
from PyQt5.QtCore import pyqtSignal
from help import help_window
from dat_file import dat_file
from dat_file_math import dat_file_max_min
from PyQt5.QtCore import QTimer
from icon_lib import icon_get
from util import wrap_text
from gpvdm_tab import gpvdm_tab
from inp import inp
class snapshot_slider(QWidget):
	"""Widget for browsing simulation snapshot directories.

	Each integer-named sub-directory of self.path is one snapshot; a
	slider selects the snapshot and a two-column table picks which files
	(and plot styles) to display.  The `changed` signal fires whenever
	the snapshot or the file selection changes.

	NOTE(review): `_(...)` is presumably a gettext translation function
	installed globally by the application — confirm.
	"""
	# Emitted when the slider position or the file selection changes.
	changed = pyqtSignal()
	def timer_toggle(self):
		# Toggle snapshot auto-play: start a 1 ms QTimer, or stop it if running.
		if self.timer==None:
			self.timer=QTimer()
			self.timer.timeout.connect(self.slider_auto_incroment)
			self.timer.start(1)
			self.tb_play.setIcon(icon_get("media-playback-pause"))
		else:
			self.anim_stop()
	def anim_stop(self):
		# Stop auto-play (if active) and restore the play icon.
		if self.timer!=None:
			self.timer.stop()
			self.timer=None
			self.tb_play.setIcon(icon_get("media-playback-start"))
	def slider_auto_incroment(self):
		# Timer callback: advance the slider by one, wrapping back to 0.
		val=self.slider0.value()
		val=val+1
		if val>self.slider0.maximum():
			val=0
		self.slider0.setValue(val)
	def update(self):
		# Rescan self.path for snapshot directories and rebuild the table.
		# NOTE(review): shadows QWidget.update(); sort(key=int) assumes every
		# sub-directory name is an integer — confirm for all snapshot paths.
		self.dirs=[]
		if os.path.isdir(self.path)==True:
			for name in os.listdir(self.path):
				if name!="." and name!= "..":
					full_path=os.path.join(self.path, name)
					if os.path.isdir(full_path):
						self.dirs.append(name)
		self.dirs.sort(key=int)
		for i in range(0,len(self.dirs)):
			self.dirs[i]=os.path.join(self.path, self.dirs[i])
		self.slider_max=len(self.dirs)-1
		self.slider0.setMaximum(self.slider_max)
		self.tab.clear()
		self.tab.setColumnCount(2)
		self.tab.setSelectionBehavior(QAbstractItemView.SelectRows)
		self.tab.setHorizontalHeaderLabels([ _("File to plot"),_("Plot type")])
		self.tab.setColumnWidth(0, 400)
		self.tab.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
		# Restore the previously-selected files; fall back to one empty row.
		if self.load_state()==False:
			pos=self.tab.insert_row()
			self.insert_row(pos)
	def slider0_change(self):
		# Show the snapshot index next to the slider and notify listeners.
		value = self.slider0.value()
		self.label0.setText(str(value))
		self.changed.emit()
	def get_file_name(self):
		# Build the list of existing file paths (and their plot types) for
		# the currently-selected snapshot.
		# NOTE(review): the early exit returns a bare list while the normal
		# path returns a (ret, plot_types) tuple — callers unpacking two
		# values will fail on the early-exit path; confirm intended.
		ret=[]
		plot_types=[]
		val=self.slider0.value()
		if self.slider_dir_exists()==False:
			return ret
		for i in range(0,self.tab.rowCount()):
			file_path=os.path.join(self.path,self.dirs[val],self.tab.get_value(i,0))
			if os.path.isfile(file_path)==True:
				ret.append(file_path)
				plot_types.append(self.tab.get_value(i,1))
		return ret,plot_types
	def set_path(self,path):
		# Point the widget at a new snapshot root and rescan it.
		self.path=path
		self.update()
	def __init__(self):
		QWidget.__init__(self)
		self.dirs=[]          # absolute paths of the snapshot directories
		self.path=""          # snapshot root; set via set_path()
		self.timer=None       # QTimer while auto-play is running, else None
		self.tb_play = QAction(icon_get("media-playback-start"), wrap_text(_("Play"),2), self)
		self.tb_play.triggered.connect(self.timer_toggle)
		self.setWindowTitle(_("Snapshot slider"))
		self.main_vbox = QVBoxLayout()
		self.slider_hbox0= QHBoxLayout()
		self.slider_max=30
		self.slider0 = QSlider(Qt.Horizontal)
		#self.slider0.setMinimum(0)
		#self.slider0.setMaximum(self.slider_max)
		self.slider0.setTickPosition(QSlider.TicksBelow)
		self.slider0.setTickInterval(5)
		self.slider0.valueChanged.connect(self.slider0_change)
		#self.slider0.setMinimumSize(300, 80)
		self.slider_hbox0.addWidget(self.slider0)
		self.label0 = QLabel()
		self.label0.setText("")
		self.slider0.setValue(1)
		self.slider_hbox0.addWidget(self.label0)
		self.widget0=QWidget()
		self.widget0.setLayout(self.slider_hbox0)
		self.main_vbox.addWidget(self.widget0)
		self.toolbar=QToolBar()
		self.main_vbox.addWidget(self.toolbar)
		# Two-column table (file, plot type) with add/remove toolbar buttons.
		self.tab=gpvdm_tab(toolbar=self.toolbar)
		self.main_vbox.addWidget(self.tab)
		self.tab.tb_add.triggered.connect(self.callback_insert_row)
		self.tab.tb_remove.triggered.connect(self.callback_remove_row)
		self.setLayout(self.main_vbox)
	def slider_dir_exists(self):
		# True when the slider points at a valid entry of self.dirs.
		if self.slider0.value()==-1:
			return False
		if self.slider0.value()>=len(self.dirs):
			return False
		return True
	def update_files_combo(self,combo):
		# Repopulate *combo* with the plottable files of the current
		# snapshot (data.json excluded), preselecting "Jn.dat" if present.
		if self.slider_dir_exists()==False:
			return False
		combo.blockSignals(True)
		combo.clear()
		path=os.path.join(self.path,self.dirs[self.slider0.value()])
		all_files=[]
		if os.path.isdir(path)==True:
			for name in os.listdir(path):
				full_path=os.path.join(path, name)
				if os.path.isfile(full_path):
					if name!="data.json":
						all_files.append(name)
		all_files.sort()
		for a in all_files:
			combo.addItem(a)
		all_items  = [combo.itemText(i) for i in range(combo.count())]
		for i in range(0,len(all_items)):
			if all_items[i] == "Jn.dat":
				combo.setCurrentIndex(i)
		combo.blockSignals(False)
		return True
	def save_state(self):
		# Persist the selected file names (column 0) to <path>/last.inp.
		f=inp()
		for i in range(0,self.tab.rowCount()):
			f.lines.append(self.tab.get_value(i,0))
		f.save_as(os.path.join(self.path,"last.inp"))
	def load_state(self):
		# Restore the table rows from <path>/last.inp; False when absent/empty.
		f=inp()
		if f.load(os.path.join(self.path,"last.inp"))==False:
			return False
		if len(f.lines)==0:
			return False
		print("bing")	# NOTE(review): leftover debug output
		for i in range(0,len(f.lines)):
			line=f.lines[i]
			print(line)	# NOTE(review): leftover debug output
			#pos=self.tab.insert_row()
			pos=self.tab.insert_row()
			self.insert_row(pos)
			self.tab.blockSignals(True)
			self.tab.set_value(i,0,line)
			self.tab.blockSignals(False)
			#f.lines.append(self.tab.get_value(i,0))
		return True
	def files_combo_changed(self):
		# Any combo change: persist the selection and notify listeners.
		self.save_state()
		self.changed.emit()
	def insert_row(self,i):
		# Fill row *i* with a file combo (col 0) and a plot-type combo (col 1).
		self.tab.blockSignals(True)
		self.item = QComboBox()
		self.update_files_combo(self.item)
		self.item.currentIndexChanged.connect(self.files_combo_changed)
		#self.item.setText(v2)
		#self.item.button.clicked.connect(self.callback_show_list)
		self.tab.setCellWidget(i,0,self.item)
		self.item_type = QComboBox()
		self.item_type.addItem("wireframe")
		self.item_type.addItem("heat")
		self.item_type.addItem("contour")
		self.item_type.currentIndexChanged.connect(self.files_combo_changed)
		self.tab.setCellWidget(i,1,self.item_type)
		self.tab.blockSignals(False)
	def callback_insert_row(self):
		# Toolbar "add": append a row and notify listeners.
		pos=self.tab.insert_row()
		self.insert_row(pos)
		self.changed.emit()
	def callback_remove_row(self):
		# Toolbar "remove": delete the selected row and notify listeners.
		self.tab.remove()
		self.changed.emit()
|
<filename>django/project/my_web/my_web/personas/views.py
from django.shortcuts import get_object_or_404
from django.http import JsonResponse
from django.http import Http404
from django.http import HttpResponse
from django.template import loader
from django.urls import reverse
import json
from .models import (
Persona,
Miembro,
)
from .forms import (
PersonaForm,
MiembroForm,
)
def example_display(request):
    """Render the example page, passing the AJAX endpoint URL to the template."""
    template = loader.get_template('example.html')
    context = {"result_url": reverse('example_view')}
    return HttpResponse(template.render(context, request))
def example_view(request):
    """AJAX endpoint: log the received matrix (if any), reply with a 5x1 matrix."""
    # The client may send a JSON-encoded matrix in the query string.
    raw_matrix = request.GET.get('matrix', None)
    if raw_matrix is not None:
        matrix = json.loads(raw_matrix)
        print("matrix: {}".format(matrix))
    # Fixed 5x1 response matrix.
    payload = {
        "matrix_data": [
            [7],
            [2],
            [4],
            [10],
            [3],
        ],
        "matrix_size": [5, 1]
    }
    return JsonResponse(payload)
def persona_listing(request):
    """Render the table of all Persona records."""
    rows = []
    for number, persona in enumerate(Persona.objects.all(), start=1):
        detail_url = reverse('persona', kwargs={'persona_id': persona.id})
        rows.append({
            0: str(number),
            1: persona.nombre_completo,
            2: persona.genero_label,
            # NOTE(review): column 3 is labelled 'Nacionalidad' but repeats
            # genero_label — confirm which attribute was intended.
            3: persona.genero_label,
            4: persona.is_member,
            'link': {
                'url': detail_url,
                'label': 'Detalle'
            },
        })
    lista_data = {
        'lista': rows,
        'columnas': [
            'No.', 'Nombre', 'Genero', 'Nacionalidad', 'Es Miembro', 'Detalle'
        ]
    }
    context = {
        'lista': lista_data,
    }
    template = loader.get_template('personas/persona_listing.html')
    return HttpResponse(template.render(context, request))
def persona_detail(request, persona_id=None):
    """Render the edit form for one Persona; 404 when no id is supplied."""
    if persona_id is None:
        raise Http404("No existe la persona seleccionada")
    persona = get_object_or_404(Persona, pk=persona_id)
    if request.method == 'POST':
        # NOTE(review): the bound form is built but never saved here —
        # confirm persistence happens elsewhere.
        form = PersonaForm(request.POST, instance=persona)
    else:
        form = PersonaForm(instance=persona)
    context = {
        'form': form,
        'pais_url': reverse('countries'),
        'estado_url': reverse('states'),
        'ciudad_url': reverse('cities'),
    }
    template = loader.get_template('personas/persona_detail.html')
    return HttpResponse(template.render(context, request))
def member_listing(request):
    """Render the table of all Miembro records."""
    rows = []
    for number, miembro in enumerate(Miembro.objects.all(), start=1):
        rows.append({
            0: str(number),
            1: miembro.nombre_completo,
            2: miembro.genero_label,
            # NOTE(review): column 3 is labelled 'Nacionalidad' but repeats
            # genero_label — confirm which attribute was intended.
            3: miembro.genero_label,
            4: miembro.numero_membresia,
            'link': {
                'url': reverse('miembros'),
                'label': 'Detalle'
            }, #persona.id
        })
    lista_data = {
        'lista': rows,
        'columnas': [
            'No.', 'Nombre', 'Genero', 'Nacionalidad', 'Numero Miembro', 'Detalle'
        ]
    }
    context = {
        'lista': lista_data,
    }
    template = loader.get_template('personas/member_listing.html')
    return HttpResponse(template.render(context, request))
def member_detail(request, member_id=None):
    # TODO: unimplemented stub — the member detail view has not been written.
    pass
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""This module helper query builder for my dashboard page."""
from sqlalchemy import and_
from sqlalchemy import literal
from sqlalchemy import or_
from sqlalchemy import true, false
from sqlalchemy import union
from sqlalchemy import alias
from sqlalchemy.orm import aliased
from ggrc import db
from ggrc.models import all_models
from ggrc.models.object_person import ObjectPerson
from ggrc.models.relationship import Relationship
from ggrc.models.custom_attribute_value import CustomAttributeValue
from ggrc.query import utils as query_utils
from ggrc_basic_permissions import backlog_workflows
from ggrc_basic_permissions.models import UserRole, Role
from ggrc_workflows.models import Cycle
def _types_to_type_models(types):
  """Convert string type names into their model classes.

  A None argument selects every registered model.
  """
  if types is None:
    return all_models.all_models
  wanted = set(types)
  return [model for model in all_models.all_models if model.__name__ in wanted]
def get_myobjects_query(types=None, contact_id=None, is_creator=False): # noqa
  """Filters by "myview" for a given person.
  Finds all objects which might appear on a user's Profile or Dashboard
  pages.
  This method only *limits* the result set -- Contexts and Roles will still
  filter out forbidden objects.
  """
  type_models = _types_to_type_models(types)
  model_names = [model.__name__ for model in type_models]
  # Each helper below contributes one (id, type, context_id) SELECT;
  # the final result is the UNION of all of them.
  type_union_queries = []
  def _get_people():
    """Get all the people w/o any restrictions."""
    all_people = db.session.query(
        all_models.Person.id.label('id'),
        literal(all_models.Person.__name__).label('type'),
        literal(None).label('context_id')
    )
    return all_people
  def _get_object_people():
    """Objects to which the user is 'mapped'."""
    object_people_query = db.session.query(
        ObjectPerson.personable_id.label('id'),
        ObjectPerson.personable_type.label('type'),
        literal(None).label('context_id')
    ).filter(
        and_(
            ObjectPerson.person_id == contact_id,
            ObjectPerson.personable_type.in_(model_names)
        )
    )
    return object_people_query
  def _get_object_owners():
    """Objects for which the user is an 'owner'."""
    object_owners_query = db.session.query(
        all_models.AccessControlList.object_id.label('id'),
        all_models.AccessControlList.object_type.label('type'),
        literal(None).label('context_id')
    ).filter(
        and_(
            all_models.AccessControlList.person_id == contact_id,
            all_models.AccessControlList.object_type.in_(model_names),
            all_models.AccessControlRole.name == "Admin"
        )
    )
    return object_owners_query
  def _get_object_mapped_ca():
    """Objects to which the user is mapped via a custom attribute."""
    ca_mapped_objects_query = db.session.query(
        CustomAttributeValue.attributable_id.label('id'),
        CustomAttributeValue.attributable_type.label('type'),
        literal(None).label('context_id')
    ).filter(
        and_(
            CustomAttributeValue.attribute_value == "Person",
            CustomAttributeValue.attribute_object_id == contact_id,
            CustomAttributeValue.attributable_type.in_(model_names)
        )
    )
    return ca_mapped_objects_query
  def _get_objects_user_assigned():
    """Objects for which the user is assigned."""
    # A person can sit on either end of a Relationship row, so both
    # directions are queried and unioned.
    dst_assignee_query = db.session.query(
        Relationship.destination_id.label('id'),
        Relationship.destination_type.label('type'),
        literal(None).label('context_id'),
    ).filter(
        and_(
            Relationship.source_type == "Person",
            Relationship.source_id == contact_id,
            Relationship.destination_type.in_(model_names)
        ),
    )
    src_assignee_query = db.session.query(
        Relationship.source_id.label('id'),
        Relationship.source_type.label('type'),
        literal(None).label('context_id'),
    ).filter(
        and_(
            Relationship.destination_type == "Person",
            Relationship.destination_id == contact_id,
            Relationship.source_type.in_(model_names)
        ),
    )
    return dst_assignee_query.union(src_assignee_query)
  def _get_results_by_context(model):
    """Objects based on the context of the current model.
    Return the objects that are in private contexts via UserRole.
    """
    context_query = db.session.query(
        model.id.label('id'),
        literal(model.__name__).label('type'),
        literal(None).label('context_id'),
    ).join(
        UserRole,
        and_(
            UserRole.context_id == model.context_id,
            UserRole.person_id == contact_id,
        )
    )
    return context_query
  def _get_assigned_to_records(model):
    """Get query by models contacts fields.
    Objects for which the user is the 'contact' or 'secondary contact'.
    Control also has 'principal_assessor' and 'secondary_assessor'.
    """
    # Returns filter expressions (not a query) — only for the contact-like
    # columns the model actually has.
    model_type_queries = []
    for attr in ('contact_id', 'secondary_contact_id',
                 'principal_assessor_id', 'secondary_assessor_id'):
      if hasattr(model, attr):
        model_type_queries.append(getattr(model, attr) == contact_id)
    return model_type_queries
  def _get_tasks_in_cycle(model):
    """Filter tasks with particular statuses and cycle.
    Filtering tasks with statuses "Assigned", "InProgress" and "Finished".
    Where the task is in current users cycle.
    """
    task_query = db.session.query(
        model.id.label('id'),
        literal(model.__name__).label('type'),
        literal(None).label('context_id'),
    ).join(
        Cycle,
        Cycle.id == model.cycle_id
    ).filter(
        Cycle.is_current == true(),
        model.contact_id == contact_id
    )
    # Verification-needed cycles additionally surface Finished/Declined
    # tasks; non-verification cycles only the still-active ones.
    return task_query.filter(
        Cycle.is_verification_needed == true(),
        model.status.in_([
            all_models.CycleTaskGroupObjectTask.ASSIGNED,
            all_models.CycleTaskGroupObjectTask.IN_PROGRESS,
            all_models.CycleTaskGroupObjectTask.FINISHED,
            all_models.CycleTaskGroupObjectTask.DECLINED,
        ])
    ).union_all(
        task_query.filter(
            Cycle.is_verification_needed == false(),
            model.status.in_([
                all_models.CycleTaskGroupObjectTask.ASSIGNED,
                all_models.CycleTaskGroupObjectTask.IN_PROGRESS,
            ])
        )
    )
  def _get_model_specific_query(model):
    """Prepare query specific for a particular model."""
    model_type_query = None
    if model is all_models.CycleTaskGroupObjectTask:
      model_type_query = _get_tasks_in_cycle(model)
    else:
      model_type_queries = _get_assigned_to_records(model)
      if model_type_queries:
        type_column = query_utils.get_type_select_column(model)
        model_type_query = db.session.query(
            model.id.label('id'),
            type_column.label('type'),
            literal(None).label('context_id')
        ).filter(or_(*model_type_queries)).distinct()
    return model_type_query
  def _get_context_relationships():
    """Load list of objects related on contexts and objects types.
    This code handles the case when user is added as `Auditor` and should be
    able to see objects mapped to the `Program` on `My Work` page.
    Returns:
      objects (list((id, type, None))): Related objects
    """
    user_role_query = db.session.query(UserRole.context_id).join(
        Role, UserRole.role_id == Role.id).filter(and_(
            UserRole.person_id == contact_id, Role.name == 'Auditor')
    )
    _ct = aliased(all_models.Context, name="c")
    _rl = aliased(all_models.Relationship, name="rl")
    # Both relationship directions, joined through the Auditor's contexts.
    context_query = db.session.query(
        _rl.source_id.label('id'),
        _rl.source_type.label('type'),
        literal(None)).join(_ct, and_(
            _ct.id.in_(user_role_query),
            _rl.destination_id == _ct.related_object_id,
            _rl.destination_type == _ct.related_object_type,
            _rl.source_type.in_(model_names),
        )).union(db.session.query(
            _rl.destination_id.label('id'),
            _rl.destination_type.label('type'),
            literal(None)).join(_ct, and_(
                _ct.id.in_(user_role_query),
                _rl.source_id == _ct.related_object_id,
                _rl.source_type == _ct.related_object_type,
                _rl.destination_type.in_(model_names),)))
    return context_query
  def _get_custom_roles():
    """Objects for which the user is an 'owner'."""
    custom_roles_query = db.session.query(
        all_models.AccessControlList.object_id.label('id'),
        all_models.AccessControlList.object_type.label('type'),
        literal(None).label('context_id')
    ).join(
        all_models.AccessControlRole,
        all_models.AccessControlList.ac_role_id ==
        all_models.AccessControlRole.id
    ).filter(
        and_(
            all_models.AccessControlList.person_id == contact_id,
            all_models.AccessControlList.object_type.in_(model_names),
            all_models.AccessControlRole.my_work == true(),
            all_models.AccessControlRole.read == true()
        )
    )
    return custom_roles_query
  # Note: We don't return mapped objects for the Creator because being mapped
  # does not give the Creator necessary permissions to view the object.
  if not is_creator:
    type_union_queries.append(_get_object_people())
  type_union_queries.extend((_get_object_owners(),
                             _get_object_mapped_ca(),
                             _get_objects_user_assigned(),
                             _get_context_relationships(),
                             _get_custom_roles(),))
  # Per-model contributions: contact-field matches, plus special cases for
  # Workflow (backlog), Person (everyone) and context-scoped models.
  for model in type_models:
    query = _get_model_specific_query(model)
    if query:
      type_union_queries.append(query)
    if model is all_models.Workflow:
      type_union_queries.append(backlog_workflows())
    if model is all_models.Person:
      type_union_queries.append(_get_people())
    if model in (all_models.Program, all_models.Audit, all_models.Workflow):
      type_union_queries.append(_get_results_by_context(model))
  # Aliased UNION of every contribution, usable as a sub-select.
  return alias(union(*type_union_queries))
|
from unittest import TestCase
from glimslib import fenics_local as fenics
from glimslib.simulation_helpers.helper_classes import Parameters, SubDomains, FunctionSpace, DiscontinuousScalar
class Boundary(fenics.SubDomain):
    # Marks every point on the mesh boundary (used for boundary conditions).
    def inside(self, x, on_boundary):
        return on_boundary
class TestSimulationParameters(TestCase):
    """Unit tests for the Parameters helper class."""

    def setUp(self):
        """Build a small 2D mesh with function space, subdomains, boundaries."""
        # Domain
        nx = ny = nz = 10
        self.mesh = fenics.RectangleMesh(fenics.Point(-2, -2), fenics.Point(2, 2), nx, ny)
        # function spaces: mixed displacement (vector) + concentration (scalar)
        self.displacement_element = fenics.VectorElement("Lagrange", self.mesh.ufl_cell(), 1)
        self.concentration_element = fenics.FiniteElement("Lagrange", self.mesh.ufl_cell(), 1)
        self.element = fenics.MixedElement([self.displacement_element, self.concentration_element])
        subspace_names = {0: 'displacement', 1: 'concentration'}
        self.functionspace = FunctionSpace(self.mesh)
        self.functionspace.init_function_space(self.element, subspace_names)
        # subdomains: label 1 for x>=0, label 2 otherwise
        label_funspace = fenics.FunctionSpace(self.mesh, "DG", 1)
        label_expr = fenics.Expression('(x[0]>=0) ? (1.0) : (2.0)', degree=1)
        labels = fenics.project(label_expr, label_funspace)
        self.labels = labels
        self.tissue_id_name_map = {0: 'outside',
                                   1: 'tissue',
                                   2: 'tumor'}
        self.parameter = {'outside': 0.0,
                          'tissue': 1.0,
                          'tumor': 0.1}
        self.boundary = Boundary()
        boundary_dict = {'boundary_1': self.boundary,
                         'boundary_2': self.boundary}
        self.boundary_dict = boundary_dict
        self.subdomains = SubDomains(self.mesh)
        self.subdomains.setup_subdomains(label_function=self.labels)
        self.subdomains.setup_boundaries(tissue_map=self.tissue_id_name_map,
                                         boundary_fct_dict=self.boundary_dict)
        self.subdomains.setup_measures()
        # parameter instance under test
        self.params = Parameters(self.functionspace, self.subdomains)

    def test_set_initial_value_expressions(self):
        """Initial-value expressions are stored and retrievable by subspace id."""
        u_0_conc_expr = fenics.Expression('sqrt(pow(x[0]-x0,2)+pow(x[1]-y0,2)) < 0.1 ? (1.0) : (0.0)',
                                          degree=1, x0=0.25, y0=0.5)
        u_0_disp_expr = fenics.Constant((0.0, 0.0))
        ivs = {1: u_0_conc_expr, 0: u_0_disp_expr}
        self.params.set_initial_value_expressions(ivs)
        self.assertEqual(self.params.get_iv(1), u_0_conc_expr)
        self.assertEqual(self.params.get_iv(0), u_0_disp_expr)

    def test_define_required_params(self):
        """Time-dependent parameters add sim_time/sim_time_step to the required set."""
        req_params = ['a', 'b', 'c']
        self.params = Parameters(self.functionspace, self.subdomains, time_dependent=False)
        self.params.define_required_params(req_params)
        self.assertEqual(sorted(req_params), sorted(self.params.params_required))
        self.params = Parameters(self.functionspace, self.subdomains, time_dependent=True)
        self.params.define_required_params(req_params)
        self.assertEqual(sorted(req_params + ['sim_time', 'sim_time_step']),
                         sorted(self.params.params_required))

    def test_define_optional_params(self):
        """Optional parameters are stored as given."""
        opt_params = ['d', 'e', 'f']
        self.params = Parameters(self.functionspace, self.subdomains, time_dependent=False)
        self.params.define_optional_params(opt_params)
        self.assertEqual(sorted(opt_params), sorted(self.params.params_optional))

    def test_check_param_arguments(self):
        """_check_param_arguments accepts complete kwargs and rejects kwargs
        missing the time-dependent extras (sim_time, sim_time_step)."""
        self.params = Parameters(self.functionspace, self.subdomains, time_dependent=False)
        req_params = ['a', 'b', 'c']
        self.params.define_required_params(req_params)
        kw_args = {'a': 1, 'b': 2, 'c': 3}
        test = self.params._check_param_arguments(kw_args)
        self.assertTrue(test)
        self.params = Parameters(self.functionspace, self.subdomains, time_dependent=True)
        req_params = ['a', 'b', 'c']
        self.params.define_required_params(req_params)
        # Bug fix: this used to build a fresh `kwargs` dict but then pass the
        # stale `kw_args` from above, and asserted `assertTrue(~test)` —
        # ~False == -1 is truthy, so that assertion could never fail.
        kw_args = {'a': 1, 'b': 2, 'c': 3}
        test = self.params._check_param_arguments(kw_args)
        self.assertFalse(test)

    def test_set_parameters(self):
        """init_parameters creates attributes for required/optional params only,
        expanding dict-valued params into DiscontinuousScalar + *_dict."""
        self.params = Parameters(self.functionspace, self.subdomains, time_dependent=False)
        req_params = ['a', 'b', 'c']
        self.params.define_required_params(req_params)
        opt_params = ['d']
        self.params.define_optional_params(opt_params)
        input_params = {'a': 1, 'b': self.parameter, 'c': 1, 'd': 1, 'e': 1}
        self.params.init_parameters(input_params)
        self.assertTrue(hasattr(self.params, 'a'))
        self.assertTrue(hasattr(self.params, 'b'))
        self.assertTrue(hasattr(self.params, 'b_dict'))
        self.assertEqual(type(self.params.b), DiscontinuousScalar)
        self.assertTrue(hasattr(self.params, 'c'))
        self.assertTrue(hasattr(self.params, 'd'))
        # 'e' was never declared, so it must not become an attribute.
        self.assertFalse(hasattr(self.params, 'e'))

    def test_create_initial_value_function(self):
        """Building the combined initial-value function succeeds."""
        self.params = Parameters(self.functionspace, self.subdomains, time_dependent=False)
        u_0_conc_expr = fenics.Expression('sqrt(pow(x[0]-x0,2)+pow(x[1]-y0,2)) < 0.1 ? (1.0) : (0.0)',
                                          degree=1, x0=0.25, y0=0.5)
        u_0_disp_expr = fenics.Constant((0.0, 0.0))
        ivs = {1: u_0_conc_expr, 0: u_0_disp_expr}
        self.params.set_initial_value_expressions(ivs)
        u = self.params.create_initial_value_function()
        # Previously no assertion was made; at minimum the result must exist.
        self.assertIsNotNone(u)
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:
from easyai.base_name.model_name import ModelName
from easyai.base_name.backbone_name import BackboneName
from easyai.base_name.block_name import NormalizationType, ActivationType
from easyai.base_name.block_name import LayerType, BlockType
from easyai.base_name.loss_name import LossType
from easyai.loss.utility.cross_entropy2d import CrossEntropy2d
from easyai.model.base_block.utility.upsample_layer import Upsample
from easyai.model.base_block.utility.utility_layer import RouteLayer
from easyai.model.base_block.utility.utility_layer import AddLayer
from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock
from easyai.model.utility.base_model import *
from easyai.model.backbone.utility.backbone_factory import BackboneFactory
class FCN8sSeg(BaseModel):
    """FCN-8s semantic segmentation head on a VGG16 backbone.

    Builds the classic FCN-8s skip architecture: score the conv7 features,
    fuse with pool4 (x2 upsample) and pool3 scores, then upsample x8 back
    to input resolution.
    """

    def __init__(self, class_num=2):
        super().__init__()
        self.set_name(ModelName.FCNSeg)
        self.class_number = class_num
        self.bn_name = NormalizationType.BatchNormalize2d
        self.activation_name = ActivationType.ReLU
        self.factory = BackboneFactory()
        self.create_block_list()

    def create_block_list(self):
        """Assemble the backbone plus the FCN-8s decode/skip layers."""
        self.block_out_channels = []
        self.index = 0
        backbone = self.factory.get_base_model(BackboneName.Vgg16)
        base_out_channels = backbone.get_outchannel_list()
        self.add_block_list(BlockType.BaseNet, backbone, base_out_channels[-1])
        # fc6 equivalent: 7x7 conv to 4096 channels.
        layer1 = ConvBNActivationBlock(in_channels=base_out_channels[-1],
                                       out_channels=4096,
                                       kernel_size=7,
                                       padding=3,
                                       bias=True,
                                       bnName=self.bn_name,
                                       activationName=self.activation_name)
        self.add_block_list(layer1.get_name(), layer1, 4096)
        dropout1 = nn.Dropout()
        self.add_block_list(LayerType.Dropout, dropout1, 4096)
        # fc7 equivalent: 1x1 conv.
        layer2 = ConvBNActivationBlock(in_channels=4096,
                                       out_channels=4096,
                                       kernel_size=1,
                                       padding=0,
                                       bias=True,
                                       bnName=self.bn_name,
                                       activationName=self.activation_name)
        self.add_block_list(layer2.get_name(), layer2, 4096)
        dropout2 = nn.Dropout()
        self.add_block_list(LayerType.Dropout, dropout2, 4096)
        # Score layer for the conv7 features.
        layer3 = nn.Conv2d(4096, self.class_number, kernel_size=1)
        self.add_block_list(LayerType.Convolutional, layer3, self.class_number)
        upsample1 = Upsample(scale_factor=2, mode='bilinear')
        self.add_block_list(LayerType.Upsample, upsample1, self.block_out_channels[-1])
        # Route to backbone layer 13 (pool4 features) and score it.
        layer4 = RouteLayer('13')
        output_channel = sum([base_out_channels[i] if i >= 0
                              else self.block_out_channels[i] for i in layer4.layers])
        self.add_block_list(layer4.get_name(), layer4, output_channel)
        layer5 = nn.Conv2d(512, self.class_number, kernel_size=1)
        self.add_block_list(LayerType.Convolutional, layer5, self.class_number)
        # Fuse pool4 score with the upsampled conv7 score.
        layer6 = AddLayer('-1,-3')
        index = layer6.layers[0]
        output_channel = base_out_channels[index] if index >= 0 else self.block_out_channels[index]
        self.add_block_list(layer6.get_name(), layer6, output_channel)
        # Route to backbone layer 9 (pool3 features) and score it.
        layer7 = RouteLayer('9')
        output_channel = sum([base_out_channels[i] if i >= 0
                              else self.block_out_channels[i] for i in layer7.layers])
        self.add_block_list(layer7.get_name(), layer7, output_channel)
        layer8 = nn.Conv2d(256, self.class_number, kernel_size=1)
        self.add_block_list(LayerType.Convolutional, layer8, self.class_number)
        layer9 = RouteLayer('-3')
        # Bug fix: the channel count was previously computed from
        # layer7.layers (copy-paste); it must use layer9's own indices.
        output_channel = sum([base_out_channels[i] if i >= 0
                              else self.block_out_channels[i] for i in layer9.layers])
        self.add_block_list(layer9.get_name(), layer9, output_channel)
        upsample2 = Upsample(scale_factor=2, mode='bilinear')
        self.add_block_list(LayerType.Upsample, upsample2, self.block_out_channels[-1])
        # Fuse pool3 score with the upsampled fused score.
        layer10 = AddLayer('-1,-3')
        index = layer10.layers[0]
        output_channel = base_out_channels[index] if index >= 0 else self.block_out_channels[index]
        self.add_block_list(layer10.get_name(), layer10, output_channel)
        # Final x8 upsample back to input resolution.
        upsample3 = Upsample(scale_factor=8, mode='bilinear')
        self.add_block_list(LayerType.Upsample, upsample3, self.block_out_channels[-1])
        self.create_loss()

    def create_loss(self, input_dict=None):
        """Attach the segmentation loss (pixels labelled 250 are ignored)."""
        self.lossList = []
        loss = CrossEntropy2d(ignore_index=250)
        self.add_block_list(LossType.CrossEntropy2d, loss, self.block_out_channels[-1])
        self.lossList.append(loss)

    def forward(self, x):
        """Run the block list, dispatching routing/fusion layers by key."""
        base_outputs = []
        layer_outputs = []
        output = []
        for key, block in self._modules.items():
            if BlockType.BaseNet in key:
                base_outputs = block(x)
                x = base_outputs[-1]
            elif LayerType.MultiplyLayer in key:
                x = block(layer_outputs, base_outputs)
            elif LayerType.AddLayer in key:
                x = block(layer_outputs, base_outputs)
            elif LayerType.RouteLayer in key:
                x = block(layer_outputs, base_outputs)
            elif LayerType.ShortcutLayer in key:
                x = block(layer_outputs)
            elif LossType.CrossEntropy2d in key:
                # Loss blocks mark an output point; the tensor passes through.
                output.append(x)
            else:
                x = block(x)
            layer_outputs.append(x)
        return output
|
# import inspect
import json
# import ntpath
import os
# import re
import sqlite3
# import time
import dateutil.parser
import openpyxl
from pprint import pprint
# Path to the darts results database, resolved relative to this script.
dartssqlitedb = os.path.join(
    os.path.dirname(__file__),
    "../data/svadarts.sqlite3db"
)
# uri = 'file:' + dartssqlitedb + '?mode=ro'
# db = sqlite3.connect(uri, uri=True)
# Module-level connection shared by every function below.
db = sqlite3.connect(dartssqlitedb)
# db.isolation_level = None
# Maps each spreadsheet spelling of a player's name to one canonical name.
aliases = {
    "Andor": "Andor", "Anil": "Anil", "Bas": "Bas", "Bert": "Bert", "Brandon": "Brandon", "Christa": "Christa", "Colin": "Colin", "Ed": "Ed", "Elbert": "Elbert", "<NAME>": "<NAME>", "Erik": "<NAME>", "Ernie": "Ernie", "Frank": "Frank", "Frans": "Frans", "<NAME>": "<NAME>", "Gert": "<NAME>", "Gijs": "Gijs", "Gilbert": "Gilbert", "Hans": "Hans", "Harry": "Harry", "Henri": "Henri", "Jari": "Jari", "Johan": "Johan", "John": "John", "Joost": "Joost", "Joris": "Joris", "JR": "JR", "Kim": "Kim", "Maik": "Maik", "Marianne": "Marianne", "Martin": "Martin", "Menno": "Menno", "Otto": "Otto", "Paplip": "Paplip", "Pietra": "Pietra", "Reyn": "Reyn", "Youri": "Youri"}
def load_xlsx(filename):
    """Load every worksheet of a results workbook into a list of row dicts.

    Sheet titles end with a parseable date and may start with an evening
    type (default "regulier").  Row 1 of each sheet supplies the column
    names; every row dict also carries 'Date' and 'Type'; empty cells are
    normalised to 0.
    """
    wb = openpyxl.load_workbook(filename, read_only=True)
    sheet_data = []
    for sheet in wb.worksheets:
        title_list = sheet.title.split()
        date_avond = dateutil.parser.parse(title_list[-1])
        if len(title_list) > 1:
            type_avond = title_list[0].lower()
        else:
            type_avond = "regulier"
        header = [cell.value for cell in sheet[1]]
        iter_rows = sheet.rows
        next(iter_rows)  # skip the header row
        for row in iter_rows:
            values = {
                "Date": date_avond.strftime('%Y-%m-%d'),
                "Type": type_avond
            }
            for key, cell in zip(header, row):
                values[key] = cell.value
                # `is None` instead of `== None`; empty cells become 0 so
                # downstream arithmetic works.
                if values[key] is None:
                    values[key] = 0
            sheet_data.append(values)
    # Read-only workbooks keep the file handle open until explicitly closed.
    wb.close()
    return sheet_data
def save_data_to_json(data, filename):
    """Serialise *data* as JSON to *filename* (with '~' expansion)."""
    target = os.path.expanduser(filename)
    with open(target, 'w') as handle:
        json.dump(data, handle)
def exec_select_query(query, args=None, json_file=None):
    """Run a SELECT against the module-level db and return rows as dicts.

    Args:
        query: SQL text with ? placeholders.
        args: optional sequence of bind parameters (avoids the original
            mutable-default list).
        json_file: when given, the rows are also written there as JSON.
    """
    c = db.cursor()
    c.execute(query, args if args is not None else [])
    names = [d[0] for d in c.description]
    rows = [dict(zip(names, row)) for row in c.fetchall()]
    if json_file:
        # Bug fix: this previously called the undefined name `save_to_json`,
        # raising NameError whenever json_file was supplied.
        save_data_to_json(rows, json_file)
    return rows
def init_clean_db():
    """Drop and recreate all tables: speler, game, game_data, adjustments."""
    # Players (canonical names only).
    db.execute('''
    DROP TABLE IF EXISTS speler
    ''')
    db.execute('''
    CREATE TABLE speler (
      speler_naam VARCHAR(128)
    ) ''')
    # Individual matches; (datum, round, players) must be unique.
    db.execute('''
    DROP TABLE IF EXISTS game
    ''')
    db.execute('''
    CREATE TABLE game (
        game_id INTEGER PRIMARY KEY,
        comp VARCHAR(64),
        datum DATE,
        file_order INT,
        game_order INT,
        round VARCHAR(16),
        speler1_naam VARCHAR(128),
        speler2_naam VARCHAR(128),
        speler1_legs INT,
        speler2_legs INT,
        speler1_180s INT,
        speler2_180s INT,
        speler2_lollies INT,
        speler1_lollies INT,
        speler2_finishes VARCHAR(32),
        speler1_finishes VARCHAR(32),
        UNIQUE (datum, round, speler1_naam, speler2_naam)
    ) ''')
    # Per-player derived data (points/rating) for each game.
    db.execute('''
    DROP TABLE IF EXISTS game_data
    ''')
    db.execute('''
    CREATE TABLE game_data (
      game_id INT,
      speler_naam VARCHAR(128),
      speler_punten INT,
      speler_rating INT,
      speler_rating_adj INT,
      speler_game_number INT,
      UNIQUE (game_id, speler_naam)
    )
    ''')
    # Manual corrections for special evenings (one row per player per date).
    db.execute('''
    DROP TABLE IF EXISTS adjustments
    ''')
    db.execute('''
    CREATE TABLE adjustments (
        adj_id INTEGER PRIMARY KEY,
        comp VARCHAR(64),
        datum DATE,
        adj_type VARCHAR(32),
        speler_naam VARCHAR(128),
        speler_points INT,
        speler_180s INT,
        speler_lollies INT,
        speler_finishes VARCHAR(32),
        speler_games INT,
        UNIQUE (datum, speler_naam)
    ) ''')
    db.commit()
def load_all_data_into_db():
    """Load every season spreadsheet into the game/adjustments tables.

    Rows of type 'regulier' become ``game`` records (numbered per file via
    ``file_order``); all other row types become ``adjustments`` records.
    Player names are canonicalised through the module-level ``aliases``
    mapping and every player seen is inserted into ``speler``.  Finally
    ``game_order`` is filled with a global chronological row number.
    """
    files = ['Austerlitz_seizoen_2016-2017.xlsx', 'Austerlitz_seizoen_2017-2018.xlsx',
             'Austerlitz_seizoen_2018-2019.xlsx', 'Austerlitz_seizoen_2019-2020.xlsx']
    spelers = {}

    def get_speler_naam(naam):
        # Canonicalise via aliases and remember the player for the speler table.
        alias = aliases[naam]
        spelers[alias] = 1
        return alias

    for xlsx_file in files:  # renamed from `file`, which shadowed a builtin
        xlsx_path = os.path.join(
            os.path.dirname(__file__),
            "../data/" + xlsx_file
        )
        comp = xlsx_file[:-5]  # competition name = filename minus ".xlsx"
        pprint(comp)
        data = load_xlsx(xlsx_path)
        order = 1
        for entry in data:
            if entry['Type'] == 'regulier':
                if entry['Speler1'] == 0:
                    # soms meer regels in sheet: overslaan
                    # (sometimes extra rows in the sheet: skip them)
                    continue
                db.execute('''INSERT INTO game (
                comp, datum, file_order, round, speler1_naam, speler2_naam,
                speler1_legs, speler2_legs,
                speler1_180s, speler2_180s,
                speler1_lollies, speler2_lollies,
                speler1_finishes, speler2_finishes)
                VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)
                ''', [
                    comp,
                    entry['Date'],
                    order,
                    entry['Ronde'],
                    get_speler_naam(entry['Speler1']),
                    get_speler_naam(entry['Speler2']),
                    entry['Legs1'],
                    entry['Legs2'],
                    entry['Max1'],
                    entry['Max2'],
                    entry['Lollies1'],
                    entry['Lollies2'],
                    entry['Finishes1'],
                    entry['Finishes2']
                ])
                order += 1
            else:
                # Non-regular rows (bonuses, corrections, ...) go to adjustments.
                db.execute('''INSERT INTO adjustments (
                comp, datum, adj_type, speler_naam, speler_points, speler_180s, speler_lollies, speler_finishes,
                speler_games)
                VALUES (?,?,?,?,?,?,?,?,?)
                ''', [
                    comp,
                    entry['Date'],
                    entry['Type'],
                    get_speler_naam(entry['Speler']),
                    entry['Points'],
                    entry['Max'],
                    entry['Lollies'],
                    entry['Finishes'],
                    entry['Matches'],
                ])
    db.commit()
    for speler in spelers:
        db.execute('''INSERT INTO speler (speler_naam) VALUES (?)''', [speler])
    db.commit()
    # Number all games chronologically (datum, then order within the file).
    db.execute('''
    UPDATE
        game
    SET
        game_order = (
            SELECT a.rn
            FROM
            (
                SELECT
                    g1.game_id as x,
                    ROW_NUMBER() OVER (
                        ORDER BY g1.datum, g1.file_order
                    ) AS rn
                FROM game g1
            ) AS a
            WHERE a.x = game_id
        )
    ''')
    db.commit()
|
import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
class VGG(nn.Module):
    """VGG backbone: conv features + 7x7 adaptive pool + 3-layer MLP head."""

    def __init__(self, features, num_classes=1000, init_weights=True):
        super(VGG, self).__init__()
        self.features = features
        self.avgpool = nn.AdaptiveAvgPool2d((7, 7))
        # Classic VGG head: two 4096-wide hidden layers with dropout.
        self.classifier = nn.Sequential(
            nn.Linear(512 * 7 * 7, 4096),
            nn.ReLU(True),
            nn.Dropout(),
            nn.Linear(4096, 4096),
            nn.ReLU(True),
            nn.Dropout(),
            nn.Linear(4096, num_classes),
        )
        if init_weights:
            self._initialize_weights()

    def forward(self, x):
        feats = self.avgpool(self.features(x))
        flat = feats.view(feats.size(0), -1)
        return self.classifier(flat)

    def _initialize_weights(self):
        """He-init convs, unit-init batchnorms, small-normal-init linears."""
        for module in self.modules():
            if isinstance(module, nn.Conv2d):
                nn.init.kaiming_normal_(module.weight, mode='fan_out',
                                        nonlinearity='relu')
                if module.bias is not None:
                    nn.init.constant_(module.bias, 0)
            elif isinstance(module, nn.BatchNorm2d):
                nn.init.constant_(module.weight, 1)
                nn.init.constant_(module.bias, 0)
            elif isinstance(module, nn.Linear):
                nn.init.normal_(module.weight, 0, 0.01)
                nn.init.constant_(module.bias, 0)
def make_layers(cfg, batch_norm=False):
    """Build the VGG conv stack from a config list.

    'M' entries become 2x2 max-pools; integer entries become 3x3 convs of
    that width followed by ReLU (and optionally BatchNorm before the ReLU).
    """
    modules = []
    channels = 3  # RGB input
    for spec in cfg:
        if spec == 'M':
            modules.append(nn.MaxPool2d(kernel_size=2, stride=2))
            continue
        conv = nn.Conv2d(channels, spec, kernel_size=3, padding=1)
        if batch_norm:
            modules.extend([conv, nn.BatchNorm2d(spec), nn.ReLU(inplace=True)])
        else:
            modules.extend([conv, nn.ReLU(inplace=True)])
        channels = spec
    return nn.Sequential(*modules)
# VGG layer configurations (torchvision convention): integers are conv output
# widths, 'M' marks a 2x2 max-pool.
# 'A' = VGG11, 'B' = VGG13, 'D' = VGG16, 'E' = VGG19.
cfg = {
    'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'B': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
    'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
    'E': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
}
def vgg16(**kwargs):
    """Construct a VGG-16 (configuration 'D'); kwargs pass through to VGG."""
    return VGG(make_layers(cfg['D']), **kwargs)
class FCN16s(nn.Module):
    """FCN-16s semantic segmentation head on a VGG16 backbone.

    The backbone is split at layer 24 into a pool4 stage and a pool5 stage;
    pool5 scores are upsampled and fused with 1x1-conv pool4 scores, then
    upsampled to input resolution.

    Args:
        nclass: number of output classes.
        backbone: only 'vgg16' is supported.
        aux: when True, an auxiliary head on pool5 is also returned.
        norm_layer: normalization layer passed to the FCN heads.
    """
    def __init__(self, nclass, backbone='vgg16', aux=False, norm_layer=nn.BatchNorm2d, **kwargs):
        super(FCN16s, self).__init__()
        self.aux = aux
        if backbone == 'vgg16':
            self.pretrained = vgg16().features
        else:
            raise RuntimeError('unknown backbone: {}'.format(backbone))
        self.pool4 = nn.Sequential(*self.pretrained[:24])
        self.pool5 = nn.Sequential(*self.pretrained[24:])
        self.head = _FCNHead(512, nclass, norm_layer)
        self.score_pool4 = nn.Conv2d(512, nclass, 1)
        if aux:
            self.auxlayer = _FCNHead(512, nclass, norm_layer)
        self.__setattr__('exclusive', ['head', 'score_pool4', 'auxlayer'] if aux else ['head', 'score_pool4'])

    def forward(self, x):
        pool4 = self.pool4(x)
        pool5 = self.pool5(pool4)
        score_fr = self.head(pool5)
        score_pool4 = self.score_pool4(pool4)
        upscore2 = F.interpolate(score_fr, score_pool4.size()[2:], mode='bilinear', align_corners=True)
        fuse_pool4 = upscore2 + score_pool4
        out = F.interpolate(fuse_pool4, x.size()[2:], mode='bilinear', align_corners=True)
        if self.aux:
            # Bug fix: the original rebound `outputs` to the tensor `out` and
            # then called outputs.append(auxout), which raises AttributeError
            # for every aux=True model. Return [main, aux] instead.
            auxout = self.auxlayer(pool5)
            auxout = F.interpolate(auxout, x.size()[2:], mode='bilinear', align_corners=True)
            return [out, auxout]
        # aux=False keeps the original behavior: return the tensor directly.
        return out
class _FCNHead(nn.Module):
def __init__(self, in_channels, channels, norm_layer=nn.BatchNorm2d, **kwargs):
super(_FCNHead, self).__init__()
inter_channels = in_channels // 4
self.block = nn.Sequential(
nn.Conv2d(in_channels, inter_channels, 3, padding=1, bias=False),
norm_layer(inter_channels),
nn.ReLU(inplace=True),
nn.Dropout(0.1),
nn.Conv2d(inter_channels, channels, 1)
)
def forward(self, x):
return self.block(x)
if __name__ == '__main__':
    # Smoke test: report FLOPs/params/FPS for a 6-class FCN16s.
    from tools.flops_params_fps_count import flops_params_fps
    model = FCN16s(nclass=6)
    flops_params_fps(model)
|
# tests/test_thinkcell.py
import pandas as pd
import pytest
from thinkcell import Thinkcell, DataFrameError
from datetime import datetime
import os
class TestThinkcell(object):
    """Unit tests for the Thinkcell chart/ppttc builder.

    Covers input transformation, template verification, chart and textfield
    construction (including fills and pandas DataFrames), error paths, and
    .ppttc file saving.

    NOTE(review): template names like "example.pptx" are never created on
    disk here, so verify_template presumably only validates the name/
    extension -- confirm against the thinkcell library.
    """

    # --- construction / representation -----------------------------------
    def test_init(self):
        tc = Thinkcell()
        assert tc.charts == []
    def test_str_all(self):
        tc = Thinkcell()
        assert str(tc) == "[]"

    # --- scalar input transformation -------------------------------------
    @pytest.mark.parametrize(
        "test_input, expected",
        [
            ("daf", {"string": "daf"}),
            (3, {"number": 3}),
            (2.0, {"number": 2.0}),
            (datetime(2012, 9, 16, 0, 0), {"date": "2012-09-16"}),
        ],
    )
    def test_transform_input(self, test_input, expected):
        assert Thinkcell.transform_input(test_input) == expected
    def test_transform_input_bad(self):
        with pytest.raises(ValueError) as e_info:
            Thinkcell.transform_input([3, 4])

    # --- template name verification --------------------------------------
    def test_verify_template_1(self):
        template_name = "not a file name"
        with pytest.raises(TypeError) as e_info:
            Thinkcell.verify_template(template_name)
    def test_verify_template_2(self):
        template_name = 5
        with pytest.raises(TypeError) as e_info:
            Thinkcell.verify_template(template_name)
    def test_verify_template_3(self):
        template_name = "example.pptx"
        assert Thinkcell.verify_template(template_name) == template_name
    def test_add_template(self):
        tc = Thinkcell()
        template = "example.pptx"
        tc.add_template(template)
        assert tc.charts == [{"template": template, "data": []}]

    # --- warnings and error paths for charts / textfields -----------------
    def test_add_chart_warning(self):
        tc = Thinkcell()
        template_name = "template.pptx"
        tc.add_template(template_name)
        with pytest.warns(UserWarning) as record:
            tc.add_chart(
                template_name=template_name,
                chart_name=234,
                categories=["Alpha", "bravo"],
                data=[[3, 4, datetime(2012, 9, 16, 0, 0)], [2, "adokf", 6]],
            )
    def test_add_textfield_warning(self):
        tc = Thinkcell()
        template_name = "template.pptx"
        tc.add_template(template_name)
        with pytest.warns(UserWarning) as record:
            tc.add_textfield(
                template_name=template_name,
                field_name=234,
                text="A great slide",
            )
    def test_add_chart_bad_template(self):
        tc = Thinkcell()
        template = "example.pptx"
        with pytest.raises(ValueError) as e_info:
            tc.add_chart(
                template_name="example2.pptx",
                chart_name="Cool Name bro",
                categories=["Alpha", "bravo"],
                data=[[3, 4, datetime(2012, 9, 16, 0, 0)], [2, "adokf", 6]],
            )
    def test_add_textfield_bad_template(self):
        tc = Thinkcell()
        template = "example.pptx"
        with pytest.raises(ValueError) as e_info:
            tc.add_textfield(
                template_name="example2.pptx",
                field_name="Title",
                text="A great slide",
            )
    def test_add_chart_bad_dimensions(self):
        tc = Thinkcell()
        template_name = "example.pptx"
        tc.add_template(template_name)
        with pytest.raises(ValueError) as e_info:
            tc.add_chart(
                template_name=template_name,
                chart_name="Cool Name bro",
                categories=["Alpha", "bravo"],
                data=[[3, 4, datetime(2012, 9, 16, 0, 0)], [2, "adokf"]],
            )

    # --- happy-path chart construction ------------------------------------
    def test_add_chart(self):
        tc = Thinkcell()
        template = "example.pptx"
        tc.add_template(template)
        tc.add_chart(
            template_name=template,
            chart_name="Cool Name bro",
            categories=["Alpha", "bravo"],
            data=[[3, 4, datetime(2012, 9, 16, 0, 0)], [2, "adokf", 4]],
        )
        assert tc.charts == [
            {
                "template": "example.pptx",
                "data": [
                    {
                        "name": "Cool Name bro",
                        "table": [
                            [None, {"string": "Alpha"}, {"string": "bravo"}],
                            [],
                            [
                                {"number": 3},
                                {"number": 4},
                                {"date": "2012-09-16"},
                            ],
                            [
                                {"number": 2},
                                {"string": "adokf"},
                                {"number": 4},
                            ],
                        ],
                    }
                ],
            }
        ]
    def test_add_chart_with_fill(self):
        tc = Thinkcell()
        template = "example.pptx"
        tc.add_template(template)
        tc.add_chart(
            template_name=template,
            chart_name="Cool Name bro",
            categories=["Alpha", "bravo"],
            data=[[3, 4, datetime(2012, 9, 16, 0, 0)], [2, "adokf", 4]],
            fill=["#70AD47", "#ED7D31"],
        )
        assert tc.charts == [
            {
                "template": "example.pptx",
                "data": [
                    {
                        "name": "Cool Name bro",
                        "table": [
                            [None, {"string": "Alpha"}, {"string": "bravo"}],
                            [],
                            [
                                {"number": 3, "fill": "#70AD47"},
                                {"number": 4, "fill": "#70AD47"},
                                {"date": "2012-09-16", "fill": "#70AD47"},
                            ],
                            [
                                {"number": 2, "fill": "#ED7D31"},
                                {"string": "adokf", "fill": "#ED7D31"},
                                {"number": 4, "fill": "#ED7D31"},
                            ],
                        ],
                    }
                ],
            }
        ]

    # --- pandas DataFrame sources -----------------------------------------
    def test_add_chart_from_dataframe(self):
        tc = Thinkcell()
        template = "example.pptx"
        dataframe = pd.DataFrame(
            columns=["Company", "Employees", "Revenue", "Other"],
            data=[
                ["Apple", 200, 1.5, 10],
                ["Amazon", 100, 1.0, 12],
                ["Slack", 50, 0.5, 16],
            ],
        )
        tc.add_template(template)
        tc.add_chart_from_dataframe(
            template_name=template,
            chart_name="Cool Chart",
            dataframe=dataframe,
        )
        assert tc.charts == [
            {
                "template": "example.pptx",
                "data": [
                    {
                        "name": "Cool Chart",
                        "table": [
                            [
                                None,
                                {"string": "Employees"},
                                {"string": "Revenue"},
                                {"string": "Other"},
                            ],
                            [],
                            [
                                {"string": "Apple"},
                                {"number": 200},
                                {"number": 1.5},
                                {"number": 10},
                            ],
                            [
                                {"string": "Amazon"},
                                {"number": 100},
                                {"number": 1.0},
                                {"number": 12},
                            ],
                            [
                                {"string": "Slack"},
                                {"number": 50},
                                {"number": 0.5},
                                {"number": 16},
                            ],
                        ],
                    }
                ],
            }
        ]
    def test_add_chart_from_dataframe_with_fill(self):
        tc = Thinkcell()
        template = "example.pptx"
        dataframe = pd.DataFrame(
            columns=["Company", "Employees", "Revenue", "Other"],
            data=[
                ["Apple", 200, 1.5, 10],
                ["Amazon", 100, 1.0, 12],
                ["Slack", 50, 0.5, 16],
            ],
        )
        tc.add_template(template)
        tc.add_chart_from_dataframe(
            template_name=template,
            chart_name="Cool Chart",
            dataframe=dataframe,
            fill=["#70AD47", "#ED7D31", "#4472C4"],
        )
        assert tc.charts == [
            {
                "template": "example.pptx",
                "data": [
                    {
                        "name": "Cool Chart",
                        "table": [
                            [
                                None,
                                {"string": "Employees"},
                                {"string": "Revenue"},
                                {"string": "Other"},
                            ],
                            [],
                            [
                                {"string": "Apple", "fill": "#70AD47"},
                                {"number": 200, "fill": "#70AD47"},
                                {"number": 1.5, "fill": "#70AD47"},
                                {"number": 10, "fill": "#70AD47"},
                            ],
                            [
                                {"string": "Amazon", "fill": "#ED7D31"},
                                {"number": 100, "fill": "#ED7D31"},
                                {"number": 1.0, "fill": "#ED7D31"},
                                {"number": 12, "fill": "#ED7D31"},
                            ],
                            [
                                {"string": "Slack", "fill": "#4472C4"},
                                {"number": 50, "fill": "#4472C4"},
                                {"number": 0.5, "fill": "#4472C4"},
                                {"number": 16, "fill": "#4472C4"},
                            ],
                        ],
                    }
                ],
            }
        ]
    def test_add_chart_from_dataframe_invalid_dataframe(self):
        tc = Thinkcell()
        template = "example.pptx"
        dataframe = [
            ["Apple", 200, 1.5, 10],
            ["Amazon", 100, 1.0, 12],
            ["Slack", 50, 0.5, 16],
        ]
        tc.add_template(template)
        with pytest.raises(DataFrameError) as e_info:
            tc.add_chart_from_dataframe(
                template_name=template,
                chart_name="Cool Chart",
                dataframe=dataframe,
            )
    def test_add_chart_from_dataframe_no_columns(self):
        tc = Thinkcell()
        template = "example.pptx"
        dataframe = pd.Series(
            data=[
                ["Apple", 200, 1.5, 10],
                ["Amazon", 100, 1.0, 12],
                ["Slack", 50, 0.5, 16],
            ]
        )
        tc.add_template(template)
        with pytest.raises(DataFrameError) as e_info:
            tc.add_chart_from_dataframe(
                template_name=template,
                chart_name="Cool Chart",
                dataframe=dataframe,
            )
    def test_add_chart_from_dataframe_no_data(self):
        tc = Thinkcell()
        template = "example.pptx"
        dataframe = pd.DataFrame(
            columns=["Company"], data=[["Apple"], ["Amazon"], ["Slack"]]
        )
        tc.add_template(template)
        with pytest.raises(DataFrameError) as e_info:
            tc.add_chart_from_dataframe(
                template_name=template,
                chart_name="Cool Chart",
                dataframe=dataframe,
            )
    def test_add_chart_from_dataframe_no_rows(self):
        tc = Thinkcell()
        template = "example.pptx"
        dataframe = pd.DataFrame(
            columns=["Company", "Employees", "Revenue", "Other"]
        )
        tc.add_template(template)
        with pytest.raises(DataFrameError) as e_info:
            tc.add_chart_from_dataframe(
                template_name=template,
                chart_name="Cool Chart",
                dataframe=dataframe,
            )

    # --- textfields and file output ---------------------------------------
    def test_add_textfield(self):
        tc = Thinkcell()
        template = "example.pptx"
        tc.add_template(template)
        tc.add_textfield(
            template_name=template, field_name="Title", text="A great slide"
        )
        assert tc.charts == [
            {
                "template": "example.pptx",
                "data": [
                    {"name": "Title", "table": [[{"string": "A great slide"}]]}
                ],
            }
        ]
    @pytest.mark.parametrize(
        "input, output", [("word.docx", ValueError), (3, ValueError)]
    )
    def test_save_ppttc_bad_file(self, input, output):
        tc = Thinkcell()
        tc.add_template("example.pptx")
        tc.add_chart(
            template_name="example.pptx",
            chart_name="Chart name",
            categories=["alpha", "bravo"],
            data=[["today", 1, 2], ["tomorrow", 3, 4]],
        )
        with pytest.raises(output) as e_info:
            tc.save_ppttc(filename=input)
    def test_save_pptc(self):
        # Saving with no templates/charts added must be rejected.
        tc = Thinkcell()
        with pytest.raises(ValueError) as e_info:
            tc.save_ppttc("test.ppttc")
    def test_save_ppttc(self):
        tc = Thinkcell()
        tc.add_template("example.pptx")
        tc.add_chart(
            template_name="example.pptx",
            chart_name="Chart name",
            categories=["alpha", "bravo"],
            data=[["today", 1, 2], ["tomorrow", 3, 4]],
        )
        assert tc.save_ppttc(filename="test.ppttc") == True
        os.remove("test.ppttc")
|
from __future__ import print_function
import httplib2
import io
import os
import sys
import time
import dateutil.parser
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from apiclient.http import MediaIoBaseDownload
import pprint
# Change these to the day of the osiris infestation.
YEAR_OF_INFECTION = 2017
# Bug fix: these were written as `01`, the Python-2-only octal literal form,
# which is a SyntaxError on Python 3. Plain decimal 1 has the same value.
MONTH_OF_INFECTION = 1
DAY_OF_INFECTION = 1

# Parse oauth2client's standard CLI flags when argparse is available.
try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None

SCOPES = 'https://www.googleapis.com/auth/drive'
# YOU NEED TO SET UP AN APPLICATION ON GOOGLE AND GENERATE A KEY AND CREATE THIS FILE
CLIENT_SECRET_FILE = 'revert_osiris.json'
APPLICATION_NAME = 'Revert Osiris'
#copy pasta form gdrive API help examples
def get_credentials():
    """Load cached OAuth2 credentials, running the browser flow if needed.

    Tokens are cached under ~/.credentials/drive-python-quickstart.json; a
    valid cached token is returned directly, otherwise the OAuth2 flow runs
    and its result is stored there.

    Returns:
        Credentials, the obtained credential.
    """
    credential_dir = os.path.join(os.path.expanduser('~'), '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'drive-python-quickstart.json')
    store = Storage(credential_path)
    credentials = store.get()
    if credentials and not credentials.invalid:
        return credentials
    # No usable cached token: run the OAuth2 flow and cache the result.
    flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
    flow.user_agent = APPLICATION_NAME
    if flags:
        credentials = tools.run_flow(flow, store, flags)
    else:  # Needed only for compatibility with Python 2.6
        credentials = tools.run(flow, store)
    print('Storing credentials to ' + credential_path)
    return credentials
def main():
    """Find Drive files renamed by the osiris ransomware, back each one up,
    then (with the dangerous lines uncommented) revert each file to its
    second-to-last revision and restore its original name.
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('drive', 'v3', http=http)
    pp = pprint.PrettyPrinter()

    # Page through the whole drive collecting files with 'osiris' in the name.
    # Bug fix: the original never stopped when nextPageToken was absent, so it
    # looped back to the first page forever (and accumulated duplicates); the
    # initial single-file fetch before the loop was dead code and is removed.
    bad_files = []
    next_page = None
    while True:
        results = service.files().list(pageToken=next_page, pageSize=100,
                                       fields="nextPageToken, files(id, name)").execute()
        items = results.get('files', [])
        if not items:
            print('No files found.')
            break
        for item in items:
            # Only act on files with osiris in the name.
            if 'osiris' in item['name']:
                bad_files.append(item)
        next_page = results.get('nextPageToken', None)
        if next_page is None:
            break
    print("Found {} bad files".format(len(bad_files)))

    # Download a backup of all files just in case.
    if not os.path.isdir('backup'):
        os.makedirs('backup')  # the original crashed if ./backup was missing
    for bad_item in bad_files:
        revisions = service.revisions().list(fileId=bad_item['id'], fields='*').execute()
        # Skip (instead of assert-crashing) files with a single revision,
        # consistent with the revert pass below.
        if len(revisions['revisions']) < 2:
            print("File has only 1 revision, skipping: {}".format(bad_item))
            continue
        dt = dateutil.parser.parse(revisions['revisions'][-1]['modifiedTime'])
        # Bug fix: the original wrote `dt.month = MONTH_OF_INFECTION` -- an
        # assignment inside the condition, which is a SyntaxError.
        if dt.day == DAY_OF_INFECTION and dt.month == MONTH_OF_INFECTION and dt.year == YEAR_OF_INFECTION:
            print("Last revision dates from virus day")
        else:
            # Bug fix: the format argument was missing in the original.
            print("Skipping {}, datastamp on file isn't from virus day".format(bad_item['name']))
            continue
        dt = dateutil.parser.parse(revisions['revisions'][-2]['modifiedTime'])
        print("Date of second to last revision is: {}".format(dt))
        request = service.revisions().get_media(fileId=bad_item['id'],
                                                revisionId=revisions['revisions'][-2]['id'])
        # Filenames are not unique in gdrive so append the file ID as well.
        new_filename = os.path.join('backup',
                                    revisions['revisions'][-2]['originalFilename'] + '_' + bad_item['id'])
        # If we are re-running the script, see if we already downloaded this file.
        if os.path.isfile(new_filename):
            print("File {} already backed up, skipping".format(new_filename))
            continue
        fh = io.FileIO(new_filename, 'wb')
        downloader = MediaIoBaseDownload(fh, request)
        done = False
        while done is False:
            status, done = downloader.next_chunk()
            print("Download {}".format(int(status.progress() * 100)))

    # Revert pass: delete the ransomware revision and restore the filename.
    count = 0
    for bad_item in bad_files:
        count = count + 1
        # Do in batches just to be kind of safe.
        if count > 50:
            break
        file_id = bad_item['id']
        revisions = service.revisions().list(fileId=file_id, fields='*').execute()
        if len(revisions['revisions']) < 2:
            print("File has only 1 revision, skipping: {}".format(bad_item))
            continue
        file_meta = service.files().get(fileId=file_id, fields='*').execute()
        dt_last = dateutil.parser.parse(revisions['revisions'][-1]['modifiedTime'])
        if dt_last.day == DAY_OF_INFECTION and dt_last.month == MONTH_OF_INFECTION and dt_last.year == YEAR_OF_INFECTION:
            print("Last revision dates from virus day")
        else:
            # Bug fix: the format argument was missing in the original.
            print("Skipping {}, datestamp on file isn't from virus day".format(bad_item['name']))
            continue
        target_rev_name = revisions['revisions'][-2]['originalFilename']
        # If the 2nd to last revision is also osiris, we can't simply revert.
        if 'osiris' in target_rev_name:
            print("2nd to last rev filename has osiris in the name, skipping: ({})".format(target_rev_name))
            # Print out some debug info so we can figure out why there are
            # multiple revisions with osiris.
            pp.pprint(file_meta)
            print(' ')
            pp.pprint(revisions)
            continue
        print("{}: {} revisions found".format(target_rev_name, len(revisions['revisions'])))
        # THESE ARE THE REALLY DANGEROUS STEPS, ONLY UNCOMMMENT IF YOU KNOW WHAT YOU ARE DOING!!!
        rev_id_to_delete = revisions['revisions'][-1]['id']
        print("service.revisions().delete(fileId={}, revisionId={}).execute()".format(file_id, rev_id_to_delete))
        #del_rev = service.revisions().delete(fileId=file_id, revisionId=rev_id_to_delete).execute()
        update_body = {'name': target_rev_name}
        print("service.files().update(fileId={}, body={}).execute()".format(file_id, update_body))
        #update_name = service.files().update(fileId=file_id, body=update_body).execute()
# Script entry point.
if __name__ == '__main__':
    main()
|
import time
import random
# Pacing and input configuration.
TIME_SPEED = .2
DEFAULT_PROMPT = "Please enter a direction to move in.\n"
list_of_valid_cardinal_directions = ["n", "s", "e", "w"]
NORTH = "n"
SOUTH = "s"
EAST = "e"
WEST = "w"
YES_RESPONSE = ["yes", "y", "yep"]
NO_RESPONSE = ["no", "n", "nope"]
# Collectible item identifiers.
HAT = "hat"
KEY = "key"
BUNNY = "bunny"
RING = "ring"
BOX = "box"
COMB = "comb"
# Mutable game state, with *_reset templates used by reset_game().
proton_pack_charge = 3
proton_pack_charge_reset = 3
library_question_index = 0
library_question_index_reset = 0
disco_ground_score_index = 0
disco_ground_score_index_reset = 0
ghost_fight_index = 0
ghost_fight_index_reset = 0
items = []
items_reset = []
# Rooms: [display name, intro text printed on entry].
KITCHEN = ["the kitchen", "It smells like something's burning..."]
CORRIDOR = ["a corridor", ("A light at the end of the hallway "
                           "piques your curiosity")]
GALLERY = ["the gallery of secrets", ("The room is full of mysterious "
                                      "cabinets, secured with an array of "
                                      "fancy padlocks.")]
ARBORETUM = ["the arboretum", ("The sumptuous fruit trees are tempting..."
                               "until you notice the feasting worms.")]
FREEZER = ["the walk-in freezer", ("The freezer has a detrimental effect on "
                                   "your proton pack's battery life.")]
ROTUNDA = ["the rotunda", ("You are surrounded by statues and "
                           "suits of armor.\n"
                           "You clap to test the room echo, and "
                           "are taken aback as one of the heads "
                           "swivels to investigate.")]
LIBRARY = ["the library", ("Bookshelves line every wall from "
                           "floor to ceiling\n"
                           "Surely there's a secret passageway "
                           "in here somewhere...")]
BROOM_CLOSET = ["the broom closet", ("You never knew one person "
                                     "could own so many brooms.")]
DISCO = ["the discotheque", ("The floor is littered with glitter and "
                             "forgotten shoes from the previous night.\n"
                             "Strangely no one seems to "
                             "remember what happened...")]
BATHROOM = ["a particularly lavish bathroom", ("The golden fixtures and "
                                               "marble toilet dazzle you..."
                                               "and then you notice the "
                                               "handpainted ceiling fresco.")]
INVALID = ["", ""]
# 5x5 floor plan, indexed game_map[y][x]; INVALID cells cannot be entered.
game_map = [[KITCHEN, CORRIDOR, GALLERY, CORRIDOR, ARBORETUM],
            [CORRIDOR, INVALID, CORRIDOR, INVALID, CORRIDOR],
            [FREEZER, CORRIDOR, ROTUNDA, CORRIDOR, LIBRARY],
            [CORRIDOR, INVALID, CORRIDOR, INVALID, CORRIDOR],
            [BROOM_CLOSET, CORRIDOR, DISCO, CORRIDOR, BATHROOM]]
# The player starts in the rotunda, at the center of the map.
x_coor = 2
y_coor = 2
start_position = game_map[y_coor][x_coor]
def print_pause(text_to_print):
    """Print a line of game text, then pause briefly so it reads naturally."""
    print(text_to_print)
    time.sleep(TIME_SPEED)
def intro():
    """Print the game's opening narration and mission briefing."""
    print_pause("Welcome to Ghostbusters: The Game!")
    print_pause("*cue theme song*")
    print_pause("<NAME> has been having a ghastly problem:")
    print_pause("His lavish D.C. estate is currently being haunted "
                "by the ghost of <NAME>.")
    print_pause("It's your job to explore the main floor of his mansion "
                "and rid the grounds of McNamera's ghost once and for all.")
    print_pause("Your proton pack only has enough charge for three capture "
                "attempts, so take proper aim and good luck!")
    print_pause("The butler shows you to the rotunda, where the most "
                "recent ghost-sighting happened earlier this week.")
def play_game():
    """Run one full game: narration, then the movement/response loop."""
    intro()
    get_move_response(y_coor, x_coor, DEFAULT_PROMPT)
def get_move_response(y_coor, x_coor, prompt):
    """Ask the player for a compass direction and act on it.

    Re-prompts (recursively) until a valid cardinal direction is entered.
    """
    move_response = input(prompt).lower()
    if move_response in list_of_valid_cardinal_directions:
        check_validity(y_coor, x_coor, move_response)
    else:
        # Bug fix: the original line ended with a stray ')' (SyntaxError)
        # and had an unbalanced quote around 'e' in the message.
        response_try_again = "Please enter a cardinal direction "\
                             "(ex. 'n', 's', 'e', 'w'.)\n"
        get_move_response(y_coor, x_coor, response_try_again)
def check_validity(y_coor, x_coor, move_response):
    """Resolve a direction into coordinates and move if the target is valid."""
    new_y_coor, new_x_coor = apply_direction(y_coor, x_coor, move_response)
    if invalid_move(new_y_coor, new_x_coor):
        invalid_move_response(y_coor, x_coor)
        return
    move(new_y_coor, new_x_coor)
def apply_direction(y_coor, x_coor, direction):
    """Return the (y, x) one step from the current spot in *direction*.

    Returns None for an unrecognized direction, as the original did.
    """
    deltas = {
        NORTH: (-1, 0),
        SOUTH: (1, 0),
        EAST: (0, 1),
        WEST: (0, -1),
    }
    if direction in deltas:
        dy, dx = deltas[direction]
        return (y_coor + dy, x_coor + dx)
def invalid_move(new_y_coor, new_x_coor):
    """Return True when (new_y_coor, new_x_coor) is off-map or an INVALID cell.

    Bounds are checked before indexing so out-of-range coordinates never
    reach game_map. Now always returns a bool; the original returned None
    for valid moves, which callers only ever used truthily.
    """
    if not (0 <= new_y_coor <= 4 and 0 <= new_x_coor <= 4):
        return True
    return game_map[new_y_coor][new_x_coor] == INVALID
def invalid_move_response(y_coor, x_coor):
    """Tell the player the move was blocked and re-prompt from the same spot."""
    print_pause("There doesn't appear to be a way out here.")
    get_move_response(y_coor, x_coor, ("Please select another "
                                       "direction to move in.\n"))
def move(y_coor, x_coor):
    """Enter the room at (y_coor, x_coor) on the game map."""
    room, room_intro = game_map[y_coor][x_coor]
    new_room(y_coor, x_coor, room, room_intro)
def new_room(y_coor, x_coor, room, room_intro):
    """Announce the room just entered, then hand control to its interactions."""
    print_pause(f"You have entered {room}.")
    print_pause(room_intro)
    is_room_interactive(y_coor, x_coor)
def is_room_interactive(y_coor, x_coor):
    """Run the special encounter for the current room (if any), then re-prompt."""
    interactions = [
        (GALLERY, gallery_lock_prompt),
        (DISCO, disco),
        (BATHROOM, bathroom),
        (LIBRARY, library),
        (FREEZER, freezer),
    ]
    current = game_map[y_coor][x_coor]
    for room, handler in interactions:
        if current == room:
            handler()
            break
    get_move_response(y_coor, x_coor, DEFAULT_PROMPT)
def yes_no_question(response):
    """Normalize a free-text answer to 'y'/'n', re-asking until recognized."""
    normalized = response.lower()
    if normalized in YES_RESPONSE:
        return "y"
    if normalized in NO_RESPONSE:
        return "n"
    try_again = input("Sorry, I don't understand. Please respond "
                      "with 'yes' or 'no'.\n")
    return yes_no_question(try_again)
def coin_flip():
    """Return 'y' or 'n' with equal probability (one randint draw per call)."""
    return "y" if random.randint(0, 1) == 0 else "n"
def gallery_lock_prompt():
    """Ask whether the player wants to try the gallery padlocks."""
    first_lock = input("Would you like to try any of the locks?\n")
    answer = yes_no_question(first_lock)
    if answer == "y":
        open_locks()
def open_locks():
    """Resolve the gallery locks: with the key, trigger the ghost encounter."""
    if KEY in items:
        print_pause("You try a few of the locks, to no avail...")
        print_pause("But wait...the key seems to fit into the door of "
                    "the old wardrobe in the corner.")
        print_pause("You turn the key and remove the "
                    "padlock when suddenly...")
        print_pause('OUT POPS A GHOST! "OOOOOOOOOOOO", the old warmonger '
                    "shouts, as he violently crashes about the room, waving "
                    "his arms in sheer terror.")
        # Extra taunt if the player looted the hat from the discotheque.
        if HAT in items:
            print_pause('"I see you stole my hat, you conniving thief! "'
                        "You'll pay for this!")
        ghost_fight_setup()
    else:
        print_pause("All of the locks are locked. "
                    "Perhaps there's a key around here somewhere...")
def disco():
    """Discotheque encounter: one-time ground-score loot (the hat)."""
    global disco_ground_score_index
    scan_floor = input("Would you like to scan the room for ground scores?\n")
    answer = yes_no_question(scan_floor)
    if answer == "y" and disco_ground_score_index == 0:
        print_pause("While rummaging through the fallen streamers and party "
                    "cups, you manage to find an unopened package "
                    "of Skittles and a sweet party hat.")
        pick_up = input("Would you like to pick these up?\n")
        answer = yes_no_question(pick_up)
        if answer == "y":
            print_pause("You put the hat on and tuck the Skittles "
                        "into your back pocket for later.")
            # Mark the loot as taken so it can't be picked up twice.
            disco_ground_score_index = 1
            items.append(HAT)
    elif answer == "y" and disco_ground_score_index == 1:
        print_pause("There doesn't appear to be anything else "
                    "interesting here.")
        # NOTE(review): "noticably sticker" is a typo in the game text
        # ("noticeably stickier") -- left as-is since it is runtime output.
        print_pause("Your shoes are noticably sticker now.")
def bathroom():
    """Bathroom encounter: the cabinet holding the ring, key, bunny and comb.

    Bug fix: the original membership checks were written as
    ``(BUNNY and KEY and RING and BOX and COMB) in items``, which evaluates
    the chained ``and`` first and therefore only tested the LAST constant's
    membership. Both conditions now require every listed item.
    """
    print_pause("Above the toilet are giant ebony cabinets, with "
                "large ivory handles.")
    open_drawers = input("Would you care to take a peek inside?\n")
    answer = yes_no_question(open_drawers)
    if answer == "y":
        if all(item in items for item in (BUNNY, KEY, RING, BOX, COMB)):
            print_pause("Haven't you ransacked this place enough??")
        elif all(item in items for item in (BUNNY, KEY, RING, BOX)):
            print_pause("The inside of the cabinet is mostly bare, except "
                        "for an ornate tortoise shell comb.")
            steal_comb = input("I bet you'd like to steal that too, wouldn't "
                               "you, you dirty thief?\n")
            answer = yes_no_question(steal_comb)
            if answer == "y":
                print_pause("You give your hair a quick run through "
                            "and add the comb to your Kissinger "
                            "memorabilia collection.")
                items.append(COMB)
        else:
            print_pause("The inside of the cabinet is mostly bare, "
                        "except for an ornate tortoise shell "
                        "comb and small jewelry box.")
            open_box = input("Would you like to open the jewelry box?\n")
            answer = yes_no_question(open_box)
            if answer == "y":
                if (BUNNY in items) and (KEY in items) and (RING in items):
                    all_items_retrieved()
                else:
                    print_pause("You peer inside of the jewelry box "
                                "to discover a large diamond ring, "
                                "an ornate brass key, and a dust bunny.")
                    pick_up_prompt = input("Would you like to pick "
                                           "any of these up?\n")
                    answer = yes_no_question(pick_up_prompt)
                    print(answer)
                    if answer == "y":
                        all_items_retrieved()
def pick_up_question():
    """Ask whether to keep looting the jewelry box; loop back in if yes."""
    question = input("Would you like to pick anything else up?\n")
    answer = yes_no_question(question)
    if answer == "y":
        all_items_retrieved()
def all_items_retrieved():
    """Offer the empty box once bunny/key/ring are held; otherwise keep looting."""
    if (BUNNY in items) and (KEY in items) and (RING in items):
        # NOTE(review): "enpty" is a typo in the game text ("empty") --
        # left as-is since it is runtime output.
        print_pause("There's nothing interesting about an enpty box...")
        take_box = input("...unless you'd like to take that as well?\n")
        answer = yes_no_question(take_box)
        if answer == "y":
            print_pause("Unbelievable. This game really caters to the "
                        "morally depraved, doesn't it?")
            print_pause("As you reach for the jewelry box, a loud clatter "
                        "in the hallway startles and distracts you.")
            print_pause("When you turn back around, the box is GONE.")
            print_pause("Back to work, Ghostbuster!")
            items.append(BOX)
    else:
        pick_up_items()
def pick_up_items():
    """Let the player pick up the bunny, ring, or key from the jewelry box.

    Re-prompts on unknown input; each successful pickup loops back through
    pick_up_question() so the player can keep collecting.
    """
    pick_up = input("Please type 'bunny', 'ring', or 'key' "
                    "to pick up an item.\n")
    if pick_up == "bunny":
        if BUNNY in items:
            print_pause("The bunny is long gone! Hop to it if you want "
                        "to catch that little rascal.")
        else:
            print_pause("You reach for the bunny, but it hops out of the "
                        "box and scampers off down the hallway.")
            items.append(BUNNY)
            pick_up_question()
    elif pick_up == "ring":
        if RING in items:
            print_pause("You frantically rummage through your pocket "
                        "for the ring, wrapping your fingers "
                        "around the smooth diamond.")
        else:
            print_pause("You dust off the ring and hurridly pocket it, "
                        "hoping no one noticed.")
            items.append(RING)
            pick_up_question()
    elif pick_up == "key":
        if KEY in items:
            print_pause("Better figure out what your key goes to "
                        "before you go looking for another.")
        else:
            print_pause("The key doesn't match the lock on the jewelry "
                        "box, so you put it in your pocket, "
                        "in case it proves useful later.")
            items.append(KEY)
            pick_up_question()
    else:
        print_pause("Sorry, I don't understand.")
        pick_up_items()
def library():
    """Offer the player a book; phrasing varies after the first visit."""
    global library_question_index
    if library_question_index == 0:
        library_question_index += 1
        prompt = "Would you like to choose a book?\n"
    else:
        prompt = "Would you like to choose another book?\n"
    answer = yes_no_question(input(prompt))
    book_choice(answer)
def book_choice(answer):
    """Pick a random book; book 3 leads to the Hogwarts sub-encounter.

    Declining ('n') resets library_question_index so the first-visit
    phrasing returns next time.
    """
    book_list = [["You select a dusty copy of the Canterbury Tales.",
                  "Sadly, moths have eaten away most of the pages, "
                  "and the book falls apart in your hands."],
                 ["You select a first-edition copy of 'How to Succeed "
                  "in Business Without Really Trying'",
                  "It appears to be autographed, but you can't tell if "
                  "the signature belongs to the author or owner."],
                 ["You select an old copy of the Holy Bible.",
                  "Upon opening it, you discover that the pages have "
                  "been cut out to make room for a whiskey flask."],
                 ["You select a copy of 'Hogwarts: A History'",
                  "Suddenly, a book across the room falls off "
                  "its shelf and crashes to the floor."]]
    if answer == "y":
        book_select = random.randint(0, 3)
        print_pause(book_list[book_select][0])
        print_pause(book_list[book_select][1])
        if book_select == 3:
            hogwarts_book()
        else:
            library()
    else:
        # global declarations apply to the whole function, so declaring
        # here (inside the else) is legal.
        global library_question_index
        library_question_index = 0
def hogwarts_book():
    """Flavor sub-encounter after the Hogwarts book falls; loops to library."""
    investigate = input("Would you like to go investigate?\n")
    answer = yes_no_question(investigate)
    if answer == "y":
        print_pause("You cross the room to retrieve the fallen book.")
        print_pause("The cover and pages of the book "
                    "are all completely blank!")
        print_pause("As you peer up at the shelf the book fell from, "
                    "you notice that the entire shelf is full of "
                    "these blank volumes.")
        print_pause("While odd, this seems to have "
                    "no particular significance.")
    library()
def freezer():
    """Drain one battery cell from the proton pack; end the game at zero."""
    global proton_pack_charge
    print_pause("Your charge depletes by one battery cell!")
    proton_pack_charge -= 1
    remaining = proton_pack_charge
    if remaining == 2:
        print_pause("This leaves you two more chances to catch the ghost.")
    elif remaining == 1:
        print_pause("You only have one more chance to catch the ghost!")
    elif remaining == 0:
        game_over(False)
def ghost_fight_setup():
    """Kick off the ghost-capture sequence after the wardrobe is opened."""
    print_pause("Now's your chance to rid the world of "
                "McNamara once and for all!")
    ghost_fight_text()
def ghost_fight_text():
    """Build the capture prompt (first attempt vs retry) and ask to proceed."""
    global ghost_fight_index
    prompt = ("Would you like to try to capture the ghost?\n"
              if ghost_fight_index == 0
              else "Would you like to try again?\n")
    ghost_fight_confirm(prompt)
def ghost_fight_confirm(ghost_fight_text):
    """Ask the player whether to fire; battle on yes, taunt on no.

    ghost_fight_text: the prompt string shown to the player.
    """
    global ghost_fight_index
    global proton_pack_charge
    reply = yes_no_question(input(ghost_fight_text))
    if reply != "y":
        # Declining just resets the encounter state.
        print_pause("What are you waiting for?! Who knows "
                    "when the ghost might appear again.")
        ghost_fight_index = 0
        return
    print_pause("You take aim and fire your proton pack at the ghost.")
    proton_pack_charge -= 1
    ghost_fight_index = 1
    ghost_battle()
def ghost_battle():
    """Resolve one capture attempt via coin_flip: trap the ghost or fail.

    Fix: removed the `global ghost_fight_index` declaration — this
    function never assigns it.
    """
    if coin_flip() == "y":
        print_pause("SUCCESS!")
        print_pause("After an intense few minutes, you manage to trap "
                    "the ghost and complete your mission!")
        game_over(True)
    else:
        failed_ghost_battle()
def failed_ghost_battle():
    """Handle a missed shot: retry while charge remains, otherwise lose."""
    if proton_pack_charge == 0:
        # Out of charge: the ghost gets away for good.
        print_pause("Your aim isn't so great, and "
                    "the ghost manages to escape!")
        game_over(False)
        return
    print_pause("Damn! The ghost escapes the beam of your proton "
                "pack and is even more irritated.")
    print_pause("He begins to taunt you mercilessly.")
    ghost_fight_text()
def reset_game():
    """Restore all mutable game state to its initial values and restart."""
    global items, proton_pack_charge, disco_ground_score_index
    global library_question_index, ghost_fight_index
    items = items_reset
    proton_pack_charge = proton_pack_charge_reset
    disco_ground_score_index = disco_ground_score_index_reset
    library_question_index = library_question_index_reset
    ghost_fight_index = ghost_fight_index_reset
    play_game()
def game_over(result):
    """Print the win/lose ending and offer a replay.

    result: True if the ghost was captured, False otherwise (all call
    sites pass bool literals).

    Fixes: removed the `global proton_pack_charge` declaration (never
    assigned here) and replaced the `result is False` identity test with
    a plain truth test.
    """
    if result:
        print_pause("Congratulations on your victory!")
        print_pause("Now it's time for you to purchase the second "
                    "installment in this series, 'Ghostbusters: Invoice':")
        print_pause("a zany journey through time and space to track down "
                    "<NAME> and obtain payment for your services.")
    else:
        print_pause("Unfortunately, your proton pack battery is now dead.")
        print_pause("<NAME> demands a refund.")
        print_pause("The final score for this round:")
        print_pause("Ghosts - 1")
        print_pause("Ghostbusters - 0")
    print_pause("GAME OVER")
    if yes_no_question(input("Would you like to play again?\n")) == "y":
        reset_game()
    else:
        exit()
# Start the adventure when the script runs.
play_game()
|
<filename>thanksmeter.py
#!/usr/bin/env python3
from datetime import datetime as dt
from dateutil.relativedelta import relativedelta as rd
from pywikibot import Page, Site
from pywikibot.exceptions import APIError
class ThanksMeter:
    """Build and publish the "thanks meter" statistics page on bg.wikipedia.

    Tallies 'thanks' log events into per-user received ('r') and sent ('s')
    counts and renders them as sortable wikitables on a report page.
    """

    def __init__(self):
        # Target wiki and the report page the tables are written to.
        self._site = Site('bg', fam='wikipedia')
        self._page = Page(self._site, 'Потребител:Iliev/Мерсиметър')

    def _get_thanks(self, since_datetime):
        """Count thanks received/sent since *since_datetime*.

        Returns a dict with keys 'r' (received, per page title), 's'
        (sent, per user) and 'c' (total event count), ordered by
        _sort_user_thanks.
        """
        thanks = dict(r=dict(), s=dict(), c=0)
        for thank in self._site.logevents(logtype='thanks', end=since_datetime):
            receiver = thank.page().title(with_ns=False)
            sender = thank.user()
            # dict.get with a default replaces the original duplicated
            # try/except KeyError counting blocks.
            thanks['r'][receiver] = thanks['r'].get(receiver, 0) + 1
            thanks['s'][sender] = thanks['s'].get(sender, 0) + 1
            thanks['c'] += 1
        return self._sort_user_thanks(thanks)

    def _sort_user_thanks(self, user_thanks_dict):
        """Order both tallies by count (descending), ties by name (ascending).

        Single composite-key sort; equivalent to the original alphabetical
        presort followed by a stable sort on the count, reversed.
        """
        def ordered(counts):
            return dict(sorted(counts.items(), key=lambda kv: (-kv[1], kv[0])))

        return dict(
            r=ordered(user_thanks_dict['r']),
            s=ordered(user_thanks_dict['s']),
            c=user_thanks_dict['c'],
        )

    def _draw_table(self, user_thanks_dict, title):
        """Append one sortable wikitable titled *title* to the page text."""
        self._page.text += '{| class="wikitable sortable col-2-right"\n'
        self._page.text += f'|+ {title}\n'
        self._page.text += '! Редактор !! Брой\n|-\n'
        for user, thanks in user_thanks_dict.items():
            self._page.text += f'| [[Потребител:{user}|{user}]] || {thanks}\n|-\n'
        self._page.text += '|}\n'

    def init_page(self):
        """Reset the page text to the auto-generated header."""
        script_url = 'https://github.com/kerberizer/wikimedia-scripts/blob/master/thanksmeter.py'
        self._page.text = f"''Тази страница е генерирана автоматично от [{script_url} скрипт] в "
        self._page.text += "'''{{subst:CURRENTTIME}}''' [[UTC]] на '''{{subst:CURRENTDAY}} "
        self._page.text += "{{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}''' г.''\n"

    def save_page(self):
        """Save the page; log (but deliberately do not re-raise) API errors."""
        try:
            self._page.save(summary='Бот: актуализация', minor=False)
        except APIError as e:
            print('ERROR: Cannot save page: APIError: ' + str(e))

    def draw_tables(self, since_datetime, group_title):
        """Append the received/sent tables for one reporting period."""
        thanks_help = ':en:Help:Notifications/Thanks'
        thanks = self._get_thanks(since_datetime)
        self._page.text += f'== {group_title} ==\n'
        self._page.text += f": ''Общо [[{thanks_help}|благодарности]]: "
        self._page.text += f"'''{thanks['c']}'''''\n"
        self._page.text += '<div style="float: left;">\n'
        self._draw_table(thanks['r'], 'Получени')
        self._page.text += '</div><div style="float: left;">\n'
        self._draw_table(thanks['s'], 'Изпратени')
        self._page.text += '</div>{{br}}\n'
def main():
    """Build and save the thanks-meter page for every reporting period."""
    now = dt.utcnow()
    # (period delta, section title) pairs, shortest period first.
    periods = (
        (rd(days=1), 'За последния ден'),
        (rd(weeks=1), 'За последната седмица'),
        (rd(months=1), 'За последния месец'),
        (rd(months=3), 'За последните три месеца'),
        (rd(years=1), 'За последната година'),
    )
    meter = ThanksMeter()
    meter.init_page()
    for delta, title in periods:
        meter.draw_tables(now - delta, title)
    meter.save_page()
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
# vim: set ts=4 sts=4 sw=4 et:
|
<reponame>alekseyl1992/pyrobuf
import unittest
from pyrobuf_list import BytesList, StringList
# Deliberately non-string sentinel; the lists under test must reject it
# with TypeError.
NON_STRING_VALUE = 1
class TestBytesList(unittest.TestCase):
    """Tests for pyrobuf's BytesList: only bytes values are accepted.

    Non-bytes values (ints, unicode text) must raise TypeError on append,
    extend, insert and slice assignment.

    Fix: test_insert_with_byte_string was renamed to
    test_insert_with_unicode_string — it inserts a unicode string, mirroring
    the append/extend unicode tests.
    """

    def test_append(self):
        bytes_list = BytesList()
        bytes_list.append(b"some string")
        self.assertEqual(bytes_list, [b"some string"])

    def test_append_with_non_string(self):
        bytes_list = BytesList()
        with self.assertRaises(TypeError):
            bytes_list.append(NON_STRING_VALUE)

    def test_append_with_unicode_string(self):
        bytes_list = BytesList()
        with self.assertRaises(TypeError):
            bytes_list.append(u"some string")

    def test_delitem(self):
        bytes_list = BytesList()
        bytes_list.extend([b"zero", b"one", b"two", b"three"])
        del bytes_list[2]
        self.assertEqual(bytes_list, [b"zero", b"one", b"three"])

    def test_delitem_with_slice(self):
        bytes_list = BytesList()
        bytes_list.extend([b"zero", b"one", b"two", b"three"])
        del bytes_list[1:3]
        self.assertEqual(bytes_list, [b"zero", b"three"])

    def test_extend(self):
        bytes_list = BytesList()
        bytes_list.extend([b"some string", b"another string"])
        self.assertEqual(bytes_list, [b"some string", b"another string"])

    def test_extend_with_non_string(self):
        bytes_list = BytesList()
        with self.assertRaises(TypeError):
            bytes_list.extend([b"some string", NON_STRING_VALUE])

    def test_extend_with_unicode_string(self):
        bytes_list = BytesList()
        with self.assertRaises(TypeError):
            bytes_list.extend([u"some string"])

    def test_extend_with_empty_sequence(self):
        bytes_list = BytesList()
        bytes_list.extend([])
        self.assertEqual(bytes_list, [])

    def test_insert(self):
        bytes_list = BytesList()
        bytes_list.insert(0, b"some string")
        self.assertEqual(bytes_list, [b"some string"])

    def test_insert_with_non_string(self):
        bytes_list = BytesList()
        with self.assertRaises(TypeError):
            bytes_list.insert(0, NON_STRING_VALUE)

    def test_insert_with_unicode_string(self):
        bytes_list = BytesList()
        with self.assertRaises(TypeError):
            bytes_list.insert(0, u"some string")

    def test_setitem_using_integer(self):
        bytes_list = BytesList()
        bytes_list.append(b"some string")
        bytes_list[0] = b"another string"
        self.assertEqual(bytes_list[0], b"another string")

    def test_setitem_using_slice(self):
        # This will call BytesList.__setslice__ on Python 2
        bytes_list = BytesList()
        bytes_list.extend([b"zero", b"one", b"two", b"three"])
        bytes_list[1:3] = [b"four", b"five"]
        self.assertEqual(bytes_list, [b"zero", b"four", b"five", b"three"])

    def test_setitem_using_slice_with_non_string(self):
        # This will call BytesList.__setslice__ on Python 2
        bytes_list = BytesList()
        bytes_list.extend([b"zero", b"one", b"two", b"three"])
        with self.assertRaises(TypeError):
            bytes_list[1:3] = [u"four", NON_STRING_VALUE]

    def test_setitem_using_slice_with_empty_sequence(self):
        # This will call BytesList.__setslice__ on Python 2
        bytes_list = BytesList()
        bytes_list.extend([b"zero", b"one", b"two", b"three"])
        bytes_list[1:3] = []
        self.assertEqual(bytes_list, [b"zero", b"three"])

    def test_setitem_using_stepped_slice(self):
        # This will call BytesList.__setitem__ on both Python 2 and Python 3
        bytes_list = BytesList()
        bytes_list.extend([b"zero", b"one", b"two", b"three"])
        bytes_list[0:3:2] = [b"four", b"five"]
        self.assertEqual(bytes_list, [b"four", b"one", b"five", b"three"])

    def test_setitem_using_stepped_slice_with_non_string(self):
        # This will call BytesList.__setitem__ on both Python 2 and Python 3
        bytes_list = BytesList()
        bytes_list.extend([b"zero", b"one", b"two", b"three"])
        with self.assertRaises(TypeError):
            bytes_list[0:3:2] = [b"four", NON_STRING_VALUE]
class TestStringList(unittest.TestCase):
    """Tests for pyrobuf's StringList.

    Unicode strings are stored as-is, byte strings are decoded as UTF-8,
    and non-string values raise TypeError.
    """

    def test_append(self):
        lst = StringList()
        lst.append(u"some string")
        self.assertEqual(lst, [u"some string"])

    def test_append_with_non_string(self):
        lst = StringList()
        with self.assertRaises(TypeError):
            lst.append(NON_STRING_VALUE)

    def test_append_with_byte_string(self):
        lst = StringList()
        lst.append(b"some string")
        self.assertEqual(lst[0], b"some string".decode('utf-8'))

    def test_delitem(self):
        lst = StringList()
        lst.extend([u"zero", u"one", u"two", u"three"])
        del lst[2]
        self.assertEqual(lst, [u"zero", u"one", u"three"])

    def test_delitem_with_slice(self):
        lst = StringList()
        lst.extend([u"zero", u"one", u"two", u"three"])
        del lst[1:3]
        self.assertEqual(lst, [u"zero", u"three"])

    def test_extend(self):
        lst = StringList()
        lst.extend([u"some string", u"another string"])
        self.assertEqual(lst, [u"some string", u"another string"])

    def test_extend_with_non_string(self):
        lst = StringList()
        with self.assertRaises(TypeError):
            lst.extend([u"some string", NON_STRING_VALUE])

    def test_extend_with_byte_string(self):
        lst = StringList()
        lst.extend([b"some string"])
        self.assertEqual(lst[0], b"some string".decode('utf-8'))

    def test_extend_with_empty_sequence(self):
        lst = StringList()
        lst.extend([])
        self.assertEqual(lst, [])

    def test_insert(self):
        lst = StringList()
        lst.insert(0, u"some string")
        self.assertEqual(lst, [u"some string"])

    def test_insert_with_non_string(self):
        lst = StringList()
        with self.assertRaises(TypeError):
            lst.insert(0, NON_STRING_VALUE)

    def test_insert_with_byte_string(self):
        lst = StringList()
        lst.insert(0, b"some string")
        self.assertEqual(lst[0], b"some string".decode('utf-8'))

    def test_setitem_using_integer(self):
        lst = StringList()
        lst.append(u"some string")
        lst[0] = u"another string"
        self.assertEqual(lst[0], u"another string")

    def test_setitem_using_slice(self):
        # This will call StringList.__setslice__ on Python 2
        lst = StringList()
        lst.extend([u"zero", u"one", u"two", u"three"])
        lst[1:3] = [u"four", u"five"]
        self.assertEqual(lst, [u"zero", u"four", u"five", u"three"])

    def test_setitem_using_slice_with_non_string(self):
        # This will call StringList.__setslice__ on Python 2
        lst = StringList()
        lst.extend([u"zero", u"one", u"two", u"three"])
        with self.assertRaises(TypeError):
            lst[1:3] = [u"four", NON_STRING_VALUE]

    def test_setitem_using_slice_with_empty_sequence(self):
        # This will call StringList.__setslice__ on Python 2
        lst = StringList()
        lst.extend([u"zero", u"one", u"two", u"three"])
        lst[1:3] = []
        self.assertEqual(lst, [u"zero", u"three"])

    def test_setitem_using_stepped_slice(self):
        # This will call StringList.__setitem__ on both Python 2 and Python 3
        lst = StringList()
        lst.extend([u"zero", u"one", u"two", u"three"])
        lst[0:3:2] = [u"four", u"five"]
        self.assertEqual(lst, [u"four", u"one", u"five", u"three"])

    def test_setitem_using_stepped_slice_with_non_string(self):
        # This will call StringList.__setitem__ on both Python 2 and Python 3
        lst = StringList()
        lst.extend([u"zero", u"one", u"two", u"three"])
        with self.assertRaises(TypeError):
            lst[0:3:2] = [u"four", NON_STRING_VALUE]
|
<gh_stars>1-10
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import logging as log
import os
import re
from pathlib import Path
from utils import VERBOSE, clean_odirs, rm_path
class LauncherError(Exception):
    """Raised for errors while launching or maintaining a job.

    Fix: chain to Exception.__init__ so str(err) and err.args carry the
    message (the original left them empty); .msg is kept for existing
    callers.
    """

    def __init__(self, msg):
        super().__init__(msg)
        self.msg = msg
class Launcher:
    """
    Abstraction for launching and maintaining a job.
    An abstract class that provides methods to prepare a job's environment,
    launch the job, poll for its completion and finally do some cleanup
    activities. This class is not meant to be instantiated directly. Each
    launcher object holds an instance of the deploy object.
    """
    # Type of launcher used as string.
    variant = None
    # Points to the python virtual env area.
    pyvenv = None
    # If a history of previous invocations is to be maintained, then keep no
    # more than this many directories.
    max_odirs = 5
    # Flag indicating the workspace preparation steps are complete.
    workspace_prepared = False
    # Class-wide set of cfg objects whose one-time, per-cfg workspace
    # preparation has already run (see __init__).
    workspace_prepared_for_cfg = set()
    @staticmethod
    def set_pyvenv(project):
        '''Activate a python virtualenv if available.
        The env variable <PROJECT>_PYTHON_VENV if set, points to the path
        containing the python virtualenv created specifically for this
        project. We can activate it if needed, before launching jobs using
        external compute machines.
        This is not applicable when running jobs locally on the user's machine.
        '''
        # Already resolved on a previous call — nothing to do.
        if Launcher.pyvenv is not None:
            return
        # If project-specific python virtualenv path is set, then activate it
        # before running downstream tools. This is more relevant when not
        # launching locally, but on external machines in a compute farm, which
        # may not have access to the default python installation area on the
        # host machine.
        #
        # The code below allows each launcher variant to set its own virtualenv
        # because the loading / activating mechanism could be different between
        # them.
        Launcher.pyvenv = os.environ.get("{}_PYVENV_{}".format(
            project.upper(), Launcher.variant.upper()))
        if not Launcher.pyvenv:
            # Fall back to the project-wide (variant-agnostic) setting.
            Launcher.pyvenv = os.environ.get("{}_PYVENV".format(
                project.upper()))
    @staticmethod
    def prepare_workspace(project, repo_top, args):
        '''Prepare the workspace based on the chosen launcher's needs.
        This is done once for the entire duration for the flow run.
        'project' is the name of the project.
        'repo_top' is the path to the repository.
        'args' are the command line args passed to dvsim.
        '''
        pass
    @staticmethod
    def prepare_workspace_for_cfg(cfg):
        '''Prepare the workspace for a cfg.
        This is invoked once for each cfg.
        'cfg' is the flow configuration object.
        '''
        pass
    def __str__(self):
        return self.deploy.full_name + ":launcher"
    def __init__(self, deploy):
        # 'deploy' is the deploy object this launcher runs; its sim_cfg
        # supplies the project/cfg context.
        cfg = deploy.sim_cfg
        # One-time preparation of the workspace.
        if not Launcher.workspace_prepared:
            self.prepare_workspace(cfg.project, cfg.proj_root, cfg.args)
            Launcher.workspace_prepared = True
        # One-time preparation of the workspace, specific to the cfg.
        if cfg not in Launcher.workspace_prepared_for_cfg:
            self.prepare_workspace_for_cfg(cfg)
            Launcher.workspace_prepared_for_cfg.add(cfg)
        # Store the deploy object handle.
        self.deploy = deploy
        # Return status of the process running the job.
        self.exit_code = None
        # Flag to indicate whether to 'overwrite' if odir already exists,
        # or to backup the existing one and create a new one.
        # For builds, we want to overwrite existing to leverage the tools'
        # incremental / partition compile features. For runs, we may want to
        # create a new one.
        self.renew_odir = False
        # Error message if the job fails.
        self.fail_msg = "\n**{!r}:** {!r}<br>\n".format(
            self.deploy.target.upper(), self.deploy.qual_name)
        self.fail_msg += "**LOG:** {}<br>\n".format(self.deploy.get_log_path())
    def _make_odir(self):
        """Create the output directory."""
        # If renew_odir flag is True - then move it.
        if self.renew_odir:
            clean_odirs(odir=self.deploy.odir, max_odirs=self.max_odirs)
        os.makedirs(self.deploy.odir, exist_ok=True)
    def _link_odir(self, status):
        """Soft-links the job's directory based on job's status.
        The dispatched, passed and failed directories in the scratch area
        provide a quick way to get to the job that was executed.
        """
        dest = Path(self.deploy.sim_cfg.links[status], self.deploy.qual_name)
        # If dest exists, then atomically remove it and link the odir again.
        while True:
            try:
                os.symlink(self.deploy.odir, dest)
                break
            except FileExistsError:
                rm_path(dest)
        # Delete the symlink from dispatched directory if it exists.
        if status != "D":
            old = Path(self.deploy.sim_cfg.links['D'], self.deploy.qual_name)
            rm_path(old)
    def _dump_env_vars(self, exports):
        """Write env vars to a file for ease of debug.
        Each extended class computes the list of exports and invokes this
        method right before launching the job.
        """
        with open(self.deploy.odir + "/env_vars",
                  "w",
                  encoding="UTF-8",
                  errors="surrogateescape") as f:
            for var in sorted(exports.keys()):
                f.write("{}={}\n".format(var, exports[var]))
    def _pre_launch(self):
        """Do pre-launch activities.
        Examples include such as preparing the job's environment, clearing
        old runs, creating the output directory, dumping all env variables
        etc. This method is already invoked by launch() as the first step.
        """
        self.deploy.pre_launch()
        self._make_odir()
    def _do_launch(self):
        """Launch the job."""
        raise NotImplementedError()
    def launch(self):
        """Launch the job."""
        self._pre_launch()
        self._do_launch()
    def poll(self):
        """Poll the launched job for completion.
        Invokes _check_status() and _post_finish() when the job completes.
        """
        raise NotImplementedError()
    def kill(self):
        """Terminate the job."""
        raise NotImplementedError()
    def _check_status(self):
        """Determine the outcome of the job (P/F if it ran to completion).
        Returns (status, err_msg) extracted from the log, where the status is
        "P" if the it passed, "F" otherwise. This is invoked by poll() just
        after the job finishes.
        """
        def _find_patterns(patterns, line):
            """Helper function that returns the pattern if any of the given
            patterns is found, else None."""
            assert patterns
            for pattern in patterns:
                match = re.search(r"{}".format(pattern), line)
                if match:
                    return pattern
            return None
        def _get_n_lines(pos, num):
            "Helper that returns num - 1 joined lines starting at index pos."
            return ''.join(lines[pos:pos + num - 1]).strip()
        if self.deploy.dry_run:
            return "P", None
        # Only one fail pattern needs to be seen.
        failed = False
        chk_failed = bool(self.deploy.fail_patterns)
        err_msg = None
        # All pass patterns need to be seen, so we replicate the list and remove
        # patterns as we encounter them.
        pass_patterns = self.deploy.pass_patterns.copy()
        chk_passed = bool(pass_patterns) and (self.exit_code == 0)
        try:
            with open(self.deploy.get_log_path(), "r", encoding="UTF-8",
                      errors="surrogateescape") as f:
                lines = f.readlines()
        except OSError as e:
            err_msg = "Error opening file {}:\n{}".format(
                self.deploy.get_log_path(), e)
            return "F", err_msg
        if chk_failed or chk_passed:
            for cnt, line in enumerate(lines):
                if chk_failed:
                    if _find_patterns(self.deploy.fail_patterns,
                                      line) is not None:
                        # Quote the matching line plus the next 3 lines to
                        # help debug more easily.
                        err_msg = "```\n{}\n```\n".format(_get_n_lines(cnt, 5))
                        failed = True
                        chk_failed = False
                        chk_passed = False
                if chk_passed:
                    pattern = _find_patterns(pass_patterns, line)
                    if pattern is not None:
                        pass_patterns.remove(pattern)
                        chk_passed = bool(pass_patterns)
        # If failed, then nothing else to do. Just return.
        if failed:
            assert err_msg is not None
            return "F", err_msg
        # If no fail patterns were seen, but the job returned with non-zero
        # exit code for whatever reason, then show the last 10 lines of the log
        # as the failure message, which might help with the debug.
        if self.exit_code != 0:
            err_msg = ("Job returned non-zero exit code:\nLast 10 lines:\n"
                       "```\n{}\n```\n")
            err_msg = err_msg.format(''.join(lines[-10:]).strip())
            return "F", err_msg
        # Ensure all pass patterns were seen.
        if chk_passed:
            err_msg = ("Some pass patterns missing:\n{}\nLast 10 lines:\n"
                       "```\n{}\n```\n")
            err_msg = err_msg.format(pass_patterns,
                                     ''.join(lines[-10:]).strip())
            return "F", err_msg
        assert err_msg is None
        return "P", None
    def _post_finish(self, status, err_msg):
        """Do post-completion activities, such as preparing the results.
        Must be invoked by poll(), after the job outcome is determined.
        """
        # 'K' indicates the job was killed; no odir link is made for it.
        assert status in ['P', 'F', 'K']
        if status in ['P', 'F']:
            self._link_odir(status)
        self.deploy.post_finish(status)
        log.debug("Item %s has completed execution: %s", self, status)
        if status != "P":
            self._log_fail_msg(err_msg)
    def _log_fail_msg(self, msg):
        """Logs the fail msg for the final report.
        Invoked in _post_finish() only if the job did not pass.
        """
        assert msg is not None
        self.fail_msg += msg
        log.log(VERBOSE, msg)
|
<filename>data.py
"""
author: leechh
"""
import os
import tensorflow as tf
from tqdm import trange
from math import ceil
from fiat.component.chunk import chunk
from fiat.component.path import mkdir
class TFR(object):
    """Write and read sharded TFRecord datasets.

    Samples are serialized to numbered shard files under *path*, and read
    back as train/valid tf.data pipelines split by shard index.
    """

    def __init__(self, path, count, feature_dict, shards=10, compression=None, c_level=None, seed=18473):
        """
        :param path: directory the .tfrecord shard files live in.
        :param count: total number of samples.
        :param feature_dict: mapping feature name -> type name, where the
            type name is one of 'int', 'float', 'bytes' and the keys must
            match the keys yielded by the data generator.
        :param shards: number of shard files. NOTE(review): a value >= 100
            is reinterpreted as "records per shard" — confirm intended.
        :param compression: 'GZIP', 'ZLIB' or ''
        :param c_level: compression level forwarded to TFRecordOptions.
        :param seed: default shuffle seed.
        """
        self.path = path
        self.count = count
        self.feature_dict = feature_dict
        self.shards = shards
        self.compression = compression
        self.c_level = c_level
        self.__seed = seed
        if shards >= 100:
            self.shards = ceil(self.count / shards)
        # Expected record count per shard index (1-based); write() replaces
        # these with the actual counts.
        count_lst = chunk(count, ceil(count / self.shards))
        self.countprefile = dict(zip(range(1, len(count_lst) + 1), count_lst))

    def seed(self, seed):
        """Replace the default shuffle seed."""
        self.__seed = seed

    def output_seed(self):
        """Return the current shuffle seed."""
        return self.__seed

    @staticmethod
    def __type_feature(_type, value):
        """Wrap *value* in the tf.train.Feature field matching *_type*."""
        if _type == 'bytes':
            return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
        if _type == 'int':
            return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
        if _type == 'float':
            # Fix: tf.train.Feature has no 'float64_list' field; the correct
            # keyword is 'float_list'.
            return tf.train.Feature(float_list=tf.train.FloatList(value=[value]))

    @staticmethod
    def __fix_len_feature(_type):
        """Return the FixedLenFeature parser spec for *_type*."""
        if _type == 'bytes':
            return tf.io.FixedLenFeature([], tf.string)
        if _type == 'int':
            return tf.io.FixedLenFeature([], tf.int64)
        if _type == 'float':
            return tf.io.FixedLenFeature([], tf.float32)

    def __options(self):
        """Build the TFRecordOptions from the configured compression."""
        return tf.io.TFRecordOptions(compression_type=self.compression, compression_level=self.c_level)

    def tfrecordname(self, idx):
        """Return the shard file path for 1-based shard index *idx*."""
        return os.path.join(self.path, '%03d-of-%03d.tfrecord' % (idx, self.shards))

    def write(self, data_generator, silence=False):
        """Serialize samples from *data_generator* into the shard files.

        Feature keys and type names come from self.feature_dict
        ('int', 'float', 'bytes').

        :param data_generator: iterator yielding dicts of feature values.
        :param silence: if True, do not show a tqdm progress bar.
        """
        # Create the output directory, then write shard by shard.
        mkdir(self.path)
        chunk_gen = chunk(self.count, ceil(self.count / self.shards))
        for idx, step in enumerate(chunk_gen):
            idx += 1
            tfrpath = self.tfrecordname(idx)
            writer = tf.io.TFRecordWriter(tfrpath, options=self.__options())
            self.countprefile[idx] = 0
            # Write this shard; if the generator runs dry, stop writing
            # further shards instead of raising StopIteration.
            try:
                if silence:
                    _range = range(step)
                else:
                    _range = trange(step)
                for _ in _range:
                    samples = next(data_generator)
                    # Fix: count only after a sample was actually produced,
                    # so the last shard's tally is not off by one when the
                    # generator is exhausted mid-shard.
                    self.countprefile[idx] += 1
                    # Build the per-key feature protos.
                    feature = {}
                    for key, _type in self.feature_dict.items():
                        feature[key] = TFR.__type_feature(_type, samples[key])
                    # Build and serialize the example.
                    example = tf.train.Example(features=tf.train.Features(feature=feature))
                    writer.write(example.SerializeToString())
            except StopIteration:
                break
            finally:
                writer.close()

    def read(self, decode_raw, split=10, valid=0, shuffle_buffer=100, augment=None,
             buffer_size=None, num_parallel_reads=None, seed=None):
        """Build train/valid tf.data pipelines from the shard files.

        :param decode_raw: mapping function applied to each parsed example.
        :param split: number of folds the shards are split into.
        :param valid: index of the fold used for validation.
        :param shuffle_buffer: shuffle buffer size; None disables shuffling.
        :param augment: callable (or list of callables) applied to the
            training dataset only.
        :param buffer_size: read buffer size for TFRecordDataset.
        :param num_parallel_reads: parallel reader count for TFRecordDataset.
        :param seed: shuffle seed; defaults to the instance seed.
        :return: zero-argument function producing
            (dataset_dict, train_count, valid_count).
        """
        if seed is None:
            seed = self.__seed
        countprefile = self.countprefile

        def readfunc():
            datadict = {}
            idx_dict = {}
            count_dict = {}
            step = int(self.shards / split)
            # Shard indices belonging to the validation fold; the rest train.
            idx_dict['valid'] = set([step * valid + i + 1 for i in range(step)])
            idx_dict['train'] = set(range(1, self.shards + 1)) - idx_dict['valid']
            for idx, val in idx_dict.items():
                files = tf.data.Dataset.list_files([self.tfrecordname(i) for i in val], shuffle=True, seed=seed)
                # Parser specs for every configured feature.
                features = {}
                for key, _type in self.feature_dict.items():
                    features[key] = TFR.__fix_len_feature(_type)
                dataset = tf.data.TFRecordDataset(files,
                                                  compression_type=self.compression,
                                                  buffer_size=buffer_size,
                                                  num_parallel_reads=num_parallel_reads)
                dataset = dataset.map(lambda raw: tf.io.parse_single_example(raw, features=features))
                dataset = dataset.shuffle(shuffle_buffer, seed=seed) if shuffle_buffer is not None else dataset
                dataset = dataset.map(decode_raw)
                # Data augmentation is applied to the training split only.
                if (idx == 'train') and (augment is not None):
                    if isinstance(augment, list):
                        for subfunc in augment:
                            dataset = subfunc(dataset)
                    else:
                        dataset = augment(dataset)
                datadict[idx] = dataset
                count_dict[idx] = sum([countprefile[i] for i in val])
            return datadict, count_dict['train'], count_dict['valid']
        return readfunc
##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of <NAME> nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import imath
import IECore
import Gaffer
import GafferTest
class PlugAlgoTest( GafferTest.TestCase ) :
	def testPromote( self ) :
		"""Promoting parents a plug under the Box and drives the original."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n1"] = GafferTest.AddNode()
		s["b"]["n1"]["op1"].setValue( -10 )
		s["n2"] = GafferTest.AddNode()
		self.assertTrue( Gaffer.PlugAlgo.canPromote( s["b"]["n1"]["op1"] ) )
		self.assertFalse( Gaffer.PlugAlgo.canPromote( s["n2"]["op1"], parent = s["b"]["user"] ) )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n1"]["op1"] ) )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n1"]["op2"] ) )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["n2"]["op1"] ) )
		p = Gaffer.PlugAlgo.promote( s["b"]["n1"]["op1"] )
		self.assertEqual( p.getName(), "op1" )
		self.assertTrue( p.parent().isSame( s["b"] ) )
		self.assertTrue( s["b"]["n1"]["op1"].getInput().isSame( p ) )
		self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n1"]["op1"] ) )
		self.assertFalse( Gaffer.PlugAlgo.canPromote( s["b"]["n1"]["op1"] ) )
		self.assertEqual( p.getValue(), -10 )
	def testPromoteColor( self ) :
		"""Promoting a Color3fPlug connects the plug and each component."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n"] = Gaffer.Node()
		s["b"]["n"]["c"] = Gaffer.Color3fPlug()
		s["b"]["n"]["c"].setValue( imath.Color3f( 1, 0, 1 ) )
		self.assertTrue( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"] ) )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"] ) )
		p = Gaffer.PlugAlgo.promote( s["b"]["n"]["c"] )
		self.assertTrue( isinstance( p, Gaffer.Color3fPlug ) )
		self.assertTrue( s["b"]["n"]["c"].getInput().isSame( p ) )
		self.assertTrue( s["b"]["n"]["c"]["r"].getInput().isSame( p["r"] ) )
		self.assertTrue( s["b"]["n"]["c"]["g"].getInput().isSame( p["g"] ) )
		self.assertTrue( s["b"]["n"]["c"]["b"].getInput().isSame( p["b"] ) )
		self.assertEqual( p.getValue(), imath.Color3f( 1, 0, 1 ) )
	def testPromoteCompoundPlugAndSerialise( self ) :
		"""Promoted compound plug values survive serialise/execute."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n"] = GafferTest.CompoundPlugNode()
		s["b"]["n"]["p"]["s"].setValue( "hello" )
		Gaffer.PlugAlgo.promote( s["b"]["n"]["p"] )
		ss = s.serialise()
		s = Gaffer.ScriptNode()
		s.execute( ss )
		self.assertEqual( s["b"]["n"]["p"]["s"].getValue(), "hello" )
	def testPromoteDynamicColorPlugAndSerialise( self ) :
		"""A promoted dynamic Color3fPlug survives serialise/execute."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n"] = Gaffer.Node()
		s["b"]["n"]["c"] = Gaffer.Color3fPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
		Gaffer.PlugAlgo.promote( s["b"]["n"]["c"] )
		ss = s.serialise()
		s = Gaffer.ScriptNode()
		s.execute( ss )
		self.assertTrue( isinstance( s["b"]["c"], Gaffer.Color3fPlug ) )
		self.assertTrue( s["b"]["n"]["c"].getInput().isSame( s["b"]["c"] ) )
	def testPromoteNonDynamicColorPlugAndSerialise( self ) :
		"""A promoted non-dynamic color plug keeps its value and component
		connections after serialise/execute."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n"] = Gaffer.Random()
		p = Gaffer.PlugAlgo.promote( s["b"]["n"]["baseColor"] )
		p.setValue( imath.Color3f( 1, 2, 3 ) )
		p.setName( "c" )
		self.assertTrue( isinstance( s["b"]["c"], Gaffer.Color3fPlug ) )
		self.assertTrue( s["b"]["n"]["baseColor"].getInput().isSame( s["b"]["c"] ) )
		self.assertTrue( s["b"]["n"]["baseColor"]["r"].getInput().isSame( s["b"]["c"]["r"] ) )
		self.assertTrue( s["b"]["n"]["baseColor"]["g"].getInput().isSame( s["b"]["c"]["g"] ) )
		self.assertTrue( s["b"]["n"]["baseColor"]["b"].getInput().isSame( s["b"]["c"]["b"] ) )
		self.assertEqual( s["b"]["c"].getValue(), imath.Color3f( 1, 2, 3 ) )
		s2 = Gaffer.ScriptNode()
		s2.execute( s.serialise() )
		self.assertTrue( isinstance( s2["b"]["c"], Gaffer.Color3fPlug ) )
		self.assertTrue( s2["b"]["n"]["baseColor"].getInput().isSame( s2["b"]["c"] ) )
		self.assertTrue( s2["b"]["n"]["baseColor"]["r"].getInput().isSame( s2["b"]["c"]["r"] ) )
		self.assertTrue( s2["b"]["n"]["baseColor"]["g"].getInput().isSame( s2["b"]["c"]["g"] ) )
		self.assertTrue( s2["b"]["n"]["baseColor"]["b"].getInput().isSame( s2["b"]["c"]["b"] ) )
		self.assertEqual( s2["b"]["c"].getValue(), imath.Color3f( 1, 2, 3 ) )
	def testCantPromoteNonSerialisablePlugs( self ) :
		"""Plugs without the Serialisable flag cannot be promoted."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n"] = Gaffer.Node()
		s["b"]["n"]["p"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default & ~Gaffer.Plug.Flags.Serialisable )
		self.assertEqual( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["p"] ), False )
		self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, s["b"]["n"]["p"] )
	def testUnpromoting( self ) :
		"""unpromote disconnects and removes the promoted plug."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n1"] = GafferTest.AddNode()
		p = Gaffer.PlugAlgo.promote( s["b"]["n1"]["op1"] )
		self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n1"]["op1"] ) )
		self.assertTrue( p.node().isSame( s["b"] ) )
		Gaffer.PlugAlgo.unpromote( s["b"]["n1"]["op1"] )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n1"]["op1"] ) )
		self.assertTrue( p.node() is None )
	def testColorUnpromoting( self ) :
		"""Unpromoting a compound plug also unpromotes its components."""
		s = Gaffer.ScriptNode()
		s["b"] = Gaffer.Box()
		s["b"]["n"] = Gaffer.Node()
		s["b"]["n"]["c"] = Gaffer.Color3fPlug()
		p = Gaffer.PlugAlgo.promote( s["b"]["n"]["c"] )
		self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"] ) )
		self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["r"] ) )
		self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["g"] ) )
		self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["b"] ) )
		self.assertTrue( p.node().isSame( s["b"] ) )
		Gaffer.PlugAlgo.unpromote( s["b"]["n"]["c"] )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"] ) )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["r"] ) )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["g"] ) )
		self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["b"] ) )
		self.assertTrue( p.node() is None )
def testIncrementalUnpromoting( self ) :
    """Unpromoting leaf channels one at a time: the parent stops being "promoted"
    as soon as any child is unpromoted, and the external plug is only removed
    once the last child has been unpromoted."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = Gaffer.Node()
    s["b"]["n"]["c"] = Gaffer.Color3fPlug()
    p = Gaffer.PlugAlgo.promote( s["b"]["n"]["c"] )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["r"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["g"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["b"] ) )
    self.assertTrue( p.node().isSame( s["b"] ) )
    # Unpromote "r" : the remaining channels stay promoted and the external plug survives.
    Gaffer.PlugAlgo.unpromote( s["b"]["n"]["c"]["r"] )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"] ) )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["r"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["g"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["b"] ) )
    self.assertTrue( p.node().isSame( s["b"] ) )
    # Unpromote "g" : only "b" remains promoted.
    Gaffer.PlugAlgo.unpromote( s["b"]["n"]["c"]["g"] )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"] ) )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["r"] ) )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["g"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["b"] ) )
    self.assertTrue( p.node().isSame( s["b"] ) )
    # Unpromote "b" : nothing remains promoted and the external plug is removed.
    Gaffer.PlugAlgo.unpromote( s["b"]["n"]["c"]["b"] )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"] ) )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["r"] ) )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["g"] ) )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( s["b"]["n"]["c"]["b"] ) )
    self.assertTrue( p.node() is None )
def testCantPromoteReadOnlyPlug( self ) :
    """Read-only plugs (simple or compound) must be rejected, and a failed
    promotion must leave the Box completely unmodified."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = Gaffer.Node()
    s["b"]["n"]["i"] = Gaffer.IntPlug()
    s["b"]["n"]["c"] = Gaffer.Color3fPlug()
    self.assertTrue( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["i"] ) )
    self.assertTrue( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"] ) )
    self.assertTrue( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"]["r"] ) )
    s["b"]["n"]["i"].setFlags( Gaffer.Plug.Flags.ReadOnly, True )
    s["b"]["n"]["c"].setFlags( Gaffer.Plug.Flags.ReadOnly, True )
    s["b"]["n"]["c"]["r"].setFlags( Gaffer.Plug.Flags.ReadOnly, True )
    self.assertFalse( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["i"] ) )
    self.assertFalse( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"] ) )
    self.assertFalse( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"]["r"] ) )
    self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, s["b"]["n"]["i"] )
    self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, s["b"]["n"]["c"] )
    self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, s["b"]["n"]["c"]["r"] )
    # Snapshot the Box contents so we can verify the failed promote changed nothing.
    k = s["b"].keys()
    uk = s["b"]["user"].keys()
    try :
        Gaffer.PlugAlgo.promote( s["b"]["n"]["i"] )
    # NOTE: Python 2 except syntax - this file targets Python 2.
    except Exception, e :
        self.assertTrue( "Cannot promote" in str( e ) )
        self.assertTrue( "read only" in str( e ) )
    self.assertEqual( s["b"].keys(), k )
    self.assertEqual( s["b"]["user"].keys(), uk )
def testCantPromotePlugWithReadOnlyChildren( self ) :
    """A compound plug with any read-only child must be rejected, and a failed
    promotion must leave the Box unmodified."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = Gaffer.Node()
    s["b"]["n"]["c"] = Gaffer.Color3fPlug()
    self.assertTrue( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"] ) )
    self.assertTrue( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"]["r"] ) )
    # Making only one child read-only poisons the whole compound plug.
    s["b"]["n"]["c"]["r"].setFlags( Gaffer.Plug.Flags.ReadOnly, True )
    self.assertFalse( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"] ) )
    self.assertFalse( Gaffer.PlugAlgo.canPromote( s["b"]["n"]["c"]["r"] ) )
    self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, s["b"]["n"]["c"] )
    self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, s["b"]["n"]["c"]["r"] )
    # Snapshot the Box contents so we can verify the failed promote changed nothing.
    k = s["b"].keys()
    uk = s["b"]["user"].keys()
    try :
        Gaffer.PlugAlgo.promote( s["b"]["n"]["c"] )
    # NOTE: Python 2 except syntax - this file targets Python 2.
    except Exception, e :
        self.assertTrue( "Cannot promote" in str( e ) )
        self.assertTrue( "read only" in str( e ) )
    self.assertEqual( s["b"].keys(), k )
    self.assertEqual( s["b"]["user"].keys(), uk )
def testMakePlugReadOnlyAfterPromoting( self ) :
    """A plug made read-only *after* promotion still receives values through
    the promoted connection."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = GafferTest.AddNode()
    s["b"]["n"]["op1"].setValue( 0 )
    s["b"]["n"]["op2"].setValue( 0 )
    self.assertEqual( s["b"]["n"]["sum"].getValue(), 0 )
    op1 = Gaffer.PlugAlgo.promote( s["b"]["n"]["op1"] )
    s["b"]["n"]["op1"].setFlags( Gaffer.Plug.Flags.ReadOnly, True )
    # Setting the external plug still drives the internal (now read-only) plug.
    op1.setValue( 1 )
    self.assertEqual( s["b"]["n"]["sum"].getValue(), 1 )
def testPromoteOutputPlug( self ) :
    """Output plugs promote as Out-direction plugs fed *by* the internal plug,
    and unpromoting removes the external plug again.

    Fix: the local variable was named ``sum``, shadowing the builtin; renamed
    to ``promotedSum`` (behaviour unchanged).
    """
    b = Gaffer.Box()
    b["n"] = GafferTest.AddNode()
    self.assertTrue( Gaffer.PlugAlgo.canPromote( b["n"]["sum"] ) )
    promotedSum = Gaffer.PlugAlgo.promote( b["n"]["sum"] )
    self.assertTrue( b.isAncestorOf( promotedSum ) )
    # Promoted outputs point the other way : external plug takes its input from the node.
    self.assertTrue( promotedSum.direction() == Gaffer.Plug.Direction.Out )
    self.assertEqual( promotedSum.getInput(), b["n"]["sum"] )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( b["n"]["sum"] ) )
    # A plug can only be promoted once.
    self.assertFalse( Gaffer.PlugAlgo.canPromote( b["n"]["sum"] ) )
    self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, b["n"]["sum"] )
    # Values computed by the node are visible on the promoted output.
    b["n"]["op1"].setValue( 10 )
    b["n"]["op2"].setValue( 12 )
    self.assertEqual( promotedSum.getValue(), 22 )
    Gaffer.PlugAlgo.unpromote( b["n"]["sum"] )
    self.assertFalse( Gaffer.PlugAlgo.isPromoted( b["n"]["sum"] ) )
    self.assertTrue( promotedSum.parent() is None )
    self.assertTrue( Gaffer.PlugAlgo.canPromote( b["n"]["sum"] ) )
def testPromoteDynamicBoxPlugAndSerialise( self ) :
    """A promoted dynamic Box2iPlug (value, connections and rename) must
    survive a serialise/execute round trip."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = Gaffer.Node()
    s["b"]["n"]["p"] = Gaffer.Box2iPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    p = Gaffer.PlugAlgo.promote( s["b"]["n"]["p"] )
    p.setValue( imath.Box2i( imath.V2i( 1, 2 ), imath.V2i( 3, 4 ) ) )
    p.setName( "c" )
    self.assertTrue( isinstance( s["b"]["c"], Gaffer.Box2iPlug ) )
    self.assertTrue( s["b"]["n"]["p"].getInput().isSame( s["b"]["c"] ) )
    self.assertTrue( s["b"]["n"]["p"]["min"].getInput().isSame( s["b"]["c"]["min"] ) )
    self.assertTrue( s["b"]["n"]["p"]["max"].getInput().isSame( s["b"]["c"]["max"] ) )
    self.assertEqual( s["b"]["c"].getValue(), imath.Box2i( imath.V2i( 1, 2 ), imath.V2i( 3, 4 ) ) )
    # Round-trip through serialisation and re-check everything on the copy.
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    self.assertTrue( isinstance( s2["b"]["c"], Gaffer.Box2iPlug ) )
    self.assertTrue( s2["b"]["n"]["p"].getInput().isSame( s2["b"]["c"] ) )
    self.assertTrue( s2["b"]["n"]["p"]["min"].getInput().isSame( s2["b"]["c"]["min"] ) )
    self.assertTrue( s2["b"]["n"]["p"]["max"].getInput().isSame( s2["b"]["c"]["max"] ) )
    self.assertEqual( s2["b"]["c"].getValue(), imath.Box2i( imath.V2i( 1, 2 ), imath.V2i( 3, 4 ) ) )
def testPromoteStaticPlugsWithChildren( self ) :
    """Promoting a static compound plug preserves child values and
    connections across serialisation."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = GafferTest.CompoundPlugNode()
    s["b"]["n"]["valuePlug"]["i"].setValue( 10 )
    p = Gaffer.PlugAlgo.promote( s["b"]["n"]["valuePlug"] )
    p.setName( "p" )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    self.assertEqual( s2["b"]["n"]["valuePlug"]["i"].getValue(), 10 )
    self.assertTrue( s2["b"]["n"]["valuePlug"]["i"].getInput().isSame( s2["b"]["p"]["i"] ) )
def testPromoteDynamicPlugsWithChildren( self ) :
    """Promoting dynamic nested Plug / ValuePlug hierarchies connects every
    level, and the whole structure survives serialisation."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = Gaffer.Node()
    # Two parallel two-deep hierarchies : plain Plugs and ValuePlugs.
    s["b"]["n"]["user"]["p"] = Gaffer.Plug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["b"]["n"]["user"]["p"]["p"] = Gaffer.Plug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["b"]["n"]["user"]["p"]["p"]["i"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["b"]["n"]["user"]["v"] = Gaffer.ValuePlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["b"]["n"]["user"]["v"]["v"] = Gaffer.ValuePlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["b"]["n"]["user"]["v"]["v"]["i"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    p = Gaffer.PlugAlgo.promote( s["b"]["n"]["user"]["p"] )
    p.setName( "p" )
    p["p"]["i"].setValue( 10 )
    v = Gaffer.PlugAlgo.promote( s["b"]["n"]["user"]["v"] )
    v.setName( "v" )
    v["v"]["i"].setValue( 20 )
    # Shared assertions, run against both the original and the deserialised copy.
    def assertValid( script ) :
        self.assertEqual( script["b"]["n"]["user"]["p"]["p"]["i"].getValue(), 10 )
        self.assertTrue( script["b"]["n"]["user"]["p"]["p"]["i"].getInput().isSame( script["b"]["p"]["p"]["i"] ) )
        self.assertTrue( script["b"]["n"]["user"]["p"]["p"].getInput().isSame( script["b"]["p"]["p"] ) )
        self.assertTrue( script["b"]["n"]["user"]["p"].getInput().isSame( script["b"]["p"] ) )
        self.assertEqual( script["b"]["n"]["user"]["v"]["v"]["i"].getValue(), 20 )
        self.assertTrue( script["b"]["n"]["user"]["v"]["v"]["i"].getInput().isSame( script["b"]["v"]["v"]["i"] ) )
        self.assertTrue( script["b"]["n"]["user"]["v"]["v"].getInput().isSame( script["b"]["v"]["v"] ) )
        self.assertTrue( script["b"]["n"]["user"]["v"].getInput().isSame( script["b"]["v"] ) )
    assertValid( s )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    assertValid( s2 )
def testPromoteArrayPlug( self ) :
    """Promoted array plugs keep their element connections and survive serialisation."""
    script = Gaffer.ScriptNode()
    script["a"] = GafferTest.AddNode()
    script["b"] = Gaffer.Box()
    script["b"]["n"] = GafferTest.ArrayPlugNode()
    promoted = Gaffer.PlugAlgo.promote( script["b"]["n"]["in"] )
    promoted.setName( "p" )
    # Connecting into the promoted array grows it (two inputs -> three elements).
    script["b"]["p"][0].setInput( script["a"]["sum"] )
    script["b"]["p"][1].setInput( script["a"]["sum"] )
    self.assertEqual( len( script["b"]["n"]["in"] ), 3 )
    self.assertTrue( script["b"]["n"]["in"].getInput().isSame( script["b"]["p"] ) )
    # The same structure must come back after a serialise/execute round trip.
    copy = Gaffer.ScriptNode()
    copy.execute( script.serialise() )
    self.assertEqual( len( copy["b"]["n"]["in"] ), 3 )
    self.assertTrue( copy["b"]["n"]["in"].getInput().isSame( copy["b"]["p"] ) )
def testPromotionIncludesArbitraryMetadata( self ) :
    """Metadata registered on the source plug is copied to the promoted plug
    and persists through serialisation."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = Gaffer.Node()
    s["b"]["n"]["user"]["p"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    Gaffer.Metadata.registerValue( s["b"]["n"]["user"]["p"], "testInt", 10 )
    Gaffer.Metadata.registerValue( s["b"]["n"]["user"]["p"], "testString", "test" )
    p = Gaffer.PlugAlgo.promote( s["b"]["n"]["user"]["p"] )
    p.setName( "p" )
    self.assertEqual( Gaffer.Metadata.value( p, "testInt" ), 10 )
    self.assertEqual( Gaffer.Metadata.value( p, "testString" ), "test" )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    self.assertEqual( Gaffer.Metadata.value( s2["b"]["p"], "testInt" ), 10 )
    self.assertEqual( Gaffer.Metadata.value( s2["b"]["p"], "testString" ), "test" )
def testPromotionIncludesArbitraryChildMetadata( self ) :
    """Metadata on a compound plug AND on its children is copied on promotion
    and persists through serialisation."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n"] = Gaffer.Node()
    s["b"]["n"]["user"]["p"] = Gaffer.Plug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    s["b"]["n"]["user"]["p"]["i"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    Gaffer.Metadata.registerValue( s["b"]["n"]["user"]["p"], "testInt", 10 )
    Gaffer.Metadata.registerValue( s["b"]["n"]["user"]["p"]["i"], "testString", "test" )
    p = Gaffer.PlugAlgo.promote( s["b"]["n"]["user"]["p"] )
    p.setName( "p" )
    self.assertEqual( Gaffer.Metadata.value( p, "testInt" ), 10 )
    self.assertEqual( Gaffer.Metadata.value( p["i"], "testString" ), "test" )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    self.assertEqual( Gaffer.Metadata.value( s2["b"]["p"], "testInt" ), 10 )
    self.assertEqual( Gaffer.Metadata.value( s2["b"]["p"]["i"], "testString" ), "test" )
def testPromoteToNonBoxParent( self ) :
    """Promotion also works when the parent is a plain Node rather than a Box.

    Fix: the post-unpromote assertion was ``"op1" not in "n"`` - a substring
    test against the literal string ``"n"``, which is trivially true and
    tested nothing. It now checks membership in the node ``n`` itself.
    """
    n = Gaffer.Node()
    n["n"] = GafferTest.AddNode()
    self.assertTrue( Gaffer.PlugAlgo.canPromote( n["n"]["op1"] ) )
    p = Gaffer.PlugAlgo.promote( n["n"]["op1"] )
    self.assertTrue( p.isSame( n["op1"] ) )
    self.assertTrue( n["n"]["op1"].getInput().isSame( n["op1"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( n["n"]["op1"] ) )
    # Promotion to a non-Box parent does not mark the plug dynamic.
    self.assertFalse( n["op1"].getFlags( Gaffer.Plug.Flags.Dynamic ) )
    Gaffer.PlugAlgo.unpromote( n["n"]["op1"] )
    self.assertTrue( "op1" not in n )
    self.assertTrue( n["n"]["op1"].getInput() is None )
def testPromotionParent( self ) :
    """The explicit ``parent`` argument must belong to an ancestor of the
    plug's node; an unrelated node's plug is rejected."""
    n1 = Gaffer.Node()
    n1["n"] = GafferTest.AddNode()
    n2 = Gaffer.Node()
    self.assertTrue( Gaffer.PlugAlgo.canPromote( n1["n"]["op1"], parent = n1["user"] ) )
    # n2 is unrelated to n1["n"], so promotion there must fail.
    self.assertFalse( Gaffer.PlugAlgo.canPromote( n1["n"]["op1"], parent = n2["user"] ) )
    self.assertRaises( RuntimeError, Gaffer.PlugAlgo.promote, n1["n"]["op1"], parent = n2["user"] )
    p = Gaffer.PlugAlgo.promote( n1["n"]["op1"], parent = n1["user"] )
    self.assertTrue( p.parent().isSame( n1["user"] ) )
    self.assertTrue( Gaffer.PlugAlgo.isPromoted( n1["n"]["op1"] ) )
def testPromotionExcludingMetadata( self ) :
    """``excludeMetadata`` filters which metadata entries are copied on promotion."""
    n = Gaffer.Node()
    n["a"] = GafferTest.AddNode()
    Gaffer.Metadata.registerValue( n["a"]["op1"], "test", "testValue" )
    Gaffer.Metadata.registerValue( n["a"]["op2"], "test", "testValue" )
    # Default behaviour : metadata is copied.
    p1 = Gaffer.PlugAlgo.promote( n["a"]["op1"] )
    self.assertEqual( Gaffer.Metadata.value( p1, "test" ), "testValue" )
    # Excluding everything ("*") copies no metadata at all.
    p2 = Gaffer.PlugAlgo.promote( n["a"]["op2"], excludeMetadata = "*" )
    self.assertEqual( Gaffer.Metadata.value( p2, "test" ), None )
def testPromotedNonBoxMetadataIsNonPersistent( self ) :
    """Metadata copied during promotion to a non-Box parent is registered,
    but as non-persistent (it won't be serialised)."""
    n = Gaffer.Node()
    n["a"] = GafferTest.AddNode()
    Gaffer.Metadata.registerValue( n["a"]["op1"], "testPersistence", 10 )
    p = Gaffer.PlugAlgo.promote( n["a"]["op1"] )
    self.assertEqual( Gaffer.Metadata.value( p, "testPersistence" ), 10 )
    self.assertTrue( "testPersistence" in Gaffer.Metadata.registeredValues( p ) )
    # Present, but excluded from the persistent set.
    self.assertTrue( "testPersistence" not in Gaffer.Metadata.registeredValues( p, persistentOnly = True ) )
def testPromoteWithName( self ) :
    """promoteWithName() names the promoted plug directly."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["n1"] = GafferTest.AddNode()
    p = Gaffer.PlugAlgo.promoteWithName( s["b"]["n1"]["op1"], 'newName' )
    self.assertEqual( p.getName(), 'newName' )
def testPromotePlugWithDescendantValues( self ) :
    """Values set on nested descendants are preserved when the top-level plug
    is promoted (the promoted plug drives them back unchanged)."""
    n = Gaffer.Node()
    n["a"] = Gaffer.Node()
    n["a"]["p"] = Gaffer.Plug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    n["a"]["p"]["c"] = Gaffer.Plug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    n["a"]["p"]["c"]["i"] = Gaffer.IntPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    n["a"]["p"]["c"]["v"] = Gaffer.V3fPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
    n["a"]["p"]["c"]["i"].setValue( 10 )
    n["a"]["p"]["c"]["v"].setValue( imath.V3f( 1, 2, 3 ) )
    p = Gaffer.PlugAlgo.promote( n["a"]["p"] )
    self.assertEqual( n["a"]["p"]["c"]["i"].getValue(), 10 )
    self.assertEqual( n["a"]["p"]["c"]["v"].getValue(), imath.V3f( 1, 2, 3 ) )
def testPromoteNonSerialisableOutput( self ) :
    """An *output* without the Serialisable flag can still be promoted, and
    the promoted connection is recreated on deserialisation."""
    s = Gaffer.ScriptNode()
    s["b"] = Gaffer.Box()
    s["b"]["a"] = GafferTest.AddNode()
    s["b"]["a"]["sum"].setFlags( Gaffer.Plug.Flags.Serialisable, False )
    Gaffer.PlugAlgo.promote( s["b"]["a"]["sum"] )
    self.assertTrue( s["b"]["sum"].getInput().isSame( s["b"]["a"]["sum"] ) )
    s2 = Gaffer.ScriptNode()
    s2.execute( s.serialise() )
    self.assertTrue( s2["b"]["sum"].getInput().isSame( s2["b"]["a"]["sum"] ) )
# Allow running this test file directly.
if __name__ == "__main__":
    unittest.main()
|
"""Traefik implementation
Custom proxy implementations can subclass :class:`Proxy`
and register in JupyterHub config:
.. sourcecode:: python
from mymodule import MyProxy
c.JupyterHub.proxy_class = MyProxy
Route Specification:
- A routespec is a URL prefix ([host]/path/), e.g.
'host.tld/path/' for host-based routing or '/path/' for default routing.
- Route paths should be normalized to always start and end with '/'
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import json
import os
from urllib.parse import urlparse
import string
import base64
import asyncio
import consul.aio
import escapism
from tornado.concurrent import run_on_executor
from traitlets import Any, default, Unicode
from . import traefik_utils
from jupyterhub_traefik_proxy import TKvProxy
import time
class TraefikConsulProxy(TKvProxy):
    """JupyterHub Proxy implementation using traefik and Consul.

    Routes are stored in Consul's KV store under ``kv_jupyterhub_prefix`` and
    consumed by traefik via its consul backend under ``kv_traefik_prefix``.
    All writes go through Consul transactions so a route's parts are added or
    removed atomically.
    """

    # Consul doesn't accept keys containing // or starting with / so we have to escape them
    key_safe_chars = string.ascii_letters + string.digits + "!@#$%^&*();<>_-.+?:"

    kv_name = "consul"

    consul_client_ca_cert = Unicode(
        config=True,
        allow_none=True,
        default_value=None,
        help="""Consul client root certificates""",
    )

    @default("kv_url")
    def _default_kv_url(self):
        # Consul's default HTTP API endpoint.
        return "http://127.0.0.1:8500"

    @default("kv_client")
    def _default_client(self):
        """Build the async Consul client, authenticating with kv_password if set."""
        consul_service = urlparse(self.kv_url)
        if self.kv_password:
            client = consul.aio.Consul(
                host=str(consul_service.hostname),
                port=consul_service.port,
                token=self.kv_password,
                cert=self.consul_client_ca_cert,
            )
            # Ensure every request carries the ACL token, not just the ones
            # python-consul adds it to itself.
            client.http._session._default_headers.update(
                {"X-Consul-Token": self.kv_password}
            )
            return client
        return consul.aio.Consul(
            host=str(consul_service.hostname),
            port=consul_service.port,
            cert=self.consul_client_ca_cert,
        )

    @default("kv_traefik_prefix")
    def _default_kv_traefik_prefix(self):
        return "traefik/"

    @default("kv_jupyterhub_prefix")
    def _default_kv_jupyterhub_prefix(self):
        return "jupyterhub/"

    def _define_kv_specific_static_config(self):
        """Point traefik's static config at this Consul endpoint and prefix."""
        self.static_config["consul"] = {
            "endpoint": str(urlparse(self.kv_url).hostname)
            + ":"
            + str(urlparse(self.kv_url).port),
            "prefix": self.kv_traefik_prefix,
            "watch": True,
        }

    def _launch_traefik(self, config_type):
        # Traefik reads its Consul ACL token from the environment.
        # NOTE(review): assumes kv_password is a str (possibly empty), since
        # os.environ values must be strings - confirm with TKvProxy defaults.
        os.environ["CONSUL_HTTP_TOKEN"] = self.kv_password
        super()._launch_traefik(config_type)

    async def _kv_atomic_add_route_parts(
        self, jupyterhub_routespec, target, data, route_keys, rule
    ):
        """Atomically write all KV entries describing one route.

        Returns (status, response): status 1 on success with empty response,
        or 0 with the stringified exception on failure.
        """
        escaped_target = escapism.escape(target, safe=self.key_safe_chars)
        escaped_jupyterhub_routespec = escapism.escape(
            jupyterhub_routespec, safe=self.key_safe_chars
        )

        try:
            # Consul transaction values must be base64 encoded.
            await self.kv_client.txn.put(
                payload=[
                    {
                        "KV": {
                            "Verb": "set",
                            "Key": escaped_jupyterhub_routespec,
                            "Value": base64.b64encode(target.encode()).decode(),
                        }
                    },
                    {
                        "KV": {
                            "Verb": "set",
                            "Key": escaped_target,
                            "Value": base64.b64encode(data.encode()).decode(),
                        }
                    },
                    {
                        "KV": {
                            "Verb": "set",
                            "Key": route_keys.backend_url_path,
                            "Value": base64.b64encode(target.encode()).decode(),
                        }
                    },
                    {
                        "KV": {
                            "Verb": "set",
                            "Key": route_keys.backend_weight_path,
                            "Value": base64.b64encode(b"1").decode(),
                        }
                    },
                    {
                        "KV": {
                            "Verb": "set",
                            "Key": route_keys.frontend_backend_path,
                            "Value": base64.b64encode(
                                route_keys.backend_alias.encode()
                            ).decode(),
                        }
                    },
                    {
                        "KV": {
                            "Verb": "set",
                            "Key": route_keys.frontend_rule_path,
                            "Value": base64.b64encode(rule.encode()).decode(),
                        }
                    },
                ]
            )
            status = 1
            response = ""
        except Exception as e:
            status = 0
            response = str(e)
        return status, response

    async def _kv_atomic_delete_route_parts(self, jupyterhub_routespec, route_keys):
        """Atomically delete all KV entries describing one route.

        Returns (status, response) like _kv_atomic_add_route_parts, or
        (True, None) when the route doesn't exist.
        """
        escaped_jupyterhub_routespec = escapism.escape(
            jupyterhub_routespec, safe=self.key_safe_chars
        )

        index, v = await self.kv_client.kv.get(escaped_jupyterhub_routespec)
        if v is None:
            self.log.warning("Route %s doesn't exist. Nothing to delete", jupyterhub_routespec)
            return True, None
        # BUG FIX: kv.get returns the value as bytes (see _kv_get_target, which
        # decodes it). Decode before escaping so the computed key matches the
        # str-escaped key written at add time.
        target = v["Value"].decode()
        escaped_target = escapism.escape(target, safe=self.key_safe_chars)

        try:
            # BUG FIX: the txn.put return value was tuple-unpacked into
            # (status, response) and immediately overwritten; the unpack was
            # fragile (it iterated the response dict's keys) and served no
            # purpose, so it is dropped.
            await self.kv_client.txn.put(
                payload=[
                    {"KV": {"Verb": "delete", "Key": escaped_jupyterhub_routespec}},
                    {"KV": {"Verb": "delete", "Key": escaped_target}},
                    {"KV": {"Verb": "delete", "Key": route_keys.backend_url_path}},
                    {"KV": {"Verb": "delete", "Key": route_keys.backend_weight_path}},
                    {"KV": {"Verb": "delete", "Key": route_keys.frontend_backend_path}},
                    {"KV": {"Verb": "delete", "Key": route_keys.frontend_rule_path}},
                ]
            )
            status = 1
            response = ""
        except Exception as e:
            status = 0
            response = str(e)
        return status, response

    async def _kv_get_target(self, jupyterhub_routespec):
        """Return the target URL stored for a routespec, or None if absent."""
        escaped_jupyterhub_routespec = escapism.escape(
            jupyterhub_routespec, safe=self.key_safe_chars
        )
        _, res = await self.kv_client.kv.get(escaped_jupyterhub_routespec)
        if res is None:
            return None
        return res["Value"].decode()

    async def _kv_get_data(self, target):
        """Return the data blob stored for a target, or None if absent."""
        escaped_target = escapism.escape(target, safe=self.key_safe_chars)
        _, res = await self.kv_client.kv.get(escaped_target)
        if res is None:
            return None
        return res["Value"].decode()

    async def _kv_get_route_parts(self, kv_entry):
        """Decompose one transaction result into (routespec, target, data)."""
        key = escapism.unescape(kv_entry["KV"]["Key"])
        value = kv_entry["KV"]["Value"]

        # BUG FIX: strip only the *leading* jupyterhub prefix. str.replace
        # removed every occurrence, corrupting routespecs that happen to
        # contain the prefix string elsewhere.
        if key.startswith(self.kv_jupyterhub_prefix):
            routespec = key[len(self.kv_jupyterhub_prefix) :]
        else:
            routespec = key
        target = base64.b64decode(value.encode()).decode()
        data = await self._kv_get_data(target)

        return routespec, target, data

    async def _kv_get_jupyterhub_prefixed_entries(self):
        """Fetch every KV entry under the jupyterhub prefix in one transaction."""
        routes = await self.kv_client.txn.put(
            payload=[
                {
                    "KV": {
                        "Verb": "get-tree",
                        "Key": escapism.escape(
                            self.kv_jupyterhub_prefix, safe=self.key_safe_chars
                        ),
                    }
                }
            ]
        )

        return routes["Results"]

    async def stop(self):
        await super().stop()
        # Close the aiohttp session the consul client holds open.
        await self.kv_client.http._session.close()
|
# !/usr/bin/env python
# -*- coding = utf-8 -*-
# @Author:wanghui
"""
获取当前手机状态
1:传入手机名称
2:遍历判断手机是否使用
判断adb是否连接
否:3
是
3:判断当前手机是否被标识为使用中
是:2
否:1
4:返回一个数组
"""
import os
import sys
from time import sleep
sys.path.append("D:\\appium-android1")
from subprocess import Popen, PIPE
from testAppium.conf.getPhoneConfig import ConfigPhoneDevices
# Resolve a path relative to this file's directory.
PATH = lambda p: os.path.abspath(
    os.path.join(os.path.dirname(__file__), p)
)
devices_state = {}  # cache of query results, {device_name: state}
# State codes returned to callers.
EXECUTION = 1  # available for execution
EXECUTION_ING = 2  # currently executing
BREAK_OFF = 3  # disconnected
NEED_ADD = 4  # not in inventory, device must be added
WRONG = 5  # wrong argument type passed in
config = ConfigPhoneDevices()
def get_phones(names):
    """Resolve the state of each named phone.

    :param names: list of phone names to check
    :return: the module-level ``devices_state`` dict mapping phone name to a
        state code (EXECUTION / EXECUTION_ING / BREAK_OFF), or ``WRONG`` when
        ``names`` is not a list.
    """
    # (Removed dead commented-out sys.argv handling; argv parsing lives in
    # the __main__ block.)
    if not isinstance(names, list):
        return WRONG
    all_state = get_adb_devices()  # serials currently attached via adb
    configured = get_device(names)  # {phone: [serial, execution-flag]}
    for phone, (serial, execution) in configured.items():
        if serial in all_state:
            # Connected: distinguish busy vs available via the config flag.
            if execution == 'EXECUTION_ING':
                devices_state[phone] = EXECUTION_ING
            else:
                devices_state[phone] = EXECUTION
        else:
            devices_state[phone] = BREAK_OFF
    return devices_state
def get_adb_devices():
    """Return the serial numbers of all devices currently attached via adb."""
    output = Popen("adb devices", shell=True, stdout=PIPE, stderr=PIPE).stdout.readlines()
    # Each attached device is reported as "<serial>\tdevice"; header and
    # offline lines don't split on that marker and are skipped.
    return [
        fields[0]
        for fields in (raw.decode().split("\tdevice") for raw in output)
        if len(fields) >= 2
    ]
def get_device(phones: list):
    """Look up each phone's configured device serial and execution flag.

    :param phones: phone names to look up
    :return: {phone: [devicename, execution]} read from the config file
    """
    return {
        phone: [
            config.get_section_password(phone, 'devicename'),
            config.get_section_password(phone, 'execution'),
        ]
        for phone in phones
    }
def judge_devices(devices_list: dict, adb_list: list):
    """Mark each phone as connected (EXECUTION) or disconnected (BREAK_OFF).

    :param devices_list: {phone_name: device_serial}
    :param adb_list: serials currently reported by adb
    :return: the module-level ``devices_state`` dict, updated in place
    """
    for name, serial in devices_list.items():
        devices_state[name] = EXECUTION if serial in adb_list else BREAK_OFF
    return devices_state
def judge_phone_ing():
    """Upgrade connected phones to EXECUTION_ING when the config marks them busy.

    :return: the module-level ``devices_state`` dict, updated in place
    """
    for phone, state in devices_state.items():
        if state == 1:  # only phones currently marked EXECUTION
            # BUG FIX: previously queried config with ``i[0]`` - the first
            # *character* of the phone name - instead of the name itself.
            execution = config.get_section_password(phone, 'execution')
            if execution == "execution_ing":
                devices_state[phone] = EXECUTION_ING
    return devices_state
def get_adb_count(all_dicts: dict):
    """Return only the entries whose state is 1 (connected and available)."""
    return {name: state for name, state in all_dicts.items() if state == 1}
# Currently unused - this check is fairly slow.
def service_part(device_name: str) -> dict:
    """Check whether the given device is currently in use.

    Looks for the ``com.xiaozhu.xzdz`` process in ``adb shell ps``; if not
    found, waits 10 seconds and checks once more before declaring the device
    available.

    :param device_name: adb serial of the device
    :return: the module-level ``devices_state`` dict, updated in place
    """
    def _xiaozhu_running() -> bool:
        # Query the device's process list and look for the target app.
        output = Popen(f"adb -s {device_name} shell ps",
                       shell=True, stdout=PIPE, stderr=PIPE).stdout.readlines()
        return any('com.xiaozhu.xzdz' in str(line, encoding='utf-8') for line in output)

    if _xiaozhu_running():
        devices_state[device_name] = EXECUTION_ING
        return devices_state
    sleep(10)  # wait, then re-query (the old code re-checked a stale line)
    if _xiaozhu_running():
        devices_state[device_name] = EXECUTION_ING
        return devices_state
    # BUG FIX: the original set EXECUTION_ING here although its own comment
    # said the device should be marked available.
    devices_state[device_name] = EXECUTION
    return devices_state
if __name__ == '__main__':
    argument = sys.argv
    if len(argument) == 2:
        # SECURITY FIX: the argument was passed to eval(), which executes
        # arbitrary code. literal_eval only accepts Python literals
        # (e.g. "['phone1', 'phone2']").
        import ast
        phones = ast.literal_eval(argument[1])
        print(get_phones(phones))
|
<gh_stars>0
"""
destroy_artifacts.py
Goes through current jobs in Jenkins and finds artifacts that have been deleted in Jenkins, but not in the artifact
share. Sort of flexible, but it ties into Signiant's current build patterns fairly tightly in some areas.
"""
import sys,os,shutil
reload(sys)
sys.setdefaultencoding('utf8')
import argparse
from ConfigParser import RawConfigParser
from maestro.jenkins.jobs import EnvironmentVariableJobEntry, InvalidEntryError, parse_build_into_environment_variable_job_entry
from maestro.tools import string, path
## Globals used throughout the script
parser = argparse.ArgumentParser(prog='destroy_artifacts')
VERBOSE = False
DEBUG = False
IGNORED_PATHS = []
# ARG: Which Jenkins instance are we targeting?
JENKINS_JOBS_DIRECTORY_PATH = "/var/lib/jenkins/jobs"
# ARG: Don't actually delete anything, but list what would be deleted
IS_DRY_RUN = False
# ARG: Where is the config file stored?
CONFIG_PATH = "./config"
# CONFIG: REQUIRED environment variables
ENVIRONMENT_VARIABLES = []
# CONFIG: Environment Variables potentially containing deployment paths (i.e. paths to search for builds)
# The script will attempt to replace $VARIABLES with their corresponding value from ENVIRONMENT_VARIABLES
DEPLOYMENT_PATHS = []
# CONFIG: REGEX to apply to the subfolders of the deployment directories. (i.e. {[0-9]+} )
BUILD_FOLDER_REGEX = ""
# CONFIG: Jobs to ignore (use the job name)
IGNORE_JOBS = []
# CONFIG: Where to split the deployment paths, the script takes the second index ( [1] )
# Signiant: Default in config is to split on $PROJECT_FAMILY
SPLIT_TOKEN = ""
# CONFIG: What to prepend to the string[1] from deployment path split with SPLIT_TOKEN
# NOTE(review): original comment trailed off ("Default in config is to") - confirm intended default.
PREPEND_STRING = ""
# CONFIG: What to append to the string[1] from deployment path split with SPLIT_TOKEN
APPEND_STRING = ""
# Tracks duplicates by having $PROJECT_FAMILY:$PROJECT_TITLE:$PROJECT_BRANCH as a key, and the name of the entry as a value (for error messaging)
__duplicate_tracker__ = dict()
# List of found duplicates
__duplicates__ = list()
# We pass in undeleted_paths set in order to avoid duplicates, and get an accurate byte clean up count
# Makes it fairly slow, but whatever. It's still under a minute for scanning the entire thing
def __get_undeleted_artifact_paths__(entry, release_paths, undeleted_paths_dict = None):
    """
    Loop through release paths and see if we can find anything with Build-XXX,
    strip the number out and compare to the job entry. Put all ones not in the job entry into a set.

    Fixes: the ``entry is None`` check was unreachable (it ran after the
    isinstance check, which already rejects None); the loop variable was named
    ``path``, shadowing the ``maestro.tools.path`` module imported above.
    """
    if entry is None or not isinstance(entry, EnvironmentVariableJobEntry):
        raise TypeError("You must pass in a EnvironmentVariableJobEntry!")
    if undeleted_paths_dict is None:
        undeleted_paths_dict = dict()
    for release_path in release_paths:
        # TODO: Find a better way to do this - the path shouldn't contain the
        # literal $BUILD_NUMBER at this point, it should have been replaced...
        # We need to strip off any deploy path that has Build-$BUILD_NUMBER at the end
        if release_path.endswith("$BUILD_NUMBER"):
            # Prepend a slash and re-join everything except the last component.
            release_path = os.path.join("/", *release_path.split("/")[:-1])
        try:
            for subdir in os.listdir(release_path):
                try:
                    # TODO: Find a better way to do this (don't rely on Build-XXX)
                    # TODO: This doesn't look for Build-XXX, it just looks for folders with a dash!
                    # TODO: An easy enhancement is to check subdir.startswith('Build')
                    build_no = subdir.split("-")[1]
                    if build_no not in entry.get_build_number_list():
                        undeleted_paths_dict[os.path.join(release_path, subdir)] = entry
                except IndexError:
                    # Unrecognized build directory (no dash) - skip it.
                    continue
        except TypeError:
            # No builds in directories
            continue
        except OSError:
            # There are no deployed artifacts for this directory
            continue
    return undeleted_paths_dict
def __enumerate_remote_artifact_config_entries__(jobs_path):
    """
    Loop through the found config.xml files and return their folder path.

    Generator: yields one parsed job entry per directory containing a
    config.xml, skipping Jenkins "promotions" sub-jobs and entries that fail
    to parse.
    """
    if DEBUG:
        print "jobs_path: " + str(jobs_path)
    for root, dirnames, filenames in os.walk(jobs_path):
        if "config.xml" in filenames:
            if DEBUG:
                print "Found config.xml at " + str(root)
            try:
                # Promotion jobs have their own config.xml but aren't real builds.
                if not 'promotions' in root:
                    yield parse_build_into_environment_variable_job_entry(root)
                else:
                    if VERBOSE:
                        print "Skipping over " + str(root) + ' - PROMOTION Job'
            except InvalidEntryError as e:
                # Not a job we can parse - skip quietly unless verbose.
                if VERBOSE:
                    print "Skipping over " + str(root)
def __parse_config__(config_file_path):
    """
    Parses the config file and sets the globals.

    Raises whatever ConfigParser raises when a required option is missing
    (every option here is required).
    """
    global ENVIRONMENT_VARIABLES
    global DEPLOYMENT_PATHS
    global DEPLOYMENT_STRUCTURES
    global BUILD_FOLDER_REGEX
    global IGNORE_JOBS
    global SPLIT_TOKEN
    global PREPEND_STRING
    global APPEND_STRING

    config = RawConfigParser()
    config.read(config_file_path)

    # The original wrapped each read in ``try: ... except: raise`` - a no-op,
    # so plain reads are behaviourally identical. Missing options still raise.
    section = "ArtifactConfig"
    ENVIRONMENT_VARIABLES = config.get(section, "ENVIRONMENT_VARIABLES").split(',')
    DEPLOYMENT_PATHS = config.get(section, "DEPLOYMENT_PATHS").split(',')
    DEPLOYMENT_STRUCTURES = config.get(section, "DEPLOYMENT_STRUCTURES").split(',')
    BUILD_FOLDER_REGEX = config.get(section, "BUILD_FOLDER_REGEX")
    IGNORE_JOBS = config.get(section, "IGNORE_JOBS").split(',')
    SPLIT_TOKEN = config.get(section, "SPLIT_TOKEN")
    # A command-line --prepend takes precedence over the config file.
    if not PREPEND_STRING:
        PREPEND_STRING = config.get(section, "PREPEND_STRING")
    APPEND_STRING = config.get(section, "APPEND_STRING")
def __verify_environment_variables__(entry):
    """
    Checks for the required environment variables from ENVIRONMENT_VARIABLES,
    and will raise an InvalidEntryError if one is not found or is None.

    Fix: removed a leftover hard-coded debug print for the
    "Media Shuttle Store-mjc" job.
    """
    if not isinstance(entry, EnvironmentVariableJobEntry):
        raise TypeError("Received object of type " + str(type(entry)) + " expected type SigniantRemoteArtifactJobEntry.")
    for var in ENVIRONMENT_VARIABLES:
        if var not in entry.environment_variables.keys() or entry.environment_variables[var] is None:
            raise InvalidEntryError("Required environment variable " + str(var) + " was not found in job entry " + str(entry.name) + ".")
def __get_release_path_list__(entry):
    """
    Builds a string replace dictionary out of the environment variables,
    calls __strip_release_path__, replaces the $VARIABLES with their values (if found),
    normalizes the path and adds it to a list which is returned by this method.

    Returns a de-duplicated list of normalized release paths, or None when no
    usable path was found.
    """
    releases = list()
    for key in DEPLOYMENT_PATHS:
        if key in entry.environment_variables:
            try:
                # Map "$VAR" -> value for every required variable the entry has.
                string_replace = dict()
                for var in ENVIRONMENT_VARIABLES:
                    try:
                        string_replace[str("$" + var)] = entry.environment_variables[var]
                    except KeyError:
                        continue
                release_path = entry.environment_variables[key]
                split_token = entry.environment_variables[SPLIT_TOKEN].strip()
                converted_release_path = __strip_release_path__(release_path,split_token)
                if converted_release_path is None:
                    continue
                replaced_release_path = string.replaceall(string_replace, converted_release_path)
                formatted_release_path = os.path.normpath(replaced_release_path)
                # If the formatted_release_path ends with Build-XXX - strip that off
                if os.path.basename(formatted_release_path).startswith('Build'):
                    formatted_release_path = os.path.dirname(formatted_release_path)
                if formatted_release_path not in releases:
                    releases.append(formatted_release_path)
            except ValueError as e:
                # Malformed path/variable - skip this deployment path.
                pass
    if len(releases) == 0:
        return None
    else:
        return releases
def __strip_release_path__(release_path, split_token):
    """
    Converts UNC/Windows paths into forward slashes, and then splits and pre/appends.

    Returns the rebuilt path, or None when the path can't be split on
    ``split_token`` (the broad except is a deliberate best-effort: callers
    treat None as "skip this path").
    """
    try:
        clean_path = release_path.replace('\\','/').strip()
        stripped_path = clean_path.split(split_token)
        # Keep everything after the first occurrence of the token.
        return PREPEND_STRING + split_token + stripped_path[1] + APPEND_STRING
    except Exception as e:
        print str("Exception: " + str(e))
        return None
def __compute_dupe_key__(entry):
key = ''
if 'PROJECT_PLATFORM' in entry.environment_variables.keys():
key = str(entry.environment_variables["PROJECT_FAMILY"] + "/" + entry.environment_variables["PROJECT_TITLE"] + "/" + entry.environment_variables["PROJECT_BRANCH"] + "/" + entry.environment_variables["PROJECT_PLATFORM"])
else:
key = str(entry.environment_variables["PROJECT_FAMILY"] + "/" + entry.environment_variables["PROJECT_TITLE"] + "/" + entry.environment_variables["PROJECT_BRANCH"])
return key
def __verify_duplicates__(entry):
    """Record `entry` by its deployment key and raise InvalidEntryError when
    another entry already claimed the same key.

    Side effects: registers the entry in the global __duplicate_tracker__;
    on a clash, both offending entries are appended to __duplicates__ so the
    caller can report them and avoid deleting their artifacts later.
    Entries whose key appears in IGNORED_PATHS are skipped entirely.
    """
    # TODO: Make less Signiant specific
    global __duplicate_tracker__
    global __duplicates__
    # Key is PROJECT_FAMILY/TITLE/BRANCH(/PLATFORM) joined with '/'
    key = __compute_dupe_key__(entry)
    if DEBUG:
        print "key: " + str(key)
        print "IGNORED_PATHS: " + str(IGNORED_PATHS)
    if any(key in s for s in IGNORED_PATHS):
        return
    # Check for duplicate
    if key in __duplicate_tracker__.keys():
        __duplicates__.append(entry)
        __duplicates__.append(__duplicate_tracker__[key])
        raise InvalidEntryError("Found a duplicate entry! Please see error message at the end of the script.")
    else:
        __duplicate_tracker__[key] = entry
def __parse_arguments__():
    """
    Parse command-line flags and copy them into the module-level
    configuration globals (IS_DRY_RUN, VERBOSE, DEBUG, PREPEND_STRING,
    CONFIG_PATH, IGNORED_PATHS). Flags left unset keep the defaults
    defined at module scope.
    """
    global IS_DRY_RUN
    global VERBOSE
    global DEBUG
    global parser
    global PREPEND_STRING
    global CONFIG_PATH
    global IGNORED_PATHS
    parser.add_argument('-n','--dry-run',action='store_true',help="Does a dry run of the cleaner")
    parser.add_argument('-p','--prepend',type=str, help="Where PREPEND is a string of the release share prefix")
    parser.add_argument('-i','--ignore', type=str, help="Ignore a job with specified artifact path", action='append', dest='ignored', required=False)
    parser.add_argument('-d','--debug',action='store_true',help="Run with verbose debugging")
    parser.add_argument('-c','--config',type=str, help="config file path")
    args = parser.parse_args()
    if args.dry_run:
        IS_DRY_RUN = True
    if args.debug:
        # Debug implies verbose.
        print "Debug is on"
        VERBOSE = True
        DEBUG = True
    if args.prepend:
        PREPEND_STRING=args.prepend
    if args.config:
        CONFIG_PATH=args.config
    if args.ignored:
        # action='append' means this is a list of -i occurrences.
        IGNORED_PATHS = args.ignored
def destroy_artifacts():
# Parse arguments
__parse_arguments__()
if not os.path.exists(CONFIG_PATH):
raise ValueError("You need to provide a valid config file! Currently looking for: " + str(CONFIG_PATH))
# Parse config file
__parse_config__(CONFIG_PATH)
# Bytes cleaned up
cleaned_byte_count = 0
# Set containing ALL the paths to be deleted
undeleted_paths_dict = dict()
if DEBUG:
print "Evalutating path"
# First we want to go through the config entries that contain Environment Variables from envinject
for entry in __enumerate_remote_artifact_config_entries__(JENKINS_JOBS_DIRECTORY_PATH):
# Safety net... if there's NO builds, we shouldn't clean anything up
if DEBUG:
print "entry: " + str(entry)
if entry.get_build_number_list() is None or len(entry.builds_in_jenkins) == 0:
if DEBUG:
print "No builds found"
continue
# Skip disabled entries
if entry.disabled is True:
continue
try:
if DEBUG:
print "Found Build " + str(entry.get_build_number_list())
__verify_environment_variables__(entry)
__verify_duplicates__(entry)
release_paths = __get_release_path_list__(entry)
if release_paths is not None:
for undeleted_artifact_path in __get_undeleted_artifact_paths__(entry,release_paths,undeleted_paths_dict):
pass # Building set...
# If there's no match to any of the keys, then we don't care about this entry
except TypeError as e:
# print str(e)
continue
# If the job doesn't have the variables we're looking for, skip over it
except InvalidEntryError as e:
print str(e)
continue
# Loop through the (now) unique path list so we can get the size and delete
for artifact_path in undeleted_paths_dict.keys():
if DEBUG:
print "artifact_path: " + str(artifact_path)
print "IGNORED_PATHS: " + str(IGNORED_PATHS)
if undeleted_paths_dict[artifact_path].name in [d.name for d in __duplicates__]:
print "Not deleting duplicate: " + artifact_path
continue
for key in IGNORED_PATHS:
if key in artifact_path:
print "Artifact path in ignore list, skipping delete: " + artifact_path
continue
if not os.path.isdir(artifact_path):
continue
print "Deleting " + str(artifact_path)
try:
cleaned_byte_count = path.get_tree_size(artifact_path) + cleaned_byte_count
except Exception as e:
print str(e)
if not IS_DRY_RUN:
try:
shutil.rmtree(str(artifact_path), ignore_errors=False)
except OSError as e:
print "WARNING: Unable to delete " + artifact_path + " due to:"
print str(e)
if IS_DRY_RUN:
print "Would have cleaned up " + str(cleaned_byte_count) + " bytes!"
else:
print "Cleaned up " + str(cleaned_byte_count) + " bytes!"
if len(__duplicates__) > 0:
print "The job failed because of the following errors:"
for duplicate in __duplicates__:
key = __compute_dupe_key__(duplicate)
print "Attempted to parse entry with name '" + str(duplicate.name) + "' but entry with name '" + str(__duplicate_tracker__[key].name) + "' is currently using the same deployment strategy: " + key
sys.exit(1)
# Script entry point: run the artifact cleaner when invoked directly.
if __name__ == "__main__":
    destroy_artifacts()
|
from unittest import TestCase
from unittest.mock import patch
import pytest
from hubblestack.audit import curl
from hubblestack.exceptions import HubbleCheckValidationError
class TestCurl(TestCase):
    """
    Unit tests for curl module
    """

    class _ResultMock:
        """Minimal stand-in for a requests response object."""
        def __init__(self, status_code):
            self.status_code = status_code

        def json(self):
            return {"id": 1, "name": "test"}

        def raise_for_status(self):
            # Tests only use 2xx codes, so this never raises.
            pass

    def _assert_execute(self, function, requests_method):
        """Run curl.execute for `function`, mocking the given requests method,
        and verify the mocked 200/JSON response is propagated."""
        block_dict = {"args": {"function": function, "url": "test"}}
        expected_result = {'status': 200, 'response': {'id': 1, 'name': 'test'}}
        with patch('hubblestack.audit.curl.requests') as requests_mock:
            getattr(requests_mock, requests_method).return_value = self._ResultMock(200)
            status, res = curl.execute('test', block_dict, {})
        self.assertEqual(res['result'], expected_result)

    def test_invalid_params1(self):
        """
        No mandatory param is passed
        should fail
        """
        block_dict = {"args": {
            "function": "invalid"
        }}
        check_id = "test-1"
        with pytest.raises(HubbleCheckValidationError) as exception:
            curl.validate_params(check_id, block_dict, {})
            pytest.fail("Check should not have passed")

    def test_valid_params1(self):
        """
        valid param, should pass
        """
        block_dict = {"args": {"function": "GET", "url": "test-xyz"}}
        check_id = "test-1"
        curl.validate_params(check_id, block_dict, {})

    def test_valid_params2(self):
        """
        valid param, default function name from module, should pass
        """
        block_dict = {"args": {"url": "test-xyz"}}
        check_id = "test-1"
        curl.validate_params(check_id, block_dict, {})

    def test_filtered_logs1(self):
        """
        valid param, should pass
        """
        block_dict = {"args": {"function": "GET", "url": "test"}}
        check_id = "test-1"
        res = curl.get_filtered_params_to_log(check_id, block_dict, {})
        self.assertEqual(res, {"url": "test"})

    def test_execute_get(self):
        """
        test a get request
        """
        self._assert_execute("GET", "get")

    def test_execute_post(self):
        """
        test a post request
        """
        self._assert_execute("POST", "post")

    def test_execute_put(self):
        """
        test a put request
        """
        self._assert_execute("PUT", "put")
from . import InvalidTypeArgumentError, BaseNode, File, Node, \
InvalidUssageError, GraphFailed, _NodeRunningStatus, _GraphRunningStatus, \
_GraphPostAction
from . import _get_obj, _save_graph
from collections import deque
import collections
import collections.abc
import copy
import json
import logging
import time

import requests
def update_recursive(d, u):
    """Recursively merge update-dict `u` into base dict `d` in place.

    - Nested mappings are merged key by key.
    - Lists are merged element-wise by the 'name' key: an update item whose
      'name' matches an existing item is merged into it; items without a
      'name' are appended. (A named update item with no match is dropped.)
    - Any other value replaces the base value only when truthy.

    Returns `d` (which is also mutated in place).
    """
    for k, v in u.items():
        # BUGFIX: collections.Mapping was removed in Python 3.10;
        # the ABC lives in collections.abc.
        if isinstance(v, collections.abc.Mapping):
            d[k] = update_recursive(d.get(k, {}), v)
        elif isinstance(v, list):
            for v_item in v:
                if 'name' in v_item:
                    # Merge into the matching named element, if any.
                    for d_item in d[k]:
                        if 'name' in d_item and d_item['name'] == v_item['name']:
                            update_recursive(d_item, v_item)
                else:
                    d[k].append(v_item)
        else:
            # Falsy update values keep the existing entry.
            d[k] = v if v else d[k]
    return d
def traverse_nodes(graph, targets):
    """Walk backwards (breadth-first) from `targets` through node inputs.

    Returns every reachable node exactly once, in visit order. `graph` is
    unused but kept for interface compatibility.
    """
    ordered = []
    seen_ids = set()
    queue = deque(targets)
    while queue:
        current = queue.popleft()
        if current._id in seen_ids:
            continue
        seen_ids.add(current._id)
        ordered.append(current)
        for input_items in current.inputs.values():
            if not input_items:
                continue
            for item in input_items:
                if item.node._id not in seen_ids:
                    queue.append(item.node)
    return ordered
class Graph(object):
    """Client-side builder for a PLynx graph.

    Wraps one or more target nodes, serializes them (plus their upstream
    dependencies) into the server graph format, and drives the
    save / approve / wait lifecycle through the REST helpers.
    """

    def __init__(self, client=None, title=None, description=None, targets=None):
        self.client = client
        self.title = title or ''
        self.description = description or ''
        self.targets = targets
        self._graph_dict = None  # server-side dict, set by save()/approve()
        # A single node may be passed instead of a list.
        if not isinstance(targets, list):
            self.targets = [targets]
        for target in self.targets:
            if not isinstance(target, BaseNode):
                raise InvalidTypeArgumentError('Target is expected to be an instance of {}, found `{}`'.format(BaseNode, type(target)))

    def _dictify(self):
        """Serialize the graph (targets + upstream nodes) into a plain dict."""
        nodes = [node for node in traverse_nodes(self, self.targets)]
        plynx_nodes = {}
        for base_node_name, parent_node in set([(n.base_node_name, n.parent_node) for n in nodes]):
            # Fetch each distinct parent node definition once.
            obj = _get_obj('nodes', parent_node, self.client)
            plynx_nodes[parent_node] = obj
        res_nodes = []
        for node in nodes:
            node_dict = node._dictify()
            # Overlay the local node's fields onto a copy of its definition.
            plynx_node = copy.deepcopy(plynx_nodes[node.parent_node])
            update_recursive(plynx_node, node_dict)
            res_nodes.append(plynx_node)
        graph = {
            'title': self.title,
            'description': self.description,
            'graph_running_status': _GraphRunningStatus.CREATED,
            'nodes': res_nodes
        }
        return graph

    def save(self):
        """Create or update the graph on the server; returns self."""
        d = self._dictify()
        if self._graph_dict:
            d['_id'] = self._graph_dict['_id']
        self._graph_dict, url = _save_graph(graph=d, actions=[_GraphPostAction.AUTO_LAYOUT, _GraphPostAction.SAVE], client=self.client)
        logging.info('Graph successfully saved: {}'.format(url))
        return self

    def approve(self):
        """Save the graph and mark it approved for execution; returns self."""
        d = self._dictify()
        if self._graph_dict:
            d['_id'] = self._graph_dict['_id']
        self._graph_dict, url = _save_graph(graph=d, actions=[_GraphPostAction.AUTO_LAYOUT, _GraphPostAction.APPROVE], client=self.client)
        logging.info('Graph successfully approved: {}'.format(url))
        return self

    def wait(self):
        """Poll the server until the graph finishes.

        Returns self on SUCCESS; raises GraphFailed on any terminal failure
        status; raises InvalidUssageError if the graph was never saved or
        is still in CREATED state.
        """
        if not self._graph_dict:
            raise InvalidUssageError("The graph neigher saved nor approved yet")
        if self._graph_dict["graph_running_status"].upper() == _GraphRunningStatus.CREATED:
            raise InvalidUssageError("The graph must be approved first")
        while True:
            graph = _get_obj('graphs', self._graph_dict["_id"], self.client)
            counter = collections.Counter(
                [
                    node['node_running_status'] for node in graph['nodes']
                ]
            )
            graph_running_status = graph['graph_running_status']
            # Progress = successful nodes / non-static nodes.
            numerator = counter.get(_NodeRunningStatus.SUCCESS, 0)
            denominator = sum(counter.values()) - counter.get(_NodeRunningStatus.STATIC, 0)
            if denominator > 0:
                progress = float(numerator) / denominator
            else:
                progress = 1.0
            # BUGFIX: this was a map() object; `list + map` raises TypeError
            # on Python 3, so wait() crashed on its first log line.
            node_running_statuses = [
                '{}: {}'.format(status, count)
                for status, count in counter.items()
            ]
            logging.info('\t'.join(
                [
                    graph_running_status,
                    '{0:.0f}%'.format(progress * 100)
                ] + node_running_statuses))
            if graph_running_status.upper() not in [
                    _GraphRunningStatus.READY,
                    _GraphRunningStatus.RUNNING,
                    _GraphRunningStatus.SUCCESS]:
                raise GraphFailed('Graph finished with status `{}`'.format(graph_running_status))
            if graph_running_status.upper() == _GraphRunningStatus.SUCCESS:
                logging.info('Graph finished with status `{}`'.format(graph_running_status))
                break
            time.sleep(1)
        return self
|
from __future__ import division
import os.path
import numpy as np
from unet import INPUT_SIZE, OUTPUT_SIZE, output_size_for_input
import normalize
import gzip
import cPickle as pickle
import loss_weighting
import skimage.morphology
from augment import augment
import time
import re
from params import params as P
_EPSILON = 1e-8
def get_image(filename, deterministic):
    """Load one lung slice with its nodule truth and lung-mask segmentation.

    Returns (lung, truth): 4D arrays shaped (1, 1, H, W). `truth` holds int64
    labels with pixels outside the lung pushed to negative values so the loss
    can ignore them. When `deterministic` is true, augmentation (and its
    randomness) is skipped; note RANDOM_CROP still uses np.random either way.
    """
    with gzip.open(filename,'rb') as f:
        lung = pickle.load(f)
    # Companion files are located by filename convention.
    truth_filename = filename.replace('lung','nodule')
    segmentation_filename = filename.replace('lung','lung_masks')
    #segmentation_filename = re.sub(r'subset[0-9]','',segmentation_filename)
    if os.path.isfile(truth_filename):
        with gzip.open(truth_filename,'rb') as f:
            truth = np.array(pickle.load(f),dtype=np.float32)
    else:
        # No nodule file: treat the slice as all background.
        truth = np.zeros_like(lung)
    if os.path.isfile(segmentation_filename):
        with gzip.open(segmentation_filename,'rb') as f:
            # Mask file marks lung>0; invert it into an "outside" mask.
            outside = np.where(pickle.load(f)>0,0,1)
    else:
        # Fall back to "zero intensity means outside the lung".
        outside = np.where(lung==0,1,0)
        print 'lung not found'
    if P.ERODE_SEGMENTATION > 0:
        # Shrink the outside mask so the lung boundary stays in play.
        kernel = skimage.morphology.disk(P.ERODE_SEGMENTATION)
        outside = skimage.morphology.binary_erosion(outside, kernel)
    outside = np.array(outside, dtype=np.float32)
    if P.AUGMENT and not deterministic:
        lung, truth, outside = augment([lung, truth, outside])
    if P.RANDOM_CROP > 0:
        # Apply the same random crop to image, truth and mask.
        im_x = lung.shape[0]
        im_y = lung.shape[1]
        x = np.random.randint(0, max(1,im_x-P.RANDOM_CROP))
        y = np.random.randint(0, max(1,im_y-P.RANDOM_CROP))
        lung = lung[x:x+P.RANDOM_CROP, y:y+P.RANDOM_CROP]
        truth = truth[x:x+P.RANDOM_CROP, y:y+P.RANDOM_CROP]
        outside = outside[x:x+P.RANDOM_CROP, y:y+P.RANDOM_CROP]
    truth = np.array(np.round(truth),dtype=np.int64)
    outside = np.array(np.round(outside),dtype=np.int64)
    #Set label of outside pixels to -10
    truth = truth - (outside*10)
    # Zero out non-lung pixels, then push them to -3000 (presumably a
    # below-air Hounsfield-style sentinel -- TODO confirm).
    lung = lung*(1-outside)
    lung = lung-outside*3000
    if P.INPUT_SIZE > 0:
        lung = crop_or_pad(lung, INPUT_SIZE, -3000)
        truth = crop_or_pad(truth, OUTPUT_SIZE, 0)
        outside = crop_or_pad(outside, OUTPUT_SIZE, 1)
    else:
        # Fully-convolutional mode: derive output size from the input width.
        out_size = output_size_for_input(lung.shape[1], P.DEPTH)
        #lung = crop_or_pad(lung, INPUT_SIZE, -1000)
        truth = crop_or_pad(truth, out_size, 0)
        outside = crop_or_pad(outside, out_size, 1)
    lung = normalize.normalize(lung)
    lung = np.expand_dims(np.expand_dims(lung, axis=0),axis=0)
    if P.ZERO_CENTER:
        lung = lung - P.MEAN_PIXEL
    truth = np.array(np.expand_dims(np.expand_dims(truth, axis=0),axis=0),dtype=np.int64)
    return lung, truth
def crop_or_pad(image, desired_size, pad_value):
    """Center-pad (with `pad_value`) or center-crop a square image so its
    sides equal `desired_size`.

    Padding uses a ceil'd symmetric border, so an odd difference overshoots
    by one and is trimmed back by the crop step that follows.
    """
    if image.shape[0] < desired_size:
        border = int(np.ceil((desired_size - image.shape[0]) / 2))
        image = np.pad(image, border, 'constant', constant_values=pad_value)
    if image.shape[0] > desired_size:
        start = (image.shape[0] - desired_size) // 2
        stop = start + desired_size
        image = image[start:stop, start:stop]
    return image
def load_images(filenames, deterministic=False):
    """Load a batch of slices and return (lungs, truths, weights, filenames).

    Lungs come back as a float32 batch, truths as int64 labels clipped to be
    non-negative, and weights as class-balanced loss weights with everything
    outside labels {0, 1} zeroed.
    """
    pairs = [get_image(name, deterministic) for name in filenames]
    lungs, truths = zip(*pairs)
    lung_batch = np.array(np.concatenate(lungs, axis=0), dtype=np.float32)
    truth_batch = np.concatenate(truths, axis=0)
    # Weight the loss by class balance; labels other than 0 and 1 (the
    # background marker is -10) get weight 0.
    weights = loss_weighting.weight_by_class_balance(truth_batch, classes=[0, 1])
    # Clip the negative "outside" labels back to label 0.
    truth_batch = np.clip(truth_batch, 0, 100000)
    return lung_batch, truth_batch, weights, filenames
def get_scan_name(filename):
    """Extract the scan identifier: the basename up to the first underscore,
    accepting either Windows or POSIX path separators."""
    basename = filename.replace('\\', '/').rsplit('/', 1)[-1]
    return basename.split('_', 1)[0]
def train_splits_by_z(filenames, data_resolution=0.5, n_splits=None):
    """Partition slice filenames into training splits balanced by z-spacing.

    Each scan contributes to round(spacing / data_resolution) splits with a
    random per-scan offset, so every split approximates `data_resolution`
    spacing. NOTE(review): Python 2 only (`xrange`, list-returning `filter`);
    assumes ../../data/imagename_zspacing.csv maps scan name -> z spacing --
    confirm against the data layout.
    """
    import pandas as pd
    resolution_of_scan = pd.read_csv('../../data/imagename_zspacing.csv',header=None,names=['filename','spacing'],index_col=False)
    scan_names = set(map(get_scan_name, filenames))
    resolutions = [resolution_of_scan[resolution_of_scan['filename']==scan].iloc[0]['spacing'] for scan in scan_names]
    # Per scan, the filenames belonging to it.
    scan_filenames = []
    for scan in scan_names:
        scan_filenames.append(filter(lambda x: scan in x, filenames))
    split_per_scan = [int(np.round(r/data_resolution)) for r in resolutions] #Amount of splits to divide the filenames over
    # Random starting slice per scan so splits don't all begin at slice 0.
    random_offsets = [np.random.permutation(range(x)) for x in split_per_scan]
    if n_splits is None:
        n_splits = np.round(max(resolutions)/data_resolution)
    splits = [ [] for _ in xrange(n_splits)]
    for i, s in enumerate(splits):
        for r, scan, filenames_in_scan, n, offset in zip(resolutions, scan_names, scan_filenames, split_per_scan, random_offsets):
            #n = int(np.round(r/data_resolution))
            # Take every n-th slice of this scan, rotated by the random offset.
            start = offset[i%n]
            s += filenames_in_scan[start%n::n]
    return splits
|
"""
This module handles the orders.
Orders are described in the xml files, and are the actions to be executed
for example when keyboard keys are hitted, or in reaction to other orders.
Every order is thus initialized after the parsing fo the xml files
and the created in this file.
Each order is applied to a target called the emitter meaning that expressions
will be evaluated within this target context ie self.x will be target.x.
Without more precisions, when an event is raised by an order, its target is
transmitted and order generated be this event will share that target.
Set : target.param <- value
Setobj : target.param <- value and value is interpreted as an object
Timer : raise event in value ms
Event : raise event on the target target
Create : create an object of type base, execute the initcode (that allows
transmitting fields of the target to the created object) and raise event on
the created object - if an emitter is specified, it is used as a way to fix
the identifier of the created object since the emitter can't exist
Destroy : destroy the emitter
Condition : raise event if value is True
Move : param being a list (quests for example) move the emitter from
source.param to dest.param
Watchdog : start looking after target.param, when it changes to value (that was
evaluated once by the time of the order), it raises the event
if once is True, the watchdog is destroy after the first event happened
"""
from collections import namedtuple
from enum import IntEnum
from shared.const import CODING
# All order kinds understood by Order/OrderDispatcher (see module docstring).
ORDERTYPE = IntEnum('OrderType', 'Set Timer Event Create Destroy Condition '
                    'Move Setobj Watchdog')
# A registered watchdog: raise `event` on `target`; dropped after the first
# firing when `once` is true.
Condition = namedtuple("Condition", "target event once")
class Order:
    """ A change to be done on the world.

    An Order is a tagged record: `type` is an ORDERTYPE member and `args`
    holds the values for that type's parameter names (see `params`).
    __getattr__/__setattr__ map the per-type parameter names onto slots in
    `args`, so e.g. a Set order exposes .target, .param and .value.
    """
    # NOTE: beware of collisions between parameter names and 'args'/'type'.
    # params[t] lists the attribute names valid for orders of type t.
    params = [None] * (len(ORDERTYPE)+1) #XXX not great
    params[ORDERTYPE.Set] = ["target", "param", "value"]
    params[ORDERTYPE.Timer] = ["event", "value"]
    params[ORDERTYPE.Event] = ["event", "target"]
    params[ORDERTYPE.Create] = ["event", "base", "init"]
    params[ORDERTYPE.Destroy] = []
    params[ORDERTYPE.Condition] = ["event", "value"]
    params[ORDERTYPE.Move] = ["source", "dest", "param"]
    params[ORDERTYPE.Setobj] = ["target", "param", "value"]
    params[ORDERTYPE.Watchdog] = ["target", "param", "value", "event", "once"]
    def __init__(self):
        self.type = None
        self.args = []
    def __getattr__(self, attr):
        # Only reached for names not found normally: resolve a parameter
        # name of the current order type to its slot in self.args.
        return self.args[self.params[self.type].index(attr)]
    def __setattr__(self, attr, val):
        # 'type'/'args' (and anything that is not a parameter of the current
        # order type) are stored as real attributes; parameters go into args.
        if attr in ("type", "args") or attr not in self.params[self.type]:
            object.__setattr__(self, attr, val)
        else:
            self.args[self.params[self.type].index(attr)] = val
    def copy(self):
        """ Copy the object from class Order """
        # copy.deepcopy would also have copied the class-level `params`
        obj = Order()
        obj.type = self.type
        obj.args = self.args[:]
        return obj
    def setType(self, typ):
        """ Initialise args according to the given type """
        self.type = typ
        self.args = [None]*len(self.params[self.type])
    def load(self, dat, named):
        """ Initialise the order with an Xml structure """
        self.setType(ORDERTYPE.__members__[dat["type"].capitalize()])
        for key in dat.keys():
            if key != 'type':
                if isinstance(dat[key], dict) and dat[key].get("id") is not None:
                    # A reference to a named xml object is stored by ident.
                    self.args[self.params[self.type].index(key)] =\
                        str(named[dat[key]['id']].ident)
                else:
                    self.args[self.params[self.type].index(key)] = dat[key]
        return self
    def toBytes(self): # TODO: eliminate all the strs => param ids
        """ Bytes to send the order on the network """
        def addStr(s):
            # 2-byte big-endian length prefix, then the encoded string.
            assert len(s) < 1<<16
            byt.extend(len(s).to_bytes(2, 'big'))
            byt.extend(s.encode(CODING))
        byt = bytearray()
        byt.append(self.type)
        for arg in self.args: addStr(arg)
        return bytes(byt)
    def fromBytes(self, byt):
        """ Retrieve order from network bytes """
        def getStr():
            nonlocal i
            l = int.from_bytes(byt[i:i+2], 'big')
            s = byt[i+2 : i+2+l].decode(CODING)
            i += l+2
            return s
        self.type = byt[0]
        i = 1
        self.args = [getStr() for _ in range(len(self.params[self.type]))]
        # Round-trip check: re-encoding must reproduce the consumed bytes.
        assert self.toBytes() == byt[:i]
        return self, i
class OrderDispatcher:
    """ Treat orders for client and server """
    def __init__(self, world, handle, timer):
        self.world = world
        self.handle = handle  # async callback awaited as handle(target, event)
        self.timer = timer
    async def treat(self, emitter, order):
        """ Treat an order and return an order to retransmit if any.

        SECURITY NOTE(review): order fields are passed to eval()/exec()
        below; orders must only ever come from trusted (local xml) data.
        """
        world = self.world  # referenced by the eval()'d expressions below
        try:
            if order.type == ORDERTYPE.Set:
                target = emitter if order.target == "emitter" else eval(order.target)
                try:
                    val = target.contextEval(order.value)
                except:
                    print(emitter.case)
                    raise
                preval = target.params[order.param]
                if val != preval:
                    target.params[order.param] = val
                    returnOrder = order.copy()
                    returnOrder.value = str(val)
                    # Fire every watchdog registered for this new value.
                    for condition in target.conditions[order.param][val]:
                        await self.handle(condition.target, condition.event)
                    # XXX: not great
                    # Drop one-shot watchdogs that just fired.
                    target.conditions[order.param][val] = \
                        list(filter(lambda x: not x.once,
                                    target.conditions[order.param][val]))
                    if not target.conditions[order.param][val]:
                        del target.conditions[order.param][val]
                    return returnOrder
                return None
            if order.type == ORDERTYPE.Timer:
                # Timers pass their context along
                if emitter:
                    self.timer.add(emitter.contextEval(order.value), self.handle,
                                   args=[emitter, order.event])
                else:
                    self.timer.add(int(order.value), self.handle,
                                   args=[emitter, order.event])
                return None
            if order.type == ORDERTYPE.Event:
                if order.target:
                    # Redirect the event to an attribute of the emitter.
                    await self.handle(eval('emitter.'+order.target), order.event)
                else:
                    await self.handle(emitter, order.event)
                return None
            if order.type == ORDERTYPE.Create:
                new = world.ids[int(order.base)]
                if isinstance(emitter, int):
                    # A numeric emitter forces the new object's identifier.
                    obj = new.create(emitter)
                    world.numid[0] = emitter
                else:
                    obj = new.create()
                self.world.objects.append(obj)
                exec(order.init)
                if self.handle:
                    await self.handle(obj, order.event)
                return order
            if order.type == ORDERTYPE.Destroy:
                # TODO: would require finding every pointer to the object ??
                self.world.objects.remove(emitter)
                self.world.ids.pop(emitter.ident)
                return order
            if order.type == ORDERTYPE.Condition:
                if emitter.contextEval(order.value):
                    await self.handle(emitter, order.event)
                return None
            if order.type == ORDERTYPE.Move:
                eval(order.source+"."+order.param).remove(emitter)
                eval(order.dest+"."+order.param).append(emitter)
                return order
            if order.type == ORDERTYPE.Setobj: # TODO: improve, resembles Set
                # FIXME: crashes intermittently
                target = emitter if order.target == "emitter" else eval(order.target)
                val = target.contextEval(order.value)
                preval = eval("target."+order.param)
                if val != preval:
                    exec("target."+order.param+"=val")
                    return order
                return None
            if order.type == ORDERTYPE.Watchdog:
                # Value is evaluated once, now; the Set branch fires the event.
                val = emitter.contextEval(order.value)
                conds = eval(order.target).conditions[order.param][val]
                conds.append(Condition(emitter, order.event, order.once))
                return None
        except:
            print("An order failed !")
            print(emitter)
            print(order.type)
            print(order.args)
            raise
|
<filename>ROMS/pmacc/tools/post_tools/rompy/tags/rompy-0.1/rompy/plot_utils.py<gh_stars>0
import datetime as dt
import time
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.axes import Axes
from matplotlib.colors import Normalize, ListedColormap, LinearSegmentedColormap, hsv_to_rgb
from matplotlib.cm import ScalarMappable
from matplotlib import ticker
from mpl_toolkits.basemap import Basemap
import numpy as np
import matplotlib.pyplot as plt
import utils
__version__ = '0.1'
def time_series_formatter(x,pos=None):
return dt.datetime.fromtimestamp(x).strftime('%Y-%m-%d %H:%MZ')
def map_varname(v):
    """Translate a short ROMS variable name into a display label;
    raises KeyError for unknown names."""
    if v == 'temp':
        return 'Temperature'
    if v == 'salt':
        return 'Salinity'
    if v == 'U':
        return 'Velocity'
    raise KeyError(v)
def red_blue_cm():
    """Diverging blue -> white -> red colormap with 256 levels."""
    segments = {
        'red': [(0.0, 0.0, 0.0),
                (0.5, 1.0, 1.0),
                (1.0, 0.83, 0.83)],
        'green': [(0.0, 0.34, 0.34),
                  (0.5, 1.0, 1.0),
                  (1.0, 0.0, 0.0)],
        'blue': [(0.0, 0.75, 0.75),
                 (0.5, 1.0, 1.0),
                 (1.0, 0.0, 0.0)],
    }
    return LinearSegmentedColormap('red_blue_cm', segments, N=256)
def banas_cm(a,b,c,d):
    """Build a dark-blue/blue -> green -> yellow -> red/dark-red colormap.

    Control points a <= b <= c <= d are in data units: b..c is the main
    blue-to-red ramp (yellow at its midpoint); a..b and c..d add dark-blue /
    dark-red tails only when a != b or c != d respectively.
    """
    norm = Normalize(vmin=a,vmax=d,clip=False)
    # Matplotlib segment tuples are (position, value_below, value_above).
    cdict = {'red':[],'green':[],'blue':[]}
    if not a==b:
        # add dark blue
        cdict['red'].append((0., 0., 0.))
        cdict['green'].append((0., 0., 0.))
        cdict['blue'].append((0., 0., 0.25))
        # add blue
        cdict['red'].append((norm(b), 0., 0.))
        cdict['green'].append((norm(b), 0., 0.))
        cdict['blue'].append((norm(b), 1.0, 1.0))
    else:
        # No tail: start directly at full blue.
        cdict['red'].append((0., 0., 0.))
        cdict['green'].append((0., 0., 0.))
        cdict['blue'].append((0., 0., 1.0))
    # add green between blue and yellow
    cdict['red'].append((norm(b + (c-b)/4.0), 0., 0.))
    cdict['green'].append((norm(b + (c-b)/4.0), 1.0, 1.0))
    cdict['blue'].append((norm(b + (c-b)/4.0), 0., 0.))
    # add yellow in the middle
    cdict['red'].append((norm((b+c)/2.0), 1.0, 1.0))
    cdict['green'].append((norm((b+c)/2.0), 1.0, 1.0))
    cdict['blue'].append((norm((b+c)/2.0), 0., 0.))
    if not c==d:
        # add red
        cdict['red'].append((norm(c), 1.0, 1.0))
        cdict['green'].append((norm(c), 0., 0.))
        cdict['blue'].append((norm(c), 0., 0.))
        # add dark red
        cdict['red'].append((1.0, 0.25, 0.25))
        cdict['green'].append((1.0, 0., 0.))
        cdict['blue'].append((1.0, 0., 0.))
    else:
        # No tail: end directly at full red.
        cdict['red'].append((1.0, 1.0, 1.))
        cdict['green'].append((1.0, 0., 0.))
        cdict['blue'].append((1.0, 0., 0.))
    return LinearSegmentedColormap('banas_cm',cdict,N=100)
def banas_hsv_cm(a,b,c,d,N=100):
    """HSV-space variant of banas_cm with smooth hue/value ramps.

    Control points a <= b <= c <= d are in data units (a..b and c..d are
    darkened tails, b..c the main blue->yellow->red ramp). Saturation is
    dipped around the green hue to soften it. Returns a colormap with N
    levels; the internal sampling resolution is at least 100.
    """
    norm = Normalize(vmin=a,vmax=d,clip=False)
    cdict = {'red':[],'green':[],'blue':[]}
    # Sample at >= 100 points regardless of the requested N.
    if N >= 100:
        n = N
    else:
        n = 100
    aa = norm(a) # 0.0
    bb = norm(b)
    cc = norm(c)
    yy = 0.5*(bb+cc) # yellow is half way between blue and red
    dd = norm(d) # 1.0
    # HSV "value" levels for the ramp interior, ends and tails.
    center_value = 0.87
    end_value = 0.65
    tail_end_value = 0.3
    blue_hue = 0.55
    yellow_hue = 1./6.
    red_hue = 0.04
    green_hue = 1./3.
    # Normalized position where the hue ramp passes through green.
    gg = ((green_hue - blue_hue)/(yellow_hue - blue_hue))*(yy-bb) + bb
    green_desaturation_width = 0.67
    green_desaturation_amount = 0.5
    ii = np.linspace(0.,1.,n)
    hue = np.zeros(ii.shape)
    sat = np.ones(ii.shape)
    val = np.zeros(ii.shape)
    hsv = np.zeros((1,n,3))
    # Quadratic coefficients so value/hue ramps meet their ends smoothly.
    val_scaler = -(center_value - end_value)/((cc-yy)*(cc-yy))
    hue_scaler = -(blue_hue - yellow_hue)/((yy-bb)*(yy-bb))
    for i in range(len(ii)):
        if ii[i] < bb: # if true then aa is less than bb
            # Dark-blue tail: constant hue, value ramps up.
            #hue[i] = blue_hue
            hsv[0,i,0] = blue_hue
            #val[i] = tail_end_value*(1 - (ii[i]-aa)/(bb-aa) ) + end_value*( (ii[i]-aa)/(bb-aa) )
            hsv[0,i,2] = tail_end_value*(1 - (ii[i]-aa)/(bb-aa) ) + end_value*( (ii[i]-aa)/(bb-aa) )
        elif ii[i] <= yy:
            # Blue -> yellow: quadratic hue, linear value ramp.
            #hsv[0,i,0] = blue_hue*(1 - (ii[i]-bb)/(yy-bb) ) + yellow_hue*( (ii[i]-bb)/(yy-bb) )
            hsv[0,i,0] = hue_scaler*(ii[i] -2*bb + yy)*(ii[i] - yy)+yellow_hue
            hsv[0,i,2] = end_value*(1 - (ii[i]-bb)/(yy-bb) ) + center_value*( (ii[i]-bb)/(yy-bb) )
        elif ii[i] <= cc:
            # Yellow -> red: linear hue, quadratic value ramp.
            hsv[0,i,0] = yellow_hue*(1 - (ii[i]-yy)/(cc-yy) ) + red_hue*( (ii[i]-yy)/(cc-yy) )
            #hsv[0,i,2] = center_value*(1 - (ii[i]-yy)/(cc-yy) ) + end_value*( (ii[i]-yy)/(cc-yy) )
            hsv[0,i,2] = val_scaler*(ii[i] -2*yy + cc)*(ii[i] - cc)+end_value
        elif ii[i] <= dd:
            # Dark-red tail: constant hue, value ramps down.
            hsv[0,i,0] = red_hue
            hsv[0,i,2] = end_value*(1 - (ii[i]-cc)/(dd-cc) ) + tail_end_value*( (ii[i]-cc)/(dd-cc) )
        # Gaussian desaturation dip centered on the green hue position.
        hsv[0,i,1] = 1.0 - green_desaturation_amount * np.exp(-np.power(3.0*(ii[i]-gg)/((cc-bb)*green_desaturation_width),2.0))
    # plt.plot(np.linspace(a,d,n),hsv[0,:,0],'r',np.linspace(a,d,n),hsv[0,:,1],'g',np.linspace(a,d,n),hsv[0,:,2],'b')
    # plt.show()
    rgb = hsv_to_rgb(hsv)
    # Convert the sampled RGB curve into matplotlib segment-data form.
    cdict['red'].append((0.,0.,rgb[0,0,0]))
    cdict['green'].append((0.,0.,rgb[0,0,1]))
    cdict['blue'].append((0.,0.,rgb[0,0,2]))
    for j in range(len(ii)-2):
        i = j+1
        cdict['red'].append((ii[i],rgb[0,i,0],rgb[0,i+1,0]))
        cdict['green'].append((ii[i],rgb[0,i,1],rgb[0,i+1,1]))
        cdict['blue'].append((ii[i],rgb[0,i,2],rgb[0,i+1,2]))
    cdict['red'].append((1.0,rgb[0,-1,0],rgb[0,-1,0]))
    cdict['green'].append((1.0,rgb[0,-1,1],rgb[0,-1,1]))
    cdict['blue'].append((1.0,rgb[0,-1,2],rgb[0,-1,2]))
    return LinearSegmentedColormap('banas_cm',cdict,N=N)
def make_cmap_sm_norm(d=None, clim=None, cmap=None):
    """Build (colormap, ScalarMappable, Normalize) for plotting.

    `cmap` may be the name 'red_blue', 'banas_cm' or 'banas_hsv_cm' (or an
    already-built colormap, passed through unchanged). Color limits come
    from `clim` -- either a (min, max) pair or the 4-point (a, b, c, d) form
    used by the banas colormaps -- falling back to the data range of `d`
    when clim is None (in which case `d` must be provided).
    """
    # Resolve the color limits once up front. BUGFIX: the original indexed
    # clim unconditionally when building Normalize, crashing for clim=None.
    if clim is None:
        vmin = np.min(d[:])
        vmax = np.max(d[:])
    else:
        vmin = clim[0]
        vmax = clim[-1]
    if cmap == 'red_blue':
        cmap = red_blue_cm()
    if cmap == 'banas_cm':
        if clim is None:
            cmap = banas_cm(vmin, vmin, vmax, vmax)
        elif len(clim) == 2:
            cmap = banas_cm(clim[0], clim[0], clim[1], clim[1])
        elif len(clim) == 4:
            cmap = banas_cm(clim[0], clim[1], clim[2], clim[3])
    elif cmap == 'banas_hsv_cm':
        if clim is None:
            cmap = banas_hsv_cm(vmin, vmin, vmax, vmax)
        elif len(clim) == 2:
            cmap = banas_hsv_cm(clim[0], clim[0], clim[1], clim[1])
        elif len(clim) == 4:
            cmap = banas_hsv_cm(clim[0], clim[1], clim[2], clim[3])
    norm = Normalize(vmin=vmin, vmax=vmax, clip=False)
    sm = ScalarMappable(norm=norm, cmap=cmap)
    sm.set_clim(vmin=vmin, vmax=vmax)
    sm.set_array(np.array([0]))
    return cmap, sm, norm
def plot_surface(x, y, data, filename='/Users/lederer/tmp/rompy.tmp.png'):
    """Render `data` on the (x, y) grid as a pcolormesh and write a PNG."""
    print('Making plot')
    fig = Figure(facecolor='white', figsize=(12.0, 12.0))
    axes = fig.add_subplot(111)
    axes.pcolormesh(x, y, data)
    axes.axis('tight')
    axes.set_aspect('equal')
    axes.grid()
    FigureCanvas(fig).print_png(filename)
def plot_map(lon,lat,data,filename='/Users/lederer/tmp/rompy.map.png',resolution='h',clim=None,cmap='banas_hsv_cm',title=None, caxis_label=None):
    """Write a two-panel PNG map of `data` on (lon, lat): the full domain on
    the left and a fixed Puget Sound close-up on the right, sharing one
    colorbar. `resolution` is the Basemap coastline resolution for the left
    panel (the right panel always uses 'f').
    """
    fig = Figure(facecolor='white',figsize=(12.0,9.0))
    # ax = fig.add_subplot(111)
    longest_side_size = 24.0
    #ax = fig.add_axes((0.,0.,1.,1.),axisbg='grey')
    cmap,sm,norm = make_cmap_sm_norm(d=data,clim=clim,cmap=cmap)
    # Left: full domain; right: Puget Sound zoom; far right: colorbar axes.
    ax1 = fig.add_axes((0.1,0.1,0.4,0.8),axisbg='grey')
    ax2 = fig.add_axes((0.5,0.1,0.4,0.8),axisbg='grey')
    cax = fig.add_axes([0.9, 0.1, 0.02, 0.8],frameon=False)
    # Bounding box of the data for the left panel.
    lllat = np.min(lat)
    urlat = np.max(lat)
    lllon = np.min(lon)
    urlon = np.max(lon)
    # puget sound bounding box
    psbb_lllat = 47.0
    psbb_urlat = 48.5
    psbb_lllon = -123.2
    psbb_urlon = -122.1
    # print(lllat,urlat,lllon,urlon)
    m1 = Basemap(projection='merc',llcrnrlat=lllat,urcrnrlat=urlat,llcrnrlon=lllon,urcrnrlon=urlon,resolution=resolution,ax=ax1)
    m2 = Basemap(projection='merc',llcrnrlat=psbb_lllat,urcrnrlat=psbb_urlat,llcrnrlon=psbb_lllon,urcrnrlon=psbb_urlon,resolution='f',ax=ax2)
    # Project lon/lat into each map's coordinate system.
    x1,y1 = m1(*(lon,lat))
    x2,y2 = m2(*(lon,lat))
    # Code to make the map fit snuggly with the png
    #print(np.max(x), np.min(x), np.max(y),np.min(y))
    #	width = np.max(x) - np.min(x)
    #	height = np.max(y) - np.min(y)
    #	if width >= height:
    #		fig.set_size_inches(longest_side_size, (height/width)*longest_side_size)
    #	else:
    #		fig.set_size_inches((width/height)*longest_side_size, longest_side_size)
    #	ax.set_position([0.,0.,1.,1.])
    #	bbox = ax.get_position()
    #	print(bbox.xmin, bbox.xmax, bbox.ymin, bbox.ymax)
    #
    #	fig.set_size_inches((bbox.xmax - bbox.xmin)*longest_side_size, (bbox.ymax - bbox.ymin)*longest_side_size)
    #	ax.set_position([0.,0.,1.,1.])
    #	bbox = ax.get_position()
    #	print(bbox.xmin, bbox.xmax, bbox.ymin, bbox.ymax)
    #
    #
    #	if clim==None:
    #		cmap = banas_hsv_cm(np.min(data[:]),np.min(data[:]),np.max(data[:]),np.max(data[:]))
    #		norm = Normalize(vmin=np.min(data[:]),vmax=np.max(data[:]),clip=False)
    #	elif len(clim) == 2:
    #		cmap = banas_hsv_cm(clim[0],clim[0],clim[1],clim[1],N=20)
    #		norm = Normalize(vmin=clim[0],vmax=clim[-1],clip=False)
    #	elif len(clim) == 4:
    #		cmap = banas_hsv_cm(clim[0],clim[1],clim[2],clim[3])
    #		norm = Normalize(vmin=clim[0],vmax=clim[-1],clip=False)
    pcm1 = m1.pcolormesh(x1,y1,data,cmap=cmap,norm=norm)
    m1.drawcoastlines(linewidth=0.5)
    pcm2 = m2.pcolormesh(x2,y2,data,cmap=cmap,norm=norm)
    m2.drawcoastlines(linewidth=0.5)
    my_colorbar = fig.colorbar(sm,cax=cax)
    if not caxis_label == None:
        my_colorbar.set_label(caxis_label)
    if not title == None:
        ax1.set_title(title)
    FigureCanvas(fig).print_png(filename)
def plot_profile(data, depth, filename='/Users/lederer/tmp/rompy.profile.png'):
    """Plot a vertical profile (`data` against `depth`) and write it as a PNG."""
    fig = Figure()
    axes = fig.add_subplot(111)
    axes.plot(data, depth)
    axes.grid()
    FigureCanvas(fig).print_png(filename)
def plot_mickett(coords,data,varname='',region='',filename='/Users/lederer/tmp/rompy.mickett.png',n=1,x_axis_style='kilometers',x_axis_offset=0,clim=None,cmap=None,labeled_contour_gap=None):
	"""Plot a two-panel "curtain" section of ROMS output to a PNG.

	The top panel (ax1) shows only the upper water column (y limited to
	-20..2 m); the bottom panel (ax2) shows the full water column.  Both
	panels draw a filled contour plot plus thin unfilled contours of the
	same data, and share one colorbar drawn into cax.

	coords -- dict with at least 'zm' (2-D depth array); converted to an
	          along-track kilometer axis via utils.coords_to_km.
	data   -- 2-D field sampled on the curtain, same shape as coords['zm'].
	n      -- stride used to mark station locations along the track.
	x_axis_style -- 'kilometers' or 'stations' tick labelling.
	x_axis_offset -- km offset applied so tick labels can start mid-track.
	labeled_contour_gap -- if given, also draw labeled solid black contours
	          every this many data units.
	          NOTE(review): when labeled_contour_gap is not None, clim must
	          also be a sequence (np.arange(clim[0], clim[-1], ...) below
	          raises a TypeError if clim is None) — confirm with callers.
	"""
	fig = Figure(facecolor='white')
	fontsize = 8
	# cmap/sm/norm are rebuilt together so the contour plots and the
	# colorbar ScalarMappable agree on color scaling.
	cmap,sm,norm = make_cmap_sm_norm(d=data,clim=clim,cmap=cmap)
	ax1 = fig.add_axes([0.1, 0.55, 0.75, 0.4])
	ax2 = fig.add_axes([0.1, 0.1, 0.75, 0.4])
	cax = fig.add_axes([0.9, 0.1, 0.02, 0.8],frameon=False)
	x_axis_as_km = utils.coords_to_km(coords)
	station_locations = x_axis_as_km[0:-1:n]
	#
	# if not clim == None:
	# 	norm = Normalize(vmin=clim[0],vmax=clim[-1],clip=False)
	# 	sm = ScalarMappable(norm=norm,cmap=cmap)
	# 	sm.set_clim(vmin=clim[0],vmax=clim[-1])
	# 	sm.set_array(np.array([0]))
	# else:
	# 	norm = None
	#
	# Top panel: filled contours plus thin line contours of the same field.
	my_plot11 = ax1.contourf(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,norm=norm,cmap=cmap)
	my_plot12 = ax1.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,linewidths=1,linestyle=None,norm=norm,cmap=cmap)
	if labeled_contour_gap is not None:
		# Use integer labels when the gap is a whole number.
		if int(labeled_contour_gap) == labeled_contour_gap:
			contour_label_fmt = '%d'
		else:
			contour_label_fmt = '%1.2f'
		solid_contours = np.arange(clim[0],clim[-1],labeled_contour_gap)
		# ax1_xlim = ax1.get_xlim()
		my_plot13 = ax1.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,solid_contours,colors='k',linewidths=0.5)
		ax1.clabel(my_plot13,inline=True,fmt=contour_label_fmt,fontsize=fontsize)
	# ax1.set_xlim(ax1_xlim)
	# Mark station locations just above the surface as grey triangles.
	my_plot14 = ax1.plot(station_locations, 1.5*np.ones(len(station_locations)),'v',color='grey')
	# Grey out everything below the bottom of the curtain.
	ax1.fill_between(x_axis_as_km,coords['zm'][0,:],ax1.get_ylim()[0],color='grey')
	# ax1.set_ylim((-20,ax1.get_ylim()[1]))
	ax1.set_ylim((-20,2))
	ax1.set_xlim((0,x_axis_as_km[-1]))
	for yticklabel in ax1.get_yticklabels():
		yticklabel.set_fontsize(fontsize)
	# Bottom panel: same plots over the full water column.
	my_plot21 = ax2.contourf(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,norm=norm,cmap=cmap)
	my_plot22 = ax2.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,linewidths=1,linestyle=None,norm=norm,cmap=cmap)
	if labeled_contour_gap is not None:
		# ax2_xlim = ax2.get_xlim()
		my_plot23 = ax2.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,solid_contours,colors='k',linewidths=0.5)
		ax2.clabel(my_plot23,inline=True,fmt=contour_label_fmt,fontsize=fontsize)
	# ax2.set_xlim = ax2_xlim
	# print(ax2.get_ylim())
	# ax2.fill_between(x_axis_as_km,coords['zm'][0,:],ax2.get_ylim()[0],color='grey')
	ax2.fill_between(x_axis_as_km,coords['zm'][0,:],-1000.0,color='grey')
	# ax2.set_ylim(ax2.get_ylim()[0],2)
	# print(ax2.get_ylim())
	ax2.set_ylim((np.min(coords['zm'][:])-20.0),2)
	# print(ax2.get_ylim())
	ax2.set_xlim((0,x_axis_as_km[-1]))
	for yticklabel in ax2.get_yticklabels():
		yticklabel.set_fontsize(fontsize)
	# if clim == None:
	# 	sm = my_plot11
	my_colorbar = fig.colorbar(sm,cax=cax)
	if labeled_contour_gap is not None:
		# Echo the labeled contour levels as lines on the colorbar.
		my_colorbar.add_lines(my_plot23)
	ax1.set_title('%s %s from a ROMS run' % (region,varname))
	ax1.set_ylabel('depth in meters',position=(0.05,0))
	# ax1.set_xticks(10*np.arange(x_axis_as_km[-1]/10))
	# Top panel gets unlabeled ticks at station locations only.
	ax1.set_xticks(station_locations)
	ax1.set_xticklabels('')
	if x_axis_style == 'kilometers' or x_axis_style == 'kilometer':
		#tick_list = x_axis_as_km[::n]
		#ax2.set_xticks(tick_list)
		#ax2.set_xticklabels([int(tick) for tick in tick_list],size=fontsize)
		td = 10 #tick_distance
		# Shift tick positions so the labels land on multiples of td after
		# accounting for x_axis_offset.
		ax2.set_xticks(td*np.arange(x_axis_as_km[-1]/td) + (x_axis_offset % td))
		ax2.set_xticklabels([int(num) for num in np.arange(-int(x_axis_offset - x_axis_offset % td),x_axis_as_km[-1],td)])
		for xticklabel in ax2.get_xticklabels():
			xticklabel.set_fontsize(fontsize)
		ax2.set_xlabel('Kilometers')
	elif x_axis_style == 'stations' or x_axis_style == 'station':
		# Label every n-th station using the region's canonical station list.
		if region == 'Hood Canal':
			tick_list = x_axis_as_km[::n]
			ax2.set_xticks(tick_list)
			ax2.set_xticklabels(utils.hood_canal_station_list(),size=fontsize)
			ax2.set_xlabel('Station ID')
		elif region == 'Main Basin':
			tick_list = x_axis_as_km[::n]
			ax2.set_xticks(tick_list)
			ax2.set_xticklabels(utils.main_basin_station_list(),size=fontsize)
			ax2.set_xlabel('Station ID')
		else:
			ax2.set_xticks(x_axis_as_km)
			ax2.set_xticklabels('')
			ax2.set_xlabel('Kilometers')
	FigureCanvas(fig).print_png(filename)
def plot_time_series_profile(t,z,d,filename='/Users/lederer/tmp/rompy.time_series_profile.png',clim=None,cmap='banas_hsv_cm',varname=None, title=None, caxis_label=None):
	"""Plot a two-panel time/depth section of d(t, z) to a PNG.

	Top panel (ax1): upper water column only (y in -20..2).  Bottom panel
	(ax2): full depth range.  Both panels use the same filled + line
	contours and share the colorbar in cax.

	t -- 2-D array of POSIX timestamps (seconds); t[0][0]/t[-1][-1] are
	     used as the time axis limits.
	z -- 2-D depth array; d -- 2-D data array, same shape.
	"""
	fontsize = 8
	cmap,sm,norm = make_cmap_sm_norm(d=d,clim=clim,cmap=cmap)
	fig = Figure(facecolor='white')
	ax1 = fig.add_axes([0.1, 0.55, 0.75, 0.32])
	ax2 = fig.add_axes([0.1, 0.18, 0.75, 0.32])
	cax = fig.add_axes([0.9, 0.18, 0.02, 0.69],frameon=False)
	my_plot11 = ax1.contourf(t,z,d,100,norm=norm,cmap=cmap)
	my_plot12 = ax1.contour(t,z,d,100,linewidths=1,linestyle=None,norm=norm,cmap=cmap)
	my_plot21 = ax2.contourf(t,z,d,100,norm=norm,cmap=cmap)
	my_plot22 = ax2.contour(t,z,d,100,linewidths=1,linestyle=None,norm=norm,cmap=cmap)
	my_colorbar = fig.colorbar(sm,cax=cax)
	if not caxis_label == None:
		my_colorbar.set_label(caxis_label)
	ax1.set_ylim(-20,2)
	ax1.set_xlim(t[0][0],t[-1][-1])
	# lets pick some x ticks that aren't stupid
	# Choose tick datetimes by stepping back from the end of the window on
	# round boundaries: every 6 h (<48 h window), daily (<8 d), weekly
	# (<50 d), otherwise just the endpoints.
	xmin_dt = dt.datetime.fromtimestamp(t[0][0])
	xmax_dt = dt.datetime.fromtimestamp(t[-1][-1])
	time_window = xmax_dt -xmin_dt
	if (time_window) < dt.timedelta(hours=48):
		date_list = []
		# Snap to the most recent whole hour, then step back 6 hours at a time.
		next_time = xmax_dt- dt.timedelta(seconds = xmax_dt.minute*60 + xmax_dt.second)
		while next_time >= xmin_dt:
			date_list.append(next_time)
			next_time = next_time - dt.timedelta(hours=6)
	elif (time_window) < dt.timedelta(days=8):
		date_list = []
		# Snap to the most recent midnight, then step back one day at a time.
		next_time = xmax_dt - dt.timedelta(seconds = (xmax_dt.hour*60 + xmax_dt.minute)*60 + xmax_dt.second)
		while next_time >= xmin_dt:
			date_list.append(next_time)
			next_time = next_time - dt.timedelta(days=1)
	elif (time_window) < dt.timedelta(days=50):
		date_list = []
		# Snap to the most recent midnight, then step back one week at a time.
		next_time = xmax_dt - dt.timedelta(seconds = (xmax_dt.hour*60 + xmax_dt.minute)*60 + xmax_dt.second)
		while next_time >= xmin_dt:
			date_list.append(next_time)
			next_time = next_time - dt.timedelta(days=7)
	else :
		date_list = [xmin_dt, xmax_dt]
	# Convert the chosen datetimes back to POSIX timestamps for the locator.
	x_tick_list = []
	for date in date_list:
		x_tick_list.append(time.mktime(date.timetuple()))
	ax2.xaxis.set_major_locator(ticker.FixedLocator(x_tick_list))
	for yticklabel in ax1.get_yticklabels():
		yticklabel.set_fontsize(fontsize)
	# Only the bottom panel carries time labels.
	ax1.set_xticklabels('')
	ax2.set_xlim(t[0][0],t[-1][-1])
	ax2.set_ylim(np.min(z[0,:]),np.max(z[-1,:]))
	for yticklabel in ax2.get_yticklabels():
		yticklabel.set_fontsize(fontsize)
	locs = ax2.get_xticks()
	new_labels = []
	# time_series_formatter renders timestamps as readable labels.
	ax2.xaxis.set_major_formatter(ticker.FuncFormatter(time_series_formatter))
	for label in ax2.get_xticklabels():
		label.set_ha('right')
		label.set_rotation(30)
	if title == None or title == '':
		ax1.set_title('%s Over Time at a Point'% map_varname(varname))
	else:
		ax1.set_title(title)
	FigureCanvas(fig).print_png(filename)
def plot_parker(coords,data,varname='',title=None,region='',filename='/Users/lederer/tmp/rompy.mickett.png',n=1,x_axis_style='kilometers',resolution='i',x_axis_offset=0,clim=None,cmap=None,labeled_contour_gap=None, caxis_label=None):
	"""Plot a curtain section like plot_mickett, plus a Basemap inset map.

	Panels: ax1 = top 20 m of the water column, ax2 = full column, ax3 =
	inset map of Puget Sound showing the curtain track, cax = colorbar.

	resolution -- Basemap coastline resolution for the inset map.
	labeled_contour_gap -- if given, draw labeled solid black contours every
	        this many data units.
	        NOTE(review): as in plot_mickett, this path indexes clim
	        (np.arange(clim[0], clim[-1], ...)), so clim must be a sequence
	        whenever labeled_contour_gap is set — confirm with callers.
	See plot_mickett for the meaning of the shared parameters.
	"""
	fig = Figure(facecolor='white',figsize=(12.0,9.0))
	fontsize = 8
	cmap,sm,norm = make_cmap_sm_norm(d=data,clim=clim,cmap=cmap)
	ax1 = fig.add_axes([0.1, 0.55, 0.65, 0.4]) # top 20 meters
	ax2 = fig.add_axes([0.1, 0.1, 0.65, 0.4]) # full column
	ax3 = fig.add_axes([0.7, 0.55, 0.3, 0.4],axis_bgcolor='white')#'#298FAF') # map of domain containing curtain
	cax = fig.add_axes([0.84, 0.1, 0.02, 0.4],frameon=False) # subplot for the color axis
	x_axis_as_km = utils.coords_to_km(coords)
	station_locations = x_axis_as_km[0:-1:n]
	# Top panel: filled contours plus thin line contours of the same field.
	my_plot11 = ax1.contourf(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,norm=norm,cmap=cmap)
	my_plot12 = ax1.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,linewidths=1,linestyle=None,norm=norm,cmap=cmap)
	if labeled_contour_gap is not None:
		# Integer labels when the gap is a whole number.
		if int(labeled_contour_gap) == labeled_contour_gap:
			contour_label_fmt = '%d'
		else:
			contour_label_fmt = '%1.2f'
		solid_contours = np.arange(clim[0],clim[-1],labeled_contour_gap)
		my_plot13 = ax1.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,solid_contours,colors='k',linewidths=0.5)
		ax1.clabel(my_plot13,inline=True,fmt=contour_label_fmt,fontsize=fontsize)
	# Grey station markers just above the surface.
	my_plot14 = ax1.plot(station_locations, 1.5*np.ones(len(station_locations)),'v',color='grey')
	ax1.fill_between(x_axis_as_km,coords['zm'][0,:],ax1.get_ylim()[0],color='grey')
	ax1.set_ylim((-20,2))
	ax1.set_xlim((0,x_axis_as_km[-1]))
	for yticklabel in ax1.get_yticklabels():
		yticklabel.set_fontsize(fontsize)
	# Bottom panel: same plots over the full water column.
	my_plot21 = ax2.contourf(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,norm=norm,cmap=cmap)
	my_plot22 = ax2.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,100,linewidths=1,linestyle=None,norm=norm,cmap=cmap)
	if labeled_contour_gap is not None:
		my_plot23 = ax2.contour(np.tile(x_axis_as_km,(coords['zm'].shape[0],1)),coords['zm'],data,solid_contours,colors='k',linewidths=0.5)
		ax2.clabel(my_plot23,inline=True,fmt=contour_label_fmt,fontsize=fontsize)
	ax2.fill_between(x_axis_as_km,coords['zm'][0,:],-1000.0,color='grey')
	ax2.set_ylim((np.min(coords['zm'][:])-20.0),2)
	ax2.set_xlim((0,x_axis_as_km[-1]))
	for yticklabel in ax2.get_yticklabels():
		yticklabel.set_fontsize(fontsize)
	my_colorbar = fig.colorbar(sm,cax=cax)
	if not caxis_label == None:
		my_colorbar.set_label(caxis_label)
	if labeled_contour_gap is not None:
		# Echo the labeled contour levels as lines on the colorbar.
		my_colorbar.add_lines(my_plot23)
	if title==None:
		ax1.set_title('%s %s from a ROMS run' % (region,varname))
	else:
		ax1.set_title(title)
	ax1.set_ylabel('depth in meters',position=(0.05,0))
	ax1.set_xticks(station_locations)
	ax1.set_xticklabels('')
	if x_axis_style == 'kilometers' or x_axis_style == 'kilometer':
		td = 10 #tick_distance
		# Place ticks on multiples of td, shifted so labels reflect the
		# km offset of the start of the curtain.
		left_most_tick_label = -x_axis_offset + (x_axis_offset % td)
		left_most_tick = left_most_tick_label + x_axis_offset
		ax2.set_xticks(np.arange(left_most_tick,x_axis_as_km[-1],td))
		ax2.set_xticklabels([int(num) for num in np.arange(left_most_tick_label, x_axis_as_km[-1],td)])
		# ax2.set_xticks(td*np.arange(x_axis_as_km[-1]/td) + (x_axis_offset % td))
		# ax2.set_xticklabels([int(num) for num in np.arange(-int(x_axis_offset - x_axis_offset % td),x_axis_as_km[-1],td)])
		for xticklabel in ax2.get_xticklabels():
			xticklabel.set_fontsize(fontsize)
		ax2.set_xlabel('Kilometers')
	elif x_axis_style == 'stations' or x_axis_style == 'station':
		# Label every n-th station using the region's canonical station list.
		if region == 'Hood Canal':
			tick_list = x_axis_as_km[::n]
			ax2.set_xticks(tick_list)
			ax2.set_xticklabels(utils.hood_canal_station_list(),size=fontsize)
			ax2.set_xlabel('Station ID')
		elif region == 'Main Basin':
			tick_list = x_axis_as_km[::n]
			ax2.set_xticks(tick_list)
			ax2.set_xticklabels(utils.main_basin_station_list(),size=fontsize)
			ax2.set_xlabel('Station ID')
		else:
			ax2.set_xticks(x_axis_as_km)
			ax2.set_xticklabels('')
			ax2.set_xlabel('Kilometers')
	# make map in the top right corner
	# these lat lon values are derived from the curtain defined for the plot
	# lllat = np.min(coords['ym'])
	# urlat = np.max(coords['ym'])
	# lllon = np.min(coords['xm'])
	# urlon = np.max(coords['xm'])
	# lat lon values for the inset map show a close-up of the Puget Sound
	lllat = 47.0
	urlat = 48.5
	lllon = -123.3
	urlon = -122.2
	m = Basemap(projection='merc',llcrnrlat=lllat,urcrnrlat=urlat,llcrnrlon=lllon,urcrnrlon=urlon,resolution=resolution,ax=ax3)
	# Project the curtain's lon/lat track into map coordinates.
	x,y = m(*(coords['xm'],coords['ym']))
	# pcm = m.plot(x,y,'r')
	m.drawcoastlines(linewidth=0.5)
	m.fillcontinents(color='#ECECEC')
	# Red line = curtain track; black dots = every n-th station.
	pcm1 = m.plot(x,y,'r',linewidth=0.5)
	pcm2 = m.plot(x[0:-1:n],y[0:-1:n],'.k')
	FigureCanvas(fig).print_png(filename)
|
<filename>neighapp/models.py
from django.db import models
from datetime import datetime as dt
from django.contrib.auth.models import User
from cloudinary.models import CloudinaryField
class Location(models.Model):
    """A named geographic location with creation/update timestamps."""
    name = models.CharField(max_length=30)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    # save location
    def save_location(self):
        """Persist this location to the database."""
        self.save()

    def __str__(self):
        return self.name
class Neighbourhood(models.Model):
    """A neighbourhood with a name, location, description, photo and admin."""
    neighbourhood_name = models.CharField(max_length=200)
    neighbourhood_location = models.CharField(max_length=200)
    neighbourhood_description = models.CharField(max_length=500)
    neighbourhood_photo = CloudinaryField('photo', default='photo')
    admin = models.ForeignKey(User, on_delete=models.CASCADE, related_name='admin')
    created_at = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.neighbourhood_name

    def save_neighbourhood(self):
        """Persist this neighbourhood to the database."""
        self.save()

    def delete_neighbourhood(self):
        """Delete this neighbourhood from the database."""
        self.delete()

    @classmethod
    def find_neighbourhood(cls, neighbourhood_id):
        """Return a queryset of neighbourhoods with the given id."""
        return cls.objects.filter(id=neighbourhood_id)

    @property
    def occupants_count(self):
        """Number of users living in this neighbourhood."""
        # NOTE(review): 'neighbourhood_users' is presumably a reverse
        # related_name declared on another model — confirm it exists.
        return self.neighbourhood_users.count()

    def update_neighbourhood(self, neighbourhood_name=None):
        """Update the neighbourhood's name and persist the change.

        Fix: the original reassigned the current name to itself and never
        called save(), making the method a no-op.  An optional new name may
        now be supplied; when omitted, the current field values are saved.
        """
        if neighbourhood_name is not None:
            self.neighbourhood_name = neighbourhood_name
        self.save()
class Business(models.Model):
    """A business owned by a user and based in a neighbourhood."""
    name = models.CharField(max_length=50)
    email = models.EmailField(max_length=50)
    description = models.TextField(blank=True, null=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    neighbourhood = models.ForeignKey(Neighbourhood, on_delete=models.CASCADE)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    # create business
    def create_business(self):
        """Persist this business to the database."""
        self.save()

    # delete business
    def delete_business(self):
        """Delete this business from the database."""
        self.delete()

    # update business
    def update_business(self):
        """Persist any modified fields.

        Fix: the original called self.update(), but Django model instances
        have no update() method (it exists only on QuerySet), so the call
        raised AttributeError; save() is the instance-level equivalent.
        """
        self.save()

    # search business by name (case-insensitive substring match)
    @classmethod
    def search_by_name(cls, search_term):
        """Return businesses whose name contains search_term."""
        business = cls.objects.filter(name__icontains=search_term)
        return business

    # find business by id
    @classmethod
    def find_business(cls, id):
        """Return the business with the given id (raises DoesNotExist)."""
        business = cls.objects.get(id=id)
        return business

    def __str__(self):
        return self.name
# contact class model
class Contact(models.Model):
    """A contact entry belonging to a user within a neighbourhood."""
    name = models.CharField(max_length=50)
    email = models.EmailField(max_length=50, blank=True, null=True)
    phone = models.CharField(max_length=50)
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    neighbourhood = models.ForeignKey(
        Neighbourhood, on_delete=models.CASCADE, default=1)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    # creating contact
    def create_contact(self):
        """Persist this contact to the database."""
        self.save()

    # deleting contact
    def delete_contact(self):
        """Delete this contact from the database."""
        self.delete()

    # updating contact
    def update_contact(self):
        """Persist any modified fields.

        Fix: the original called self.update(), which does not exist on
        Django model instances (update() is a QuerySet method), so the call
        raised AttributeError; save() is the instance-level equivalent.
        """
        self.save()

    # searching for contact by name (case-insensitive substring match)
    @classmethod
    def search_by_name(cls, search_term):
        """Return contacts whose name contains search_term."""
        contact = cls.objects.filter(name__icontains=search_term)
        return contact

    # find contact by id
    @classmethod
    def find_contact(cls, id):
        """Return the contact with the given id (raises DoesNotExist)."""
        contact = cls.objects.get(id=id)
        return contact

    def __str__(self):
        return self.name
class Profile(models.Model):
    """Per-user profile with an optional neighbourhood and avatar."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=50)
    email = models.EmailField(max_length=100, blank=True, null=True)
    neighbourhood = models.ForeignKey(Neighbourhood, on_delete=models.CASCADE, blank=True, null=True)
    profile_pic = CloudinaryField('image')
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    def save_profile(self):
        """Persist this profile to the database."""
        self.save()

    def __str__(self):
        return self.name
class Post(models.Model):
    """A user-authored post, optionally attached to a neighbourhood."""
    user = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
    title = models.CharField(max_length=150)
    image = CloudinaryField('images')
    content = models.TextField(max_length=300, blank=True)
    timestamp = models.DateTimeField(auto_now_add=True)
    # NOTE(review): default='' on a ForeignKey is unusual; since the field
    # is null=True/blank=True it is effectively optional — confirm intent.
    neighbourhood = models.ForeignKey(
        Neighbourhood, on_delete=models.CASCADE, default='', null=True, blank=True)

    def create_post(self):
        """Persist this post to the database."""
        self.save()

    def save_post(self):
        """Persist this post to the database (alias of create_post)."""
        return self.save()

    def delete_post(self):
        """Delete this post from the database."""
        self.delete()

    def __str__(self):
        # Fix: the original line ended with a stray '|' token
        # ("return self.title |"), which is a syntax error.
        return self.title
# -*- coding: utf-8 -*-
#
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import os
import shutil
import torch
from dgl.data.utils import download, _get_dgl_url, extract_archive
from dgllife.utils.complex_to_graph import *
from dgllife.utils.io import load_molecule
def remove_dir(dir):
    """Best-effort removal of the directory *dir* and its contents.

    Silently does nothing if *dir* does not exist, is not a directory, or
    cannot be removed.  Using ignore_errors=True avoids the race between a
    separate isdir() check and the removal (the original's try/except
    OSError around rmtree guarded the same race less directly).
    """
    shutil.rmtree(dir, ignore_errors=True)
def test_acnn_graph_construction_and_featurization():
    """End-to-end test of ACNN heterograph construction from a PDB complex.

    Downloads example molecule files, loads a protein pocket and a ligand
    (with and without hydrogens), and checks node/edge counts and feature
    fields of the constructed graph under several parameter settings.
    Requires network access for the example-data download.
    """
    # Start from a clean slate in case a previous run left files behind.
    remove_dir('tmp1')
    remove_dir('tmp2')
    url = _get_dgl_url('dgllife/example_mols.tar.gz')
    local_path = 'tmp1/example_mols.tar.gz'
    download(url, path=local_path)
    extract_archive(local_path, 'tmp2')
    pocket_mol, pocket_coords = load_molecule(
        'tmp2/example_mols/example.pdb', remove_hs=True)
    ligand_mol, ligand_coords = load_molecule(
        'tmp2/example_mols/example.pdbqt', remove_hs=True)
    pocket_mol_with_h, pocket_coords_with_h = load_molecule(
        'tmp2/example_mols/example.pdb', remove_hs=False)
    remove_dir('tmp1')
    remove_dir('tmp2')
    # Test default case
    g = ACNN_graph_construction_and_featurization(ligand_mol,
                                                  pocket_mol,
                                                  ligand_coords,
                                                  pocket_coords)
    assert set(g.ntypes) == set(['protein_atom', 'ligand_atom'])
    assert set(g.etypes) == set(['protein', 'ligand', 'complex', 'complex', 'complex', 'complex'])
    # Expected counts below are specific to the example.pdb/example.pdbqt pair.
    assert g.number_of_nodes('protein_atom') == 286
    assert g.number_of_nodes('ligand_atom') == 21
    assert g.number_of_edges('protein') == 3432
    assert g.number_of_edges('ligand') == 252
    assert g.number_of_edges(('protein_atom', 'complex', 'protein_atom')) == 3349
    assert g.number_of_edges(('ligand_atom', 'complex', 'ligand_atom')) == 131
    assert g.number_of_edges(('protein_atom', 'complex', 'ligand_atom')) == 121
    assert g.number_of_edges(('ligand_atom', 'complex', 'protein_atom')) == 83
    assert 'atomic_number' in g.nodes['protein_atom'].data
    assert 'atomic_number' in g.nodes['ligand_atom'].data
    # With no atom-count caps, every node is real, so all masks are 1.
    assert torch.allclose(g.nodes['protein_atom'].data['mask'],
                          torch.ones(g.number_of_nodes('protein_atom'), 1))
    assert torch.allclose(g.nodes['ligand_atom'].data['mask'],
                          torch.ones(g.number_of_nodes('ligand_atom'), 1))
    assert 'distance' in g.edges['protein'].data
    assert 'distance' in g.edges['ligand'].data
    assert 'distance' in g.edges[('protein_atom', 'complex', 'protein_atom')].data
    assert 'distance' in g.edges[('ligand_atom', 'complex', 'ligand_atom')].data
    assert 'distance' in g.edges[('protein_atom', 'complex', 'ligand_atom')].data
    assert 'distance' in g.edges[('ligand_atom', 'complex', 'protein_atom')].data
    # Test max_num_ligand_atoms and max_num_protein_atoms
    # Padding to fixed sizes: masks distinguish real atoms (1) from padding (0).
    max_num_ligand_atoms = 30
    max_num_protein_atoms = 300
    g = ACNN_graph_construction_and_featurization(ligand_mol,
                                                  pocket_mol,
                                                  ligand_coords,
                                                  pocket_coords,
                                                  max_num_ligand_atoms=max_num_ligand_atoms,
                                                  max_num_protein_atoms=max_num_protein_atoms)
    assert g.number_of_nodes('ligand_atom') == max_num_ligand_atoms
    assert g.number_of_nodes('protein_atom') == max_num_protein_atoms
    ligand_mask = torch.zeros(max_num_ligand_atoms, 1)
    ligand_mask[:ligand_mol.GetNumAtoms(), :] = 1.
    assert torch.allclose(ligand_mask, g.nodes['ligand_atom'].data['mask'])
    protein_mask = torch.zeros(max_num_protein_atoms, 1)
    protein_mask[:pocket_mol.GetNumAtoms(), :] = 1.
    assert torch.allclose(protein_mask, g.nodes['protein_atom'].data['mask'])
    # Test neighbor_cutoff
    # A tighter distance cutoff should prune edges relative to the default.
    neighbor_cutoff = 6.
    g = ACNN_graph_construction_and_featurization(ligand_mol,
                                                  pocket_mol,
                                                  ligand_coords,
                                                  pocket_coords,
                                                  neighbor_cutoff=neighbor_cutoff)
    assert g.number_of_edges('protein') == 3405
    assert g.number_of_edges('ligand') == 193
    assert g.number_of_edges(('protein_atom', 'complex', 'protein_atom')) == 3331
    assert g.number_of_edges(('ligand_atom', 'complex', 'ligand_atom')) == 123
    assert g.number_of_edges(('protein_atom', 'complex', 'ligand_atom')) == 119
    assert g.number_of_edges(('ligand_atom', 'complex', 'protein_atom')) == 82
    # Test max_num_neighbors
    # Capping neighbors per atom should also prune edges.
    g = ACNN_graph_construction_and_featurization(ligand_mol,
                                                  pocket_mol,
                                                  ligand_coords,
                                                  pocket_coords,
                                                  max_num_neighbors=6)
    assert g.number_of_edges('protein') == 1716
    assert g.number_of_edges('ligand') == 126
    assert g.number_of_edges(('protein_atom', 'complex', 'protein_atom')) == 1691
    assert g.number_of_edges(('ligand_atom', 'complex', 'ligand_atom')) == 86
    assert g.number_of_edges(('protein_atom', 'complex', 'ligand_atom')) == 40
    assert g.number_of_edges(('ligand_atom', 'complex', 'protein_atom')) == 25
    # Test strip_hydrogens
    # Using the H-bearing pocket for both roles: stripping must drop H atoms
    # and keep only heavy-atom atomic numbers, in order.
    g = ACNN_graph_construction_and_featurization(pocket_mol_with_h,
                                                  pocket_mol_with_h,
                                                  pocket_coords_with_h,
                                                  pocket_coords_with_h,
                                                  strip_hydrogens=True)
    assert g.number_of_nodes('ligand_atom') != pocket_mol_with_h.GetNumAtoms()
    assert g.number_of_nodes('protein_atom') != pocket_mol_with_h.GetNumAtoms()
    non_h_atomic_numbers = []
    for i in range(pocket_mol_with_h.GetNumAtoms()):
        atom = pocket_mol_with_h.GetAtomWithIdx(i)
        if atom.GetSymbol() != 'H':
            non_h_atomic_numbers.append(atom.GetAtomicNum())
    non_h_atomic_numbers = torch.tensor(non_h_atomic_numbers).float().reshape(-1, 1)
    assert torch.allclose(non_h_atomic_numbers, g.nodes['ligand_atom'].data['atomic_number'])
    assert torch.allclose(non_h_atomic_numbers, g.nodes['protein_atom'].data['atomic_number'])
# Allow running this test module directly, without pytest.
if __name__ == '__main__':
    test_acnn_graph_construction_and_featurization()
|
<gh_stars>0
"""
This module provides a Django models that can be used for querying and
manipulating CEDA user accounts.
"""
__author__ = "<NAME>"
__date__ = "2019-08-28"
__copyright__ = "Copyright 2019 United Kingdom Research and Innovation"
__license__ = "BSD - see LICENSE file in top-level directory"
from datetime import date
from dateutil.relativedelta import relativedelta
from django.conf import settings
from django.db import models
from django.contrib.auth import models as auth_models
from django.core.exceptions import ValidationError
from django.contrib.postgres.fields import ArrayField
from django_countries.fields import CountryField
from jasmin_notifications.models import NotifiableUserMixin
class Institution(models.Model):
    """
    Model class representing an institution that a user can belong to.
    """
    class Meta:
        # Sort institutions alphabetically, then by country.
        ordering = ('name', 'country')

    #: The name of the institution
    name = models.CharField(max_length=200)
    #: The country of the institution
    country = CountryField()
    #: The type of the institution (closed set of categories)
    institution_type = models.CharField(
        max_length=20,
        choices=[
            ("NERC", "NERC"),
            ("University", "University"),
            ("School", "School"),
            ("Government", "Government"),
            ("Commercial", "Commercial"),
            ("Other", "Other"),
        ]
    )

    def __str__(self):
        # e.g. "University of Reading, United Kingdom"
        return "{}, {}".format(self.name, self.country.name)
class CEDAUser(auth_models.AbstractUser, NotifiableUserMixin):
    """
    Custom user model for the `ceda_auth` package.

    * Provides access to the :py:class:`Account` for the user as a cached property
    * Adds additional fields for 'suspension reason'
    * Tracks email confirmation
    """
    class Meta:
        verbose_name = 'CEDA User'
        verbose_name_plural = 'CEDA Users'
        ordering = ('username', )

    # This is mostly for createsuperuser
    REQUIRED_FIELDS = ['email', 'first_name',
                       'last_name', 'discipline', 'institution_id']
    # Modify these fields to be required
    first_name = models.CharField(max_length=50)
    last_name = models.CharField(max_length=50)
    #: The discipline that the user studies
    DISCIPLINE_CHOICES = [
        ("Atmospheric Physics", "Atmospheric Physics"),
        ("Atmospheric Chemistry", "Atmospheric Chemistry"),
        ("Climate Change", "Climate Change"),
        ("Earth System Science", "Earth System Science"),
        ("Marine Science", "Marine Science"),
        ("Terrestrial and Fresh Water", "Terrestrial and Fresh Water"),
        ("Earth Observation", "Earth Observation"),
        ("Polar Science", "Polar Science"),
        ("Geography", "Geography"),
        ("Engineering", "Engineering"),
        ("Medical/Biological Sciences", "Medical/Biological Sciences"),
        ("Mathematics/Computer Science", "Mathematics/Computer Science"),
        ("Economics", "Economics"),
        ("Personal use", "Personal use"),
        ("Other", "Other"),
    ]
    discipline = models.CharField(
        max_length=30,
        choices=DISCIPLINE_CHOICES,
        help_text='Please select the closest match to the discipline that you work in'
    )
    #: The type of degree that the user is studying for
    DEGREE_CHOICES = [
        ("", "Not studying for a degree"),
        ("First degree", "First Degree (Bachelor's / Undergraduate Master's)"),
        ("Postgraduate Master's", "Postgraduate Master's"),
        ("Doctorate", "Doctorate"),
        ("Other", "Other"),
    ]
    degree = models.CharField(
        max_length=30, blank=True,
        choices=DEGREE_CHOICES,
        help_text='The type of degree you are studying for, if applicable'
    )
    #: The user's institution
    institution = models.ForeignKey(
        Institution, models.CASCADE, null=True, blank=True)
    #: Indicates if the user is a service user
    service_user = models.BooleanField(
        default=False,
        help_text='Indicates if this user is a service user, i.e. a user that '
                  'exists to run a service rather than a regular user account.'
    )
    #: If this is a service user, these are the responsible users
    responsible_users = models.ManyToManyField(
        "self",
        symmetrical=False, blank=True,
        limit_choices_to={'service_user': False},
        help_text='For service users, these are the users responsible for the '
                  'administration of the service user.'
    )
    #: The time at which the user last confirmed their email address
    email_confirmed_at = models.DateTimeField(null=True, blank=True)
    #: The time at which the user accepted the JASMIN Terms and Conditions
    conditions_accepted_at = models.DateTimeField(null=True, blank=True)
    #: The username of the user who approved this user for root access
    approved_for_root_by = models.CharField(
        max_length=200, null=True, blank=True)
    #: The datetime at which the user was approved for root access
    approved_for_root_at = models.DateTimeField(null=True, blank=True)
    #: The reason why the user was suspended (for the user)
    user_reason = models.TextField(
        blank=True,
        verbose_name='Reason for suspension (user)',
        help_text='Indicate why the user has been suspended'
    )
    #: Internal details on user suspension
    internal_reason = models.TextField(
        blank=True,
        verbose_name='Reason for suspension (internal)',
        help_text='Any internal details about the user\'s suspension that '
                  'should not be displayed to the user'
    )
    jasminaccountid = models.CharField(unique=True, max_length=20, null=True, blank=True)
    has_ftp_password = models.BooleanField(
        default=False,
        help_text='Indicates if this user has created a FTP password.'
    )

    def email_confirm_required(self):
        """
        Returns true if the user needs to confirm their email address soon, false
        otherwise.
        """
        # NOTE(review): a user who has never confirmed returns False here —
        # presumably never-confirmed users are handled by a separate flow.
        if not self.email_confirmed_at:
            return False
        deltas = settings.CEDA_AUTH['EMAIL_CONFIRM_NOTIFY_DELTAS']
        # Confirmation is valid for a year; start reporting "required" once
        # today is within the first configured delta of the deadline.
        confirm_by = self.email_confirmed_at.date() + relativedelta(years=1)
        threshold = date.today() + deltas[0]
        return confirm_by < threshold

    def clean(self):
        """Cross-field validation for email uniqueness/presence, suspension
        reasons and root-approval bookkeeping."""
        errors = {}
        # Ensure that an account with the current username exists
        # if not Account.objects.filter(username = self.username).exists():
        #     errors['username'] = 'An account with this username does not exist.'
        if self.email:
            # If email is given, it must be case-insensitive unique
            q = CEDAUser.objects.filter(email__iexact=self.email)
            if not self._state.adding:
                q = q.exclude(pk=self.pk)
            if q.exists():
                errors['email'] = 'Email address is already in use.'
        elif not self.service_user:
            # Email address is required for regular users
            errors['email'] = 'This field is required.'
        # Ensure that a reason is given if the account is suspended
        if self.is_active:
            if self.user_reason:
                errors['user_reason'] = 'Must not be present for active account.'
            if self.internal_reason:
                errors['internal_reason'] = 'Must not be present for active account.'
        else:
            if not self.user_reason:
                errors['user_reason'] = 'Please give a reason for suspension.'
        # Root approval requires both who and when to be recorded.
        if self.approved_for_root_by and not self.approved_for_root_at:
            errors['approved_for_root_at'] = 'Required to indicate root access is approved.'
        if self.approved_for_root_at and not self.approved_for_root_by:
            errors['approved_for_root_by'] = 'Required to indicate root access is approved.'
        if errors:
            raise ValidationError(errors)

    def check_password(self, raw_password):
        # For service users, the password is *never* correct
        if self.service_user:
            return False
        return super().check_password(raw_password)

    def set_password(self, raw_password):
        # For service users, always set an unusable password
        if self.service_user:
            self.set_unusable_password()
            return
        super().set_password(raw_password)

    def _suppress_notifications(self):
        """
        Returns true if notifications for this user should be silently dropped.

        Notifications are suppressed during a CEDA import, and for users not
        listed in the MIGRATED_USERS setting (when that setting is absent,
        every user is treated as migrated).  This guard was previously
        duplicated verbatim in notify, notify_if_not_exists and
        notify_pending_deadline; it is factored out here so the three methods
        cannot drift apart.
        """
        # During an import, disable all notifications
        if getattr(settings, 'IS_CEDA_IMPORT', False):
            return True
        # Only send notifications for migrated users
        return self.username not in getattr(settings, 'MIGRATED_USERS', [self.username])

    def notify(self, *args, **kwargs):
        """Send a notification; service-user notifications go to the
        responsible users instead."""
        if self._suppress_notifications():
            return
        if self.service_user:
            for user in self.responsible_users.all():
                user.notify(*args, **kwargs)
        else:
            super().notify(*args, **kwargs)

    def notify_if_not_exists(self, *args, **kwargs):
        """As notify, but delegates to the mixin's notify_if_not_exists."""
        if self._suppress_notifications():
            return
        if self.service_user:
            for user in self.responsible_users.all():
                user.notify_if_not_exists(*args, **kwargs)
        else:
            super().notify_if_not_exists(*args, **kwargs)

    def notify_pending_deadline(self, *args, **kwargs):
        """As notify, but delegates to the mixin's notify_pending_deadline."""
        if self._suppress_notifications():
            return
        if self.service_user:
            for user in self.responsible_users.all():
                user.notify_pending_deadline(*args, **kwargs)
        else:
            super().notify_pending_deadline(*args, **kwargs)
|
# Copyright (C) 2019 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import time
from collections import defaultdict
from flask import abort, jsonify, request
from resultsdbpy.flask_support.util import AssertRequest, query_as_kwargs, limit_for_query, boolean_query
from resultsdbpy.controller.commit import Commit
from resultsdbpy.controller.commit_controller import uuid_range_for_query, HasCommitContext
from resultsdbpy.controller.configuration import Configuration
from resultsdbpy.controller.configuration_controller import configuration_for_query
class UploadController(HasCommitContext):
DEFAULT_LIMIT = 100
    def __init__(self, commit_controller, upload_context):
        """Bind the upload endpoints to their collaborators.

        commit_controller -- used to register commits attached to uploads;
            its commit_context is shared with the HasCommitContext base.
        upload_context -- storage backend for uploaded test results.
        """
        super(UploadController, self).__init__(commit_controller.commit_context)
        self.commit_controller = commit_controller
        self.upload_context = upload_context
    @query_as_kwargs()
    @uuid_range_for_query()
    @limit_for_query(DEFAULT_LIMIT)
    @configuration_for_query()
    def _find_uploads_for_query(self, configurations=None, suite=None, branch=None, begin=None, end=None, recent=None, limit=None, **kwargs):
        """Collect uploaded test results matching the current request's query.

        The stacked decorators translate the HTTP query string into the
        keyword arguments seen here (configurations, uuid range, limit).
        Returns a dict mapping configuration -> {suite: [results]}.
        NOTE(review): branch is indexed as branch[0] below, so the decorators
        presumably always supply it as a list — confirm.
        """
        AssertRequest.query_kwargs_empty(**kwargs)
        # recent arrives as a list of strings from the query; default True.
        recent = boolean_query(*recent)[0] if recent else True
        with self.upload_context:
            if not suite:
                # No suite specified: search every suite known for the
                # requested configurations.
                suites = set()
                for config_suites in self.upload_context.find_suites(configurations=configurations, recent=recent).values():
                    [suites.add(suite) for suite in config_suites]
            else:
                suites = set(suite)
            # Accumulate results per suite until the overall limit is hit.
            current_uploads = 0
            result = defaultdict(dict)
            for suite in suites:
                if current_uploads >= limit:
                    break
                results_dict = self.upload_context.find_test_results(
                    configurations=configurations, suite=suite, branch=branch[0],
                    begin=begin, end=end, recent=recent, limit=(limit - current_uploads),
                )
                for config, results in results_dict.items():
                    current_uploads += len(results)
                    result[config][suite] = results
            return result
def download(self):
AssertRequest.is_type(['GET'])
with self.upload_context:
uploads = self._find_uploads_for_query()
response = []
for config, suite_results in uploads.items():
for suite, results in suite_results.items():
for result in results:
config.sdk = result.get('sdk')
response.append(dict(
configuration=Configuration.Encoder().default(config),
suite=suite,
commits=Commit.Encoder().default(result['commits']),
timestamp=result['timestamp'],
test_results=result['test_results'],
))
return jsonify(response)
def upload(self):
if request.method == 'GET':
return self.download()
AssertRequest.is_type(['POST'])
AssertRequest.no_query()
with self.upload_context:
try:
data = request.form or json.loads(request.get_data())
except ValueError:
abort(400, description='Expected uploaded data to be json')
try:
configuration = Configuration.from_json(data.get('configuration', {}))
except (ValueError, TypeError):
abort(400, description='Invalid configuration')
suite = data.get('suite')
if not suite:
abort(400, description='No test suite specified')
commits = [self.commit_controller.register(commit=commit) for commit in data.get('commits', [])]
test_results = data.get('test_results', {})
if not test_results:
abort(400, description='No test results specified')
timestamp = data.get('timestamp', time.time())
version = data.get('version', 0)
try:
self.upload_context.upload_test_results(configuration, commits, suite, test_results, timestamp, version=version)
except (TypeError, ValueError) as error:
abort(400, description=str(error))
processing_results = self.upload_context.process_test_results(configuration, commits, suite, test_results, timestamp)
return jsonify(dict(status='ok', processing=processing_results))
def process(self):
AssertRequest.is_type(['POST'])
with self.upload_context:
uploads = self._find_uploads_for_query()
if not uploads:
abort(404, description='No uploads matching the specified criteria')
response = []
for config, suite_results in uploads.items():
for suite, results in suite_results.items():
for result in results:
config.sdk = result.get('sdk')
processing_results = self.upload_context.process_test_results(
configuration=config, commits=result['commits'], suite=suite,
test_results=result['test_results'], timestamp=result['timestamp'],
)
response.append(dict(
configuration=Configuration.Encoder().default(config),
suite=suite,
commits=Commit.Encoder().default(result['commits']),
timestamp=result['timestamp'],
processing=processing_results,
))
return jsonify(response)
@query_as_kwargs()
@configuration_for_query()
def suites(self, configurations=None, recent=None, suite=None, branch=None, **kwargs):
AssertRequest.is_type(['GET'])
AssertRequest.query_kwargs_empty(**kwargs)
with self.upload_context:
suites_by_config = self.upload_context.find_suites(
configurations=configurations,
recent=boolean_query(*recent)[0] if recent else True,
branch=branch[0] if branch else None,
)
result = []
for config, candidate_suites in suites_by_config.items():
suites_for_config = [s for s in candidate_suites if not suite or s in suite]
if suites_for_config:
result.append([config, suites_for_config])
if not result:
abort(404, description='No suites matching the specified criteria')
return jsonify(Configuration.Encoder().default(result))
|
<gh_stars>0
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import mock
import random
from twisted.trial import unittest
from twisted.python import failure
from twisted.internet import defer
from buildbot.test.fake import fakedb, fakemaster
from buildbot.process import builder, buildrequest
from buildbot.db import buildrequests
from buildbot.util import epoch2datetime
class TestBuilderBuildCreation(unittest.TestCase):
    """Tests for the build-creation path of process.builder.Builder:
    claiming build requests, choosing slaves and builds, merging
    requests, and reclaiming claims.  All database access goes through
    fakedb; builds are never actually started (see makeBuilder).
    """

    def setUp(self):
        # a collection of rows that would otherwise clutter up every test
        self.base_rows = [
            fakedb.SourceStamp(id=21),
            fakedb.Buildset(id=11, reason='because', sourcestampid=21),
        ]

    def makeBuilder(self, patch_random=False, **config_kwargs):
        """Set up C{self.bldr}"""
        self.bstatus = mock.Mock()
        self.factory = mock.Mock()
        self.master = fakemaster.make_master()
        # only include the necessary required config, plus user-requested
        config = dict(name="bldr", slavename="slv", builddir="bdir",
                      slavebuilddir="sbdir", factory=self.factory)
        config.update(config_kwargs)
        self.bldr = builder.Builder(config, self.bstatus)
        self.master.db = self.db = db = fakedb.FakeDBConnector(self)
        self.master.master_name = db.buildrequests.MASTER_NAME
        self.master.master_incarnation = db.buildrequests.MASTER_INCARNATION
        self.bldr.master = self.master
        # patch into the _startBuildsFor method: record what would have been
        # started instead of actually starting builds
        self.builds_started = []
        def _startBuildFor(slavebuilder, buildrequests):
            self.builds_started.append((slavebuilder, buildrequests))
            return defer.succeed(None)
        self.bldr._startBuildFor = _startBuildFor
        if patch_random:
            # patch 'random.choice' to always take the slave that sorts
            # last, based on its name
            self.patch(random, "choice",
                       lambda lst : sorted(lst, key=lambda m : m.name)[-1])
        # we don't want the reclaim service running during tests..
        self.bldr.reclaim_svc.disownServiceParent()
        self.bldr.startService()

    def assertBuildsStarted(self, exp):
        """Assert that exactly the expected builds were (fake-)started."""
        # munge builds_started into a list of (slave, [brids])
        builds_started = [
            (sl.name, [ br.id for br in buildreqs ])
            for (sl, buildreqs) in self.builds_started ]
        self.assertEqual(sorted(builds_started), sorted(exp))

    def setSlaveBuilders(self, slavebuilders):
        """C{slaves} maps name : available"""
        self.bldr.slaves = []
        for name, avail in slavebuilders.iteritems():
            sb = mock.Mock(spec=['isAvailable'], name=name)
            sb.name = name
            sb.isAvailable.return_value = avail
            self.bldr.slaves.append(sb)

    # services

    def test_stopService_flushes(self):
        self.makeBuilder()
        # just check that stopService calls this and waits
        # for the deferred to fire
        events = []
        long_d = defer.Deferred()
        long_d.addCallback(lambda _ : events.append('long_d'))
        self.bldr.maybeStartBuild = lambda : long_d
        stop_d = self.bldr.stopService()
        stop_d.addCallback(lambda _ : events.append('stop_d'))
        # nothing should have happened yet
        self.assertEqual(events, [])
        # finish the maybeStartBuild invocation..
        long_d.callback(None)
        # and then check that things happened in the right order
        def check(_):
            self.assertEqual(events, [ 'long_d', 'stop_d' ])
        stop_d.addCallback(check)
        return stop_d

    # maybeStartBuild

    def do_test_maybeStartBuild(self, rows=[], exp_claims=[], exp_builds=[],
                                exp_fail=None):
        """Insert ``rows``, run maybeStartBuild, and verify the claims made,
        the builds started, and (optionally) an expected failure type."""
        d = self.db.insertTestData(rows)
        d.addCallback(lambda _ :
                self.bldr.maybeStartBuild())
        def check(_):
            # if a failure was expected, reaching the callback path is an error
            self.failIf(exp_fail)
            self.db.buildrequests.assertMyClaims(exp_claims)
            self.assertBuildsStarted(exp_builds)
        d.addCallback(check)
        def eb(f):
            f.trap(exp_fail)
        d.addErrback(eb)
        return d

    def test_maybeStartBuild_no_buildreqests(self):
        self.makeBuilder()
        self.setSlaveBuilders({'test-slave11':1})
        return self.do_test_maybeStartBuild(exp_claims=[], exp_builds=[])

    def test_maybeStartBuild_no_slavebuilders(self):
        self.makeBuilder()
        rows = [
            fakedb.BuildRequest(id=11, buildsetid=10, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[])

    def test_maybeStartBuild_limited_by_slaves(self):
        self.makeBuilder(mergeRequests=False)
        self.setSlaveBuilders({'test-slave1':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=10, buildsetid=11, buildername="bldr",
                submitted_at=130000),
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr",
                submitted_at=135000),
        ]
        # only one slave, so only the oldest request is claimed
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[10], exp_builds=[('test-slave1', [10])])

    def test_maybeStartBuild_limited_by_available_slaves(self):
        self.makeBuilder(mergeRequests=False)
        self.setSlaveBuilders({'test-slave1':0, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=10, buildsetid=11, buildername="bldr",
                submitted_at=130000),
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr",
                submitted_at=135000),
        ]
        # slave1 is not available, so slave2 gets the build
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[10], exp_builds=[('test-slave2', [10])])

    def test_maybeStartBuild_unlimited(self):
        self.makeBuilder(mergeRequests=False, patch_random=True)
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=10, buildsetid=11, buildername="bldr",
                submitted_at=130000),
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr",
                submitted_at=135000),
        ]
        # two slaves, two requests: both get claimed; patched random.choice
        # picks the last-sorting slave (slave2) first
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[10, 11],
                exp_builds=[('test-slave2', [10]), ('test-slave1', [11])])

    def test_maybeStartBuild_limited_by_requests(self):
        self.makeBuilder(mergeRequests=False, patch_random=True)
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[11], exp_builds=[('test-slave2', [11])])

    def test_maybeStartBuild_chooseSlave_None(self):
        self.makeBuilder()
        # a _chooseSlave that returns None means "no slave"; nothing starts
        self.bldr._chooseSlave = lambda avail : defer.succeed(None)
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[])

    def test_maybeStartBuild_chooseSlave_bogus(self):
        self.makeBuilder()
        # a _chooseSlave returning an object not in the avail list is ignored
        self.bldr._chooseSlave = lambda avail : defer.succeed(mock.Mock())
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[])

    def test_maybeStartBuild_chooseSlave_fails(self):
        self.makeBuilder()
        # a failing _chooseSlave propagates the failure and claims nothing
        self.bldr._chooseSlave = lambda avail : defer.fail(RuntimeError("xx"))
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[], exp_fail=RuntimeError)

    def test_maybeStartBuild_chooseBuild_None(self):
        self.makeBuilder()
        self.bldr._chooseBuild = lambda reqs : defer.succeed(None)
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[])

    def test_maybeStartBuild_chooseBuild_bogus(self):
        self.makeBuilder()
        self.bldr._chooseBuild = lambda reqs : defer.succeed(mock.Mock())
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[])

    def test_maybeStartBuild_chooseBuild_fails(self):
        self.makeBuilder(patch_random=True)
        self.bldr._chooseBuild = lambda reqs : defer.fail(RuntimeError("xx"))
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[], exp_fail=RuntimeError)

    def test_maybeStartBuild_mergeRequests_fails(self):
        self.makeBuilder(patch_random=True)
        def _mergeRequests(breq, unclaimed_requests, mergeRequests_fn):
            return defer.fail(RuntimeError("xx"))
        self.bldr._mergeRequests = _mergeRequests
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr"),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[], exp_builds=[], exp_fail=RuntimeError)

    def test_maybeStartBuild_claim_race(self):
        self.makeBuilder(patch_random=True)
        # fake a race condition on the buildrequests table: the first claim
        # attempt loses request 10 to another master and fails, and
        # maybeStartBuild must recover and claim the remaining request
        old_claimBuildRequests = self.db.buildrequests.claimBuildRequests
        def claimBuildRequests(brids):
            # first, ensure this only happens the first time
            self.db.buildrequests.claimBuildRequests = old_claimBuildRequests
            # claim brid 10 for some other master
            assert 10 in brids
            self.db.buildrequests.fakeClaimBuildRequest(10, 136000,
                    master_name="interloper", master_incarnation="interloper")
            # ..and fail
            return defer.fail(buildrequests.AlreadyClaimedError())
        self.db.buildrequests.claimBuildRequests = claimBuildRequests
        self.setSlaveBuilders({'test-slave1':1, 'test-slave2':1})
        rows = self.base_rows + [
            fakedb.BuildRequest(id=10, buildsetid=11, buildername="bldr",
                submitted_at=130000), # will turn out to be claimed!
            fakedb.BuildRequest(id=11, buildsetid=11, buildername="bldr",
                submitted_at=135000),
        ]
        return self.do_test_maybeStartBuild(rows=rows,
                exp_claims=[11], exp_builds=[('test-slave2', [11])])

    def test_maybeStartBuild_builder_stopped(self):
        self.makeBuilder()
        # this will cause an exception if maybeStartBuild tries to start
        self.bldr.slaves = None
        # so we just hope this does not fail
        d = self.bldr.stopService()
        d.addCallback(lambda _ : self.bldr.maybeStartBuild())
        return d

    # _chooseSlave

    def do_test_chooseSlave(self, nextSlave, exp_choice=None, exp_fail=None):
        """Run _chooseSlave with the given nextSlave config against four mock
        slavebuilders and verify the chosen one (or the expected failure)."""
        self.makeBuilder(nextSlave=nextSlave)
        slavebuilders = [ mock.Mock(name='sb%d' % i) for i in range(4) ]
        d = self.bldr._chooseSlave(slavebuilders)
        def check(sb):
            self.assertIdentical(sb, slavebuilders[exp_choice])
        def failed(f):
            f.trap(exp_fail)
        d.addCallbacks(check, failed)
        return d

    def test_chooseSlave_default(self):
        # the default implementation delegates to random.choice
        self.patch(random, "choice", lambda lst : lst[2])
        return self.do_test_chooseSlave(None, exp_choice=2)

    def test_chooseSlave_nextSlave_simple(self):
        def nextSlave(bldr, lst):
            self.assertIdentical(bldr, self.bldr)
            return lst[1]
        return self.do_test_chooseSlave(nextSlave, exp_choice=1)

    def test_chooseSlave_nextSlave_deferred(self):
        # nextSlave may return a Deferred instead of a plain value
        def nextSlave(bldr, lst):
            self.assertIdentical(bldr, self.bldr)
            return defer.succeed(lst[1])
        return self.do_test_chooseSlave(nextSlave, exp_choice=1)

    def test_chooseSlave_nextSlave_exception(self):
        def nextSlave(bldr, lst):
            raise RuntimeError
        return self.do_test_chooseSlave(nextSlave, exp_fail=RuntimeError)

    def test_chooseSlave_nextSlave_failure(self):
        def nextSlave(bldr, lst):
            return defer.fail(failure.Failure(RuntimeError()))
        return self.do_test_chooseSlave(nextSlave, exp_fail=RuntimeError)

    # _chooseBuild

    def do_test_chooseBuild(self, nextBuild, exp_choice=None, exp_fail=None):
        """Run _chooseBuild with the given nextBuild config against four fake
        request dicts and verify the chosen one (or the expected failure)."""
        self.makeBuilder(nextBuild=nextBuild)
        def mkrq(n):
            # each brdict and its 'brobj' mock point at each other, mirroring
            # the refloops the real code sets up
            brdict = dict(brobj=mock.Mock(name='br%d' % n))
            brdict['brobj'].brdict = brdict
            return brdict
        requests = [ mkrq(i) for i in range(4) ]
        d = self.bldr._chooseBuild(requests)
        def check(sb):
            self.assertIdentical(sb, requests[exp_choice])
        def failed(f):
            f.trap(exp_fail)
        d.addCallbacks(check, failed)
        return d

    def test_chooseBuild_default(self):
        "default chooses the first in the list, which should be the earliest"
        return self.do_test_chooseBuild(None, exp_choice=0)

    def test_chooseBuild_nextBuild_simple(self):
        def nextBuild(bldr, lst):
            self.assertIdentical(bldr, self.bldr)
            return lst[3]
        return self.do_test_chooseBuild(nextBuild, exp_choice=3)

    def test_chooseBuild_nextBuild_deferred(self):
        def nextBuild(bldr, lst):
            self.assertIdentical(bldr, self.bldr)
            return defer.succeed(lst[2])
        return self.do_test_chooseBuild(nextBuild, exp_choice=2)

    def test_chooseBuild_nextBuild_exception(self):
        def nextBuild(bldr, lst):
            raise RuntimeError
        return self.do_test_chooseBuild(nextBuild, exp_fail=RuntimeError)

    def test_chooseBuild_nextBuild_failure(self):
        def nextBuild(bldr, lst):
            return defer.fail(failure.Failure(RuntimeError()))
        return self.do_test_chooseBuild(nextBuild, exp_fail=RuntimeError)

    # _brdictToBuildRequest

    @defer.deferredGenerator
    def test_brdictToBuildRequest(self):
        self.makeBuilder()
        # set up all of the data required for a BuildRequest object
        wfd = defer.waitForDeferred(
            self.db.insertTestData([
                fakedb.SourceStamp(id=234),
                fakedb.Buildset(id=30, sourcestampid=234, reason='foo',
                    submitted_at=1300305712, results=-1),
                fakedb.BuildRequest(id=19, buildsetid=30, buildername='bldr',
                    priority=13, submitted_at=1300305712, results=-1),
            ]))
        yield wfd
        wfd.getResult()
        wfd = defer.waitForDeferred(
            self.db.buildrequests.getBuildRequest(19))
        yield wfd
        brdict = wfd.getResult()
        wfd = defer.waitForDeferred(
            self.bldr._brdictToBuildRequest(brdict))
        yield wfd
        br = wfd.getResult()
        # just check that the BuildRequest looks reasonable -
        # test_process_buildrequest checks the whole thing
        self.assertEqual(br.reason, 'foo')
        # and check that the cross-pointers are correct
        self.assertIdentical(br.brdict, brdict)
        self.assertIdentical(brdict['brobj'], br)
        self.bldr._breakBrdictRefloops([brdict])

    # _getMergeRequestsFn

    def do_test_getMergeRequestsFn(self, builder_param, global_param,
                                   expected):
        """Check _getMergeRequestsFn's resolution of builder-level vs.
        master-level mergeRequests settings."""
        self.makeBuilder(mergeRequests=builder_param)
        self.master.mergeRequests=global_param
        self.assertEqual(self.bldr._getMergeRequestsFn(), expected)

    def test_getMergeRequestsFn_defaults(self):
        self.do_test_getMergeRequestsFn(None, None,
                buildrequest.BuildRequest.canBeMergedWith)

    def test_getMergeRequestsFn_global_True(self):
        self.do_test_getMergeRequestsFn(None, True,
                buildrequest.BuildRequest.canBeMergedWith)

    def test_getMergeRequestsFn_global_False(self):
        self.do_test_getMergeRequestsFn(None, False, None)

    def test_getMergeRequestsFn_global_function(self):
        function = lambda : None
        self.do_test_getMergeRequestsFn(None, function, function)

    def test_getMergeRequestsFn_builder_True(self):
        # builder-level setting takes precedence over the global one
        self.do_test_getMergeRequestsFn(True, False,
                buildrequest.BuildRequest.canBeMergedWith)

    def test_getMergeRequestsFn_builder_False(self):
        self.do_test_getMergeRequestsFn(False, True, None)

    def test_getMergeRequestsFn_builder_function(self):
        function = lambda : None
        self.do_test_getMergeRequestsFn(function, None, function)

    # _mergeRequests

    @defer.deferredGenerator
    def test_mergeRequests(self):
        self.makeBuilder()
        # set up all of the data required for a BuildRequest object
        wfd = defer.waitForDeferred(
            self.db.insertTestData([
                fakedb.SourceStamp(id=234),
                fakedb.Buildset(id=30, sourcestampid=234, reason='foo',
                    submitted_at=1300305712, results=-1),
                fakedb.BuildRequest(id=19, buildsetid=30, buildername='bldr',
                    priority=13, submitted_at=1300305712, results=-1),
                fakedb.BuildRequest(id=20, buildsetid=30, buildername='bldr',
                    priority=13, submitted_at=1300305712, results=-1),
                fakedb.BuildRequest(id=21, buildsetid=30, buildername='bldr',
                    priority=13, submitted_at=1300305712, results=-1),
            ]))
        yield wfd
        wfd.getResult()
        wfd = defer.waitForDeferred(
            defer.gatherResults([
                self.db.buildrequests.getBuildRequest(id)
                for id in (19, 20, 21)
            ]))
        yield wfd
        brdicts = wfd.getResult()
        def mergeRequests_fn(breq, other):
            # merge evens with evens, odds with odds
            return breq.id % 2 == other.id % 2
        # check odds
        wfd = defer.waitForDeferred(
            self.bldr._mergeRequests(brdicts[0], brdicts, mergeRequests_fn))
        yield wfd
        self.assertEqual(wfd.getResult(), [ brdicts[0], brdicts[2] ])
        # check evens
        wfd = defer.waitForDeferred(
            self.bldr._mergeRequests(brdicts[1], brdicts, mergeRequests_fn))
        yield wfd
        self.assertEqual(wfd.getResult(), [ brdicts[1] ])

    def test_mergeRequests_no_merging(self):
        # without a merge function, only the original request is returned
        self.makeBuilder()
        breq = dict(dummy=1)
        d = self.bldr._mergeRequests(breq, [ breq, breq ], None)
        def check(res):
            self.assertEqual(res, [breq])
        d.addCallback(check)
        return d

    def test_mergeRequests_singleton_list(self):
        # a single-request list must short-circuit without converting brdicts
        self.makeBuilder()
        breq = dict(dummy=1)
        def is_not_called(*args):
            self.fail("should not be called")
        self.bldr._brdictToBuildRequest = is_not_called
        d = self.bldr._mergeRequests(breq, [ breq ], lambda x,y : None)
        def check(res):
            self.assertEqual(res, [breq])
        d.addCallback(check)
        return d

    # other methods

    def test_reclaimAllBuilds_empty(self):
        # just to be sure this doesn't crash
        self.makeBuilder()
        d = self.bldr.reclaimAllBuilds()
        return d

    def test_reclaimAllBuilds(self):
        self.makeBuilder()
        claims = []
        def fakeClaimBRs(*args):
            claims.append(args)
            return defer.succeed(None)
        self.bldr.master.db.buildrequests.claimBuildRequests = fakeClaimBRs
        def mkbld(brids):
            bld = mock.Mock(name='Build')
            bld.requests = []
            for brid in brids:
                br = mock.Mock(name='BuildRequest %d' % brid)
                br.id = brid
                bld.requests.append(br)
            return bld
        old = mkbld([15]) # keep a reference to the "old" build
        self.bldr.old_building[old] = None
        self.bldr.building.append(mkbld([10,11,12]))
        d = self.bldr.reclaimAllBuilds()
        def check(_):
            # both current and old builds' request ids are reclaimed at once
            self.assertEqual(claims, [ (set([10,11,12,15]),) ])
        d.addCallback(check)
        return d
class TestGetOldestRequestTime(unittest.TestCase):
    """Tests for Builder.getOldestRequestTime, which should report the
    submission time of the oldest *unclaimed* request, or None when every
    request for the builder is claimed."""

    def setUp(self):
        # a collection of rows that would otherwise clutter up every test
        self.base_rows = [
            fakedb.SourceStamp(id=21),
            fakedb.Buildset(id=11, reason='because', sourcestampid=21),
            # bldr1: 111 and 333 unclaimed (claimed_at=0), 222 claimed
            fakedb.BuildRequest(id=111, submitted_at=1000,
                buildername='bldr1', claimed_at=0, buildsetid=11),
            fakedb.BuildRequest(id=222, submitted_at=2000,
                buildername='bldr1', claimed_at=2001, buildsetid=11),
            fakedb.BuildRequest(id=333, submitted_at=3000,
                buildername='bldr1', claimed_at=0, buildsetid=11),
            # bldr2: its only request is claimed
            fakedb.BuildRequest(id=444, submitted_at=2500,
                buildername='bldr2', claimed_at=2501, buildsetid=11),
        ]

    def makeBuilder(self, name):
        """Set up C{self.bldr} with the given builder name and a fake db."""
        self.bstatus = mock.Mock()
        self.factory = mock.Mock()
        self.master = mock.Mock()
        # only include the necessary required config
        config = dict(name=name, slavename="slv", builddir="bdir",
                      slavebuilddir="sbdir", factory=self.factory)
        self.bldr = builder.Builder(config, self.bstatus)
        self.master.db = self.db = db = fakedb.FakeDBConnector(self)
        self.master.master_name = db.buildrequests.MASTER_NAME
        self.master.master_incarnation = db.buildrequests.MASTER_INCARNATION
        self.bldr.master = self.master
        # we don't want the reclaim service running during tests..
        self.bldr.reclaim_svc.disownServiceParent()
        self.bldr.startService()

    def test_gort_unclaimed(self):
        # oldest unclaimed request for bldr1 was submitted at t=1000
        self.makeBuilder(name='bldr1')
        d = self.db.insertTestData(self.base_rows)
        d.addCallback(lambda _ : self.bldr.getOldestRequestTime())
        def check(rqtime):
            self.assertEqual(rqtime, epoch2datetime(1000))
        d.addCallback(check)
        return d

    def test_gort_all_claimed(self):
        # bldr2 has no unclaimed requests, so the result is None
        self.makeBuilder(name='bldr2')
        d = self.db.insertTestData(self.base_rows)
        d.addCallback(lambda _ : self.bldr.getOldestRequestTime())
        def check(rqtime):
            self.assertEqual(rqtime, None)
        d.addCallback(check)
        return d
|
#! /usr/bin/python
# RANDLABEL.py
#
# Generates random ORDPATH label in the format compatible with
# ordpath-test.
#
# -l, --length=<integer> length of generated label
# --setup=<filename> read ORDPATH codec setup from the file
# specified; each generated component has
# equal probability to hit any of the
# intervals specified by setup
# --clamp=<integer> discard some intervals from setup before
# generation; intervals are ordered by
# proximity to the "sweet spot"; positive
# clamp value limits the length of intervals
# list; non-positive clamp value K is
# interpreted as a request to discard first
# |K| elements from the list
# REFENCODE.py
#
# Reference implementation of the ORDPATH codec. Reads label from stdin
# and writes results to stdout. Input and output formats are compatible
# with ordpath-test.
#
# --setup=<filename> read ORDPATH codec setup from the file
# specified
import sys, os, math, re, random, getopt
def parseSetup(s=None):
    """Parse an ORDPATH codec setup description into interval tuples.

    Each line of *s* has the form ``<prefix-bits> : <width> [: <origin>]``:
    a prefix of width ``w`` covers ``2**w`` consecutive component values.
    The optional origin anchors its interval at an absolute value and marks
    it as the "sweet spot".  Returns a list of ``(begin, end, prefix, width)``
    tuples with all ``[begin, end)`` ranges shifted so the sweet-spot interval
    starts at its origin, ordered by proximity to the sweet-spot interval.
    Falls back to the built-in default setup when *s* is None or empty.
    """
    s = s or """
    0000001 : 48
    0000010 : 32
    0000011 : 16
    000010 : 12
    000011 : 8
    00010 : 6
    00011 : 4
    001 : 3
    01 : 3 : 0
    100 : 4
    101 : 6
    1100 : 8
    1101 : 12
    11100 : 16
    11101 : 32
    11110 : 48"""
    acc = []
    offset = 0
    cpos = 0
    sweet = 0
    for ind, m in enumerate(re.finditer(
            r'([01]+)\s*:\s*(\d+)(?:\s*:\s*(-?\d+))?', s)):
        pfx, w, orig = m.groups()
        w = int(w)
        sz = 1 << w
        if orig:  # orig is a string here ('0' is truthy) or None when absent
            orig = int(orig)
            sweet = ind
            offset = orig - cpos
        acc.append((cpos, cpos + sz, pfx, w))
        cpos += sz
    l = [(abs(i - sweet), b + offset, e + offset, pfx, width)
         for (i, (b, e, pfx, width)) in enumerate(acc)]
    # FIX: key-based sort instead of the Python-2-only cmp-function form
    # (l.sort(lambda x, y: cmp(x[0], y[0]))).  Timsort is stable, so entries
    # at equal distance from the sweet spot keep their original order.
    l.sort(key=lambda entry: entry[0])
    return [val[1:] for val in l]
def inputOutput(args):
    """Resolve (input, output) streams from positional arguments.

    *args* holds up to two entries; ``'-'`` or a missing entry selects
    stdin/stdout respectively, anything else is opened as a file
    ('rb' for input, 'wb' for output).  Raises Exception when more than
    two arguments are given.

    FIX: the original did ``args += [...]``, mutating the caller's list
    as a side effect; we now work on a copy.
    """
    if len(args) > 2:
        raise Exception('Excess arguments given, expecting at most 2')
    paths = list(args) + ['-'] * (2 - len(args))
    return (sys.stdin if paths[0] == '-' else open(paths[0], 'rb')), (
        sys.stdout if paths[1] == '-' else open(paths[1], 'wb'))
def randLabel(setup, l):
    """Generate a random label of length *l*.

    Each component is drawn by first picking a setup interval uniformly at
    random, then picking a value uniformly from that interval's [begin, end)
    range.  FIX: uses range() instead of the Python-2-only xrange().
    """
    return [random.randrange(*random.choice(setup)[:2]) for _ in range(l)]
def randlabelMain(opts=None):
    """Entry point for randlabel.py: write a random ORDPATH label.

    Options: ``-l``/``--length`` (label length, default 10),
    ``--setup=<file>`` (codec setup description), ``--clamp=<int>``
    (restrict which intervals may be drawn from).  Remaining positional
    arguments select input/output streams via inputOutput().

    FIX: the mutable default argument ``opts=[]`` is replaced with a
    None sentinel.
    """
    opts = [] if opts is None else opts
    length = 10
    setupstr = None
    clamp = 0
    optlist, args = getopt.getopt(
        opts, 'l:', ['length=', 'setup=', 'clamp='])
    for o, a in optlist:
        if o in ('-l', '--length'):
            length = int(a)
        elif o == '--setup':
            with open(a) as f:
                setupstr = f.read()
        elif o == '--clamp':
            clamp = int(a)
    input, output = inputOutput(args)
    setup = parseSetup(setupstr)
    # clamp > 0 keeps the first `clamp` intervals (those closest to the
    # sweet spot); clamp <= 0 drops the first |clamp| intervals instead
    clamped = setup[-clamp:] if clamp <= 0 else setup[:clamp]
    data = randLabel(clamped, length)
    output.write('\n'.join(map(str, data)) + '\n')
def refEncode(setup, label):
    """Encode *label* (a sequence of ints) into a bit string.

    Each component ``c`` is matched against the first setup entry whose
    half-open range ``[b, e)`` contains it; its encoding is that entry's
    prefix followed by ``c - b`` rendered as a fixed-width binary number.
    Raises StopIteration if a component falls in no interval (same as the
    original).

    FIX: the original used a Python-2-only tuple-parameter lambda
    (``lambda (b, e, pfx, width): ...``), which is a syntax error on
    Python 3; this rewrite is behavior-identical and portable.
    """
    bits = []
    for c in label:
        b, e, pfx, width = next(entry for entry in setup
                                if entry[0] <= c < entry[1])
        bits.append(pfx + format(c - b, '0{0}b'.format(width)))
    return ''.join(bits)
def refencodeMain(opts=None):
    """Entry point for refencode.py: encode a label read from the input.

    Reads whitespace-separated integer components, encodes them with the
    (optionally ``--setup=<file>``) codec setup, and writes the bit length
    (left-justified in 15 columns) followed by the bits packed 8-per-byte.

    FIX: the mutable default argument ``opts=[]`` is replaced with a
    None sentinel.
    """
    opts = [] if opts is None else opts
    setupstr = None
    optlist, args = getopt.getopt(opts, '', ['setup='])
    for o, a in optlist:
        if o == '--setup':
            with open(a) as f:
                setupstr = f.read()
    input, output = inputOutput(args)
    setup = parseSetup(setupstr)
    label = [int(tok) for tok in input.read().split()]
    elabel = refEncode(setup, label)
    output.write("%-15d\n" % len(elabel))
    # pad with up to 7 zero bits so the final byte is complete, then pack
    padded = elabel + '0' * 7
    encoded = ''.join(chr(int(padded[i:i + 8], 2))
                      for i in range(0, len(elabel), 8))
    output.write(encoded)
if __name__ == '__main__':
    # Dispatch on the invoked script's basename: this one module serves as
    # both randlabel.py and refencode.py depending on how it is executed.
    # Running it under any other name raises KeyError.
    {
        'refencode.py': refencodeMain,
        'randlabel.py': randlabelMain
    } [os.path.basename(sys.argv[0])] (sys.argv[1:])
|
# Lint as: python3
"""SHARK Importer"""
import iree.compiler.tflite as iree_tflite_compile
import iree.runtime as iree_rt
import numpy as np
import os
import sys
import csv
import tensorflow as tf
import urllib.request
from shark.shark_inference import SharkInference
import iree.compiler.tflite as ireec_tflite
from shark.iree_utils import IREE_TARGET_MAP
class SharkImporter:
def __init__(
self,
model_name: str = None,
model_path: str = None,
model_type: str = "tflite",
model_source_hub: str = "tfhub",
device: str = None,
dynamic: bool = False,
jit_trace: bool = False,
benchmark_mode: bool = False,
input_details=None,
output_details=None,
tank_url: str = None,
):
self.model_name = model_name
self.model_path = model_path
self.model_type = model_type
self.model_source_hub = model_source_hub
self.device = device
self.dynamic = dynamic
self.jit_trace = jit_trace
self.benchmark_mode = benchmark_mode
self.inputs = None
self.input_details = input_details
self.output_details = output_details
self.tflite_saving_file = None
self.tflite_tosa_file = None
self.tank_url = tank_url
# create tmp model file directory
if self.model_path is None and self.model_name is None:
print(
"Error. No model_path, No model name,Please input either one."
)
return
if self.model_source_hub == "tfhub":
# compile and run tfhub tflite
if self.model_type == "tflite":
load_model_success = self.load_tflite_model()
if load_model_success == False:
print("Error, load tflite model fail")
return
if (self.input_details == None) or (
self.output_details == None
):
print(
"Setting up tflite interpreter to get model input details"
)
self.tflite_interpreter = tf.lite.Interpreter(
model_path=self.tflite_saving_file
)
self.tflite_interpreter.allocate_tensors()
# default input initialization
(
self.input_details,
self.output_details,
) = self.get_model_details()
inputs = self.generate_inputs(
self.input_details
) # device_inputs
self.setup_inputs(inputs)
def load_tflite_model(self):
print("Setting up for TMP_DIR")
tflite_workdir = os.path.join(
os.path.dirname(__file__), "./../gen_shark_tank/tflite"
)
os.makedirs(tflite_workdir, exist_ok=True)
print(f"TMP_TFLITE_DIR = {tflite_workdir}")
# use model name get dir.
tflite_model_name_dir = os.path.join(
tflite_workdir, str(self.model_name)
)
# TODO Download model from google bucket to tflite_model_name_dir by tank_url
os.makedirs(tflite_model_name_dir, exist_ok=True)
print(f"TMP_TFLITE_MODELNAME_DIR = {tflite_model_name_dir}")
self.tflite_saving_file = "/".join(
[tflite_model_name_dir, str(self.model_name) + "_tflite.tflite"]
)
self.tflite_tosa_file = "/".join(
[tflite_model_name_dir, str(self.model_name) + "_tosa.mlir"]
)
if os.path.exists(self.tflite_saving_file):
print(
"Local address for tflite model file Exists: ",
self.tflite_saving_file,
)
else:
print("No local tflite file, Download tflite model")
if self.model_path is None:
# get model file from tflite_model_list.csv or download from gs://bucket
print("No model_path, get from tflite_model_list.csv")
tflite_model_list_path = os.path.join(
os.path.dirname(__file__),
"../tank/tflite/tflite_model_list.csv",
)
tflite_model_list = csv.reader(open(tflite_model_list_path))
for row in tflite_model_list:
if str(row[0]) == self.model_name:
self.model_path = row[1]
if self.model_path is None:
print("Error, No model path find in tflite_model_list.csv")
return False
urllib.request.urlretrieve(self.model_path, self.tflite_saving_file)
if os.path.exists(self.tflite_tosa_file):
print("Exists", self.tflite_tosa_file)
else:
print(
"No tflite tosa.mlir, please use python generate_sharktank.py to download tosa model"
)
return True
def generate_inputs(self, input_details):
args = []
for input in input_details:
print(str(input["shape"]), input["dtype"].__name__)
args.append(np.zeros(shape=input["shape"], dtype=input["dtype"]))
return args
def get_model_details(self):
if self.model_type == "tflite":
print("Get tflite input output details")
self.input_details = self.tflite_interpreter.get_input_details()
self.output_details = self.tflite_interpreter.get_output_details()
return self.input_details, self.output_details
def setup_inputs(self, inputs):
print("Setting up inputs")
self.inputs = inputs
def compile(self, inputs=None):
if inputs is not None:
self.setup_inputs(inputs)
# preprocess model_path to get model_type and Model Source Hub
print("Shark Importer Intialize SharkInference and Do Compile")
if self.model_source_hub == "tfhub":
if os.path.exists(self.tflite_tosa_file):
print("Use", self.tflite_tosa_file, "as TOSA compile input")
# compile and run tfhub tflite
print("Inference tflite tosa model")
tosa_model = []
with open(self.tflite_tosa_file) as f:
tosa_model = f.read()
self.shark_module = SharkInference(
tosa_model,
self.inputs,
device=self.device,
dynamic=self.dynamic,
jit_trace=self.jit_trace,
)
self.shark_module.set_frontend("tflite-tosa")
self.shark_module.compile()
else:
# compile and run tfhub tflite
print("Inference tfhub tflite model")
self.shark_module = SharkInference(
self.tflite_saving_file,
self.inputs,
device=self.device,
dynamic=self.dynamic,
jit_trace=self.jit_trace,
)
self.shark_module.set_frontend("tflite")
self.shark_module.compile()
elif self.model_source_hub == "huggingface":
print("Inference", self.model_source_hub, " not implemented yet")
elif self.model_source_hub == "jaxhub":
print("Inference", self.model_source_hub, " not implemented yet")
    def forward(self, inputs=None):
        """Run inference through the compiled Shark module.

        :param inputs: optional inputs; when given they replace the stored ones.
        :return: for the "tfhub" hub, a list of output arrays cast back to the
            dtypes reported by ``get_model_details()``; other hubs only print
            a not-implemented notice and return None.
        """
        if inputs is not None:
            self.setup_inputs(inputs)
        # preprocess model_path to get model_type and Model Source Hub
        print("Shark Importer forward Model")
        if self.model_source_hub == "tfhub":
            shark_results = self.shark_module.forward(self.inputs)
            # Fix type information for unsigned cases.
            # for test compare result
            shark_results = list(shark_results)
            # assumes self.output_details was populated by get_model_details() -- TODO confirm callers do that
            for i in range(len(self.output_details)):
                dtype = self.output_details[i]["dtype"]
                shark_results[i] = shark_results[i].astype(dtype)
            return shark_results
        elif self.model_source_hub == "huggingface":
            print("Inference", self.model_source_hub, " not implemented yet")
        elif self.model_source_hub == "jaxhub":
            print("Inference", self.model_source_hub, " not implemented yet")
|
<filename>solidata_api/api/api_dataset_outputs/endpoint_dso.py
# -*- encoding: utf-8 -*-
"""
endpoint_dso.py
"""
from solidata_api.api import *
log.debug(">>> api_dataset_outputs ... creating api endpoints for DSO")
from . import api, document_type
### create namespace
ns = Namespace('infos', description='Dataset outputs : request and list all dso infos')
### import models
from solidata_api._models.models_dataset_output import *
# Marshalling models for the three levels of detail the API can return
mod_doc = Dso_infos(ns)
model_doc_out = mod_doc.mod_complete_out
model_doc_guest_out = mod_doc.model_guest_out
model_doc_min = mod_doc.model_minimum
# Bundle passed to the generic query helpers, which pick the model
# to serialize with based on the caller's role/claims.
models = {
	"model_doc_out" : model_doc_out ,
	"model_doc_guest_out" : model_doc_guest_out ,
	"model_doc_min" : model_doc_min ,
}
from solidata_api._models.models_stats import *
# Expected payload model for the stats endpoints below
mod_stats = Stats_query(ns, document_type)
mod_stats_query = mod_stats.model_stats_query
### + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ###
### ROUTES
### + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ###
### cf : response codes : https://restfulapi.net/http-status-codes/
@ns.doc(security='apikey')
@ns.route("/get_one/<string:doc_id>")
class Dso_infos_(Resource):
	"""
	DSO infos
	GET - Shows a document's infos
	"""
	@ns.doc('dso_infos')
	@ns.expect(query_pag_args, query_data_dso_arguments)
	# @jwt_optional
	@jwt_optional_sd
	@ns.doc(params={'doc_id': 'the dataset output oid'})
	def get(self, doc_id):
		"""
		get infos of a specific dso in db
		:param doc_id : dsi's oid <doc_id>
		>
		--- needs : a dso/project doc_id in the url
		--- optional : request arguments (pagination|query), json web token in headers... (cf : solidata_api._parsers.parser_classes)
		>>> returns : dso/project data
		"""
		### DEBUGGING
		print()
		print("-+- "*40)
		log.debug( "ROUTE class : %s", self.__class__.__name__ )
		### DEBUG check
		# log.debug ("payload : \n{}".format(pformat(ns.payload)))
		### check client identity and claims
		# claims = get_jwt_claims()
		claims = returnClaims()
		log.debug("claims : \n %s", pformat(claims) )
		### query db from generic function
		query_args = query_data_dso_arguments.parse_args(request)
		page_args = pagination_arguments.parse_args(request)
		# Generic single-document query; presumably roles_for_complete controls
		# which roles receive the complete output model -- confirm in Query_db_doc
		results, response_code = Query_db_doc (
			ns,
			models,
			document_type,
			doc_id,
			claims,
			page_args,
			query_args,
			roles_for_complete = ["admin"],
		)
		log.debug("results have been retrieved ... " )
		# log.debug("results : \n%s ", pformat(results) )
		# log.debug("results['data']['infos']['title'] : %s", results['data']['infos']['title'])
		# log.debug("len(results['data']['data_raw']['f_data']) : %s", len(results['data']['data_raw']['f_data']) )
		return results, response_code
@ns.doc(security='apikey')
@ns.route("/get_one_stats/<string:doc_id>")
class Dso_stats_(Resource):
	"""
	DSO stats
	POST - computes one or several stat series for a single document
	"""
	@ns.doc('dso_stats')
	@ns.expect( [mod_stats_query], query_data_stats_arguments)
	# @jwt_optional
	@jwt_optional_sd
	@ns.doc(params={'doc_id': 'the dataset input oid'})
	def post(self, doc_id):
		"""
		post stat request from a specific dso in db
		:param doc_id : dsi's oid <doc_id>
		>
		--- needs : dso's oid <doc_id>
		--- query args : search_for / only_stats / ...
		>>> returns : dso stats
		"""
		### DEBUGGING
		print()
		print("-+- "*40)
		log.debug( "ROUTE class : %s", self.__class__.__name__ )
		document_stat_type = "dso_doc"
		### check client identity and claims
		claims = returnClaims()
		log.debug("claims : \n %s", pformat(claims) )
		# log.debug("request : \n%s", pformat(request.__dict__) )
		log.debug("request.args : \n%s", pformat(request.args) )
		### DEBUG check payload
		log.debug ("payload : \n{}".format(pformat(ns.payload)))
		### query db from generic function
		query_args = query_data_stats_arguments.parse_args(request)
		log.debug("query_args : \n%s", pformat(query_args) )
		stats_results = {
			"msg" : "Dear user, here comes the several series you requested on this document...",
			"query" : query_args,
			"series" : []
		}
		stats_response_code = 200
		# One Query_db_stats call per requested serie in the posted payload
		for payload_req in ns.payload :
			results, response_code = Query_db_stats (
				ns,
				document_type,
				claims,
				query_args,
				doc_id = doc_id,
				is_one_stat = True,
				roles_for_complete = ["admin"],
				payload = payload_req["agg_fields"]
			)
			log.debug("stats results have been retrieved ... " )
			log.debug("results : \n%s ", pformat(results) )
			stats_results["series"].append({
				"serie_id" : payload_req["serie_id"],
				"results" : results,
			})
		log.debug ("stats_results : \n%s", pformat(stats_results) )
		# return results, response_code
		return stats_results, stats_response_code
@ns.doc(security='apikey')
@ns.route('/list')
class Dso_List(Resource):
	"""
	DSO list
	GET - paginated list of all dataset outputs
	"""
	@ns.doc('dso_list')
	@ns.expect(query_pag_args)
	# @jwt_optional
	@jwt_optional_sd
	# @anonymous_required
	def get(self):
		"""
		list of all dso in db
		>
		--- needs : nothing
		--- optionnal args : pagination, list of oid_prj, list of tags, query...
		>>> returns : dso/prj data as a list
		"""
		### DEBUGGING
		print()
		print("-+- "*40)
		log.debug( "ROUTE class : %s", self.__class__.__name__ )
		log.debug( "request : \n%s", pformat(request.__dict__) )
		### DEBUG check
		log.debug ("payload : \n{}".format(pformat(ns.payload)))
		### check client identity and claims
		# claims = get_jwt_claims()
		claims = returnClaims()
		log.debug("claims : \n %s", pformat(claims) )
		# log.debug("request.args : %s ", request.args)
		# args_type = type(request.__dict__["args"])
		# log.debug("args_type : %s ", args_type)
		### query db from generic function
		query_args = query_arguments.parse_args(request)
		page_args = pagination_arguments.parse_args(request)
		log.debug ("page_args : \n{}".format(page_args))
		results, response_code = Query_db_list (
			ns,
			models,
			document_type,
			claims,
			page_args,
			query_args,
			roles_for_complete = ["admin"],
		)
		log.debug("results have been retrieved ... " )
		# log.debug("results : \n%s ", pformat(results) )
		return results, response_code
@ns.doc(security='apikey')
@ns.route("/list_stats")
class Dso_list_stats_(Resource):
	"""
	DSO list stats
	POST - computes stats over the whole dso collection
	"""
	@ns.doc('dso_list_stats')
	# @ns.expect(query_data_stats_arguments)
	@ns.expect( [mod_stats_query], query_data_stats_arguments)
	# @ns.expect({ "stats_query": [mod_stats_query] }, query_data_stats_arguments)
	# @ns.expect( [[mod_stats_query] })
	# @jwt_optional
	@jwt_optional_sd
	def post(self):
		"""
		post stat request from a list of dso in db
		>
		--- query args : search_for / only_stats / ...
		>>> returns : dso list stats data
		"""
		### DEBUGGING
		print()
		print("-+- "*40)
		log.debug( "ROUTE class : %s", self.__class__.__name__ )
		### check client identity and claims
		claims = returnClaims()
		log.debug("claims : \n %s", pformat(claims) )
		# log.debug("request : \n%s", pformat(request.__dict__) )
		log.debug("request.args : \n%s", pformat(request.args) )
		### DEBUG check payload
		log.debug ("ns.payload : \n{}".format(pformat(ns.payload)))
		### query db from generic function
		query_args = query_data_stats_arguments.parse_args(request)
		log.debug("query_args : \n%s", pformat(query_args) )
		# Unlike the single-doc endpoint, the whole payload is forwarded at once
		results, response_code = Query_db_stats (
			ns,
			document_type,
			claims,
			query_args,
			roles_for_complete = ["admin"],
			payload = ns.payload
		)
		log.debug("stats results have been retrieved ... " )
		log.debug("results : \n%s ", pformat(results) )
		return results, response_code
<gh_stars>0
#!/usr/bin/env python
# -*- coding:utf-8 -*-
import platform
from DocumentSearch import settings
import docx
from pdfminer.pdfpage import PDFPage
from pdfminer.layout import LAParams
from pdfminer.pdfinterp import PDFResourceManager,PDFPageInterpreter
from pdfminer.converter import TextConverter
from cStringIO import StringIO
from elasticsearch import Elasticsearch
import json
def get_file_absolute_path(relative_file):
    """Join settings.MEDIA_ROOT with *relative_file* using the platform's separator.

    On Windows forward slashes are rewritten to backslashes; Linux and any
    other POSIX-like system share the forward-slash form.  The relative
    path is decoded to unicode (Python 2 semantics).
    """
    if platform.system() == 'Windows':
        # Windows: backslash-separated path
        return settings.MEDIA_ROOT + '\\' + str(relative_file).replace('/', '\\').decode('utf-8')
    # Linux, Ubuntu, etc. all follow the POSIX convention
    return settings.MEDIA_ROOT + "/" + str(relative_file).decode('utf-8')
# Import a document dict into Elasticsearch (creating the index on first use)
def sync_es(inputdict,idnum):
    """Index *inputdict* into the "documentindex" index under id *idnum*.

    The index is created with an ik_max_word (Chinese) analyzer mapping
    the first time it is missing.  Returns the Elasticsearch index response.
    """
    es = Elasticsearch([settings.ES_URL])
    # Static mapping: full-text fields use the ik_max_word analyzer,
    # filepath is stored but not indexed.
    documentmapping = {
        "mappings" : {
            "documentsearch" : {
                "_all": {
                "analyzer": "ik_max_word",
                "search_analyzer": "ik_max_word",
                "term_vector": "no",
                "store": "false"
                },
                "properties" : {
                    "docname" : {
                        "type" : "string",
                        "analyzer": "ik_max_word",
                        "search_analyzer": "ik_max_word",
                        "include_in_all": "true",
                        "boost": 8
                    },
                    "doctype" : {
                        "type" : "string",
                        "analyzer": "ik_max_word",
                        "search_analyzer": "ik_max_word",
                        "include_in_all": "true",
                        "boost": 8
                    },
                    "description" : {
                        "type" : "string",
                        "analyzer": "ik_max_word",
                        "search_analyzer": "ik_max_word",
                        "include_in_all": "true",
                        "boost": 8
                    },
                    "content" : {
                        "type" : "string",
                        "analyzer": "ik_max_word",
                        "search_analyzer": "ik_max_word",
                        "include_in_all": "true",
                        "boost": 8
                    },
                    "filepath" : {
                        "type" : "string",
                        "index":"no"
                    },
                }
            }
        }
    }
    indexName = "documentindex"
    # ignore=400 tolerates a concurrent "index already exists" error
    if not es.indices.exists(indexName):
        es.indices.create(index = indexName, body = documentmapping,ignore = 400)
    return es.index(index=indexName, doc_type="documentsearch", body=inputdict, id=idnum)
def import_txt_content(id,doc_title,doc_type,doc_description,filepath):
    """Read a plain-text file and index it into Elasticsearch.

    Content is decoded as UTF-8 first, falling back to GBK (Python 2
    semantics).  Returns the Elasticsearch index response.
    """
    with open(filepath.decode("utf-8"),'rU') as f:
        raw = f.read()
    try:
        content = unicode(raw.decode('utf-8'))
    except:
        content = unicode(raw.decode('gbk'))
    doc = {
        u'docname': doc_title,
        u'doctype': doc_type,
        u'description': doc_description,
        u'filepath': filepath,
        u'content': content,
    }
    return sync_es(doc, id)
def import_word_content(id,doc_title,doc_type,doc_description,filepath):
    """Extract paragraph text from a .docx file and index it into Elasticsearch.

    Paragraphs are joined with newlines; the text is decoded as UTF-8 with
    a GBK fallback (Python 2 semantics).
    """
    paragraphs = docx.Document(filepath).paragraphs
    docText = '\n'.join([p.text.encode('utf-8') for p in paragraphs])
    try:
        content = unicode(docText.decode('utf-8'))
    except:
        content = unicode(docText.decode('gbk'))
    doc = {
        u'docname': doc_title,
        u'doctype': doc_type,
        u'description': doc_description,
        u'filepath': filepath,
        u'content': content,
    }
    return sync_es(doc, id)
def import_pdf_content(id,doc_title,doc_type,doc_description,filepath):
    """Extract the text of a PDF with pdfminer and index it into Elasticsearch.

    Every page is fed through a shared TextConverter that accumulates into
    *retstr*; the result is decoded as UTF-8 with a GBK fallback.
    """
    retstr = StringIO()
    rsrcmgr = PDFResourceManager()
    laparams = LAParams()
    codec = 'utf-8'
    device = TextConverter(rsrcmgr,retstr,codec=codec,laparams=laparams)
    with open(filepath, 'rb') as f:
        interpreter = PDFPageInterpreter(rsrcmgr, device)
        for page in PDFPage.get_pages(f):
            interpreter.process_page(page)
        # closing the converter flushes any buffered text into retstr
        device.close()
    pdfstr = retstr.getvalue()
    retstr.close()
    try:
        tmpcontent = unicode(pdfstr.decode('utf-8'))
    except:
        tmpcontent = unicode(pdfstr.decode('gbk'))
    es_import_dict = {}
    es_import_dict[u'docname'] = doc_title
    es_import_dict[u'doctype'] = doc_type
    es_import_dict[u'description'] = doc_description
    es_import_dict[u'filepath'] = filepath
    es_import_dict[u'content'] = tmpcontent
    return sync_es(es_import_dict,id)
def search_result(queryStatements):
    """Run a full-text query_string search against the document index.

    :param queryStatements: raw query string; terms are ANDed by default.
    :return: the raw Elasticsearch search response.
    """
    es = Elasticsearch([settings.ES_URL])
    body = {
        "query" : {
            "query_string" : {
                "analyze_wildcard" : "true",
                "default_operator" : "AND",
                "query" : queryStatements
            }
        }
    }
    return es.search(index="documentindex", body=body)
def del_es_doc(id):
    """Delete one document from the search index by its id."""
    es = Elasticsearch([settings.ES_URL])
    return es.delete(index="documentindex", doc_type="documentsearch", id=id)
from django.core.urlresolvers import reverse
from edivorce.apps.core.models import Question
from edivorce.apps.core.utils.question_step_mapping import question_step_mapping, pre_qual_step_question_mapping
def evaluate_numeric_condition(target, reveal_response):
    """Evaluate a numeric comparison encoded in *reveal_response*.

    The condition is a prefix operator (>=, <=, ==, <, >) followed by a
    number; both sides are compared as floats.

    :param target: the question's answer value being tested.
    :param reveal_response: the encoded condition, e.g. ">=3".
    :return: True/False for a recognised condition, or None when the
        answer is blank or no numeric operator prefix is present.
    """
    if target == '':  # a blank answer can never satisfy a numeric test
        return None
    # Two-character operators must be tried before their one-character
    # prefixes ('>' would otherwise shadow '>=').
    for op, compare in (
        ('>=', lambda a, b: a >= b),
        ('<=', lambda a, b: a <= b),
        ('==', lambda a, b: a == b),
        ('<', lambda a, b: a < b),
        ('>', lambda a, b: a > b),
    ):
        if reveal_response.startswith(op):
            return compare(float(target), float(reveal_response[len(op):]))
    return None
def get_step_status(responses_by_step):
    """Map each step to "Not started", "Started" or "Complete".

    :param responses_by_step: dict step -> list of user responses.
    """
    status_dict = {}
    for step, responses in responses_by_step.items():
        if not responses:
            status_dict[step] = "Not started"
        elif is_complete(step, responses)[0]:
            status_dict[step] = "Complete"
        else:
            status_dict[step] = "Started"
    return status_dict
def is_complete(step, lst):
    """
    Check required field of question for complete state
    Required: question is always require user response to be complete
    Conditional: Optional question needed depends on reveal_response value of conditional_target.

    :param step: step name used to look up its question keys.
    :param lst: list of user-response dicts for this step.
    :return: (complete, missing_question_keys) tuple.
    """
    # No responses at all -> step cannot be complete.
    if not lst:
        return False, []
    question_list = Question.objects.filter(key__in=question_step_mapping[step])
    required_list = list(question_list.filter(required='Required').values_list("key", flat=True))
    conditional_list = list(question_list.filter(required='Conditional'))
    complete = True
    missing_responses = []
    for question_key in required_list:
        # everything in the required_list is required
        if not __has_value(question_key, lst):
            complete = False
            missing_responses += [question_key]
    for question in conditional_list:
        # find the response to the conditional target
        for target in lst:
            if target["question_id"] == question.conditional_target:
                if __condition_met(question.reveal_response, target, lst):
                    # the condition was met then the question is required.
                    # ... so check if it has a value
                    if not __has_value(question.key, lst):
                        complete = False
                        missing_responses += [question.key]
    return complete, missing_responses
def get_formatted_incomplete_list(missed_question_keys):
    """
    Returns a list of dicts that contain the following information for the question
    that was not answered. Each dict contains the name of the question, as stored in
    the database, and the url of the page where the question is found.
    :param missed_question_keys:
    :return: list of dicts.
    """
    missed_questions = []
    for question in Question.objects.filter(key__in=missed_question_keys):
        # Locate which pre-qualification step the question belongs to,
        # so we can link straight to its page.
        for step, step_questions in pre_qual_step_question_mapping.items():
            if question.key in step_questions:
                missed_questions.append({
                    'title': question.name,
                    'step_url': reverse('prequalification', kwargs={'step': step}),
                })
    return missed_questions
def __condition_met(reveal_response, target, lst):
    """Return True when *target*'s value satisfies *reveal_response*.

    Numeric conditions (e.g. ">=2") take precedence; otherwise an exact
    string match is required.  Conditional targets are followed
    recursively up the chain until a non-conditional question is reached.
    """
    numeric_result = evaluate_numeric_condition(target["value"], reveal_response)
    if numeric_result is False:
        return False
    if numeric_result is None and target["value"] != reveal_response:
        return False
    # Non-conditional target: the condition is met outright.
    if target['question__required'] != 'Conditional':
        return True
    # Conditional target: recurse on its own condition.
    next_reveal = target["question__reveal_response"]
    next_target_id = target["question__conditional_target"]
    for candidate in lst:
        if candidate["question_id"] == next_target_id:
            # recursively search up the tree
            return __condition_met(next_reveal, candidate, lst)
    # The next target question is unanswered -> condition not met.
    return False
def __has_value(key, lst):
    """Return True when *key* has a non-empty response in *lst*.

    The empty string, an empty JSON list and the empty two-field pair
    '[["",""]]' all count as "no answer".
    """
    empties = ("", "[]", '[["",""]]')
    for user_response in lst:
        if user_response["question_id"] == key and user_response["value"] not in empties:
            return True
    return False
|
# -*- coding: utf-8 -*-
from datetime import datetime
from flask import render_template, Flask, g, session, abort, redirect, jsonify, request, url_for
from werkzeug.middleware.proxy_fix import ProxyFix
from urllib.parse import urlparse, urlunparse
from flask_dance.consumer.storage.sqla import SQLAlchemyStorage
from telegram.messageentity import MessageEntity
import telegram
import logging
import os
from . import exceptions, models
from .helpers import get_bot
from .notion import notion_bp
from .database import SessionManager, SessionLocal
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
# Headers sent with every Notion API request; pins the REST API version.
notion_headers = {
    'Notion-Version': '2021-05-11',
}
def _telegram_user():
    """Resolve the current TelegramUser from flask.g or the session, or None."""
    uid = g.get('telegram_user_id', None) or session.get('telegram_user_id', None)
    if not uid:
        return None
    return g.db.query(models.TelegramUser).filter(
        models.TelegramUser.telegram_user_id == uid
    ).first()
def _reauth_notion(message):
    """Drop the user's stored Notion credentials and re-send the auth link."""
    user = _telegram_user()
    # Deleting the NotionAuth row forces a fresh OAuth round-trip.
    g.db.delete(user.notion_auth)
    g.db.commit()
    _send_auth_message(user, message)
def _send_welcome_message(user, message):
    """Greet a newly-seen user in their Telegram chat."""
    get_bot().sendMessage(
        chat_id=user.telegram_chat_id,
        text='Hello! This little bot will create a new page in whatever database you point it at.',
    )
def _send_auth_message(user, message):
    """Send the user a link that starts the Notion OAuth flow."""
    auth_url = full_url_for('notion_auth', telegram_user_id=user.telegram_user_id)
    get_bot().sendMessage(
        chat_id=user.telegram_chat_id,
        text='Visit the following URL to connect your Notion account - %s - and note for now ONLY CHOOSE ONE database!' % auth_url,
    )
def _send_database_message(user, message):
    """List the Notion databases we can access and help the user pick one.

    Exactly one accessible database is auto-selected as the default;
    zero or multiple databases produce an explanatory chat message.
    """
    bot = get_bot()
    bot.sendMessage(
        chat_id = user.telegram_chat_id,
        text = 'You need to tell me which database you want me to add pages to. Hang on while I show you a list...',
    )
    # Renamed from 'request' to avoid shadowing flask.request.
    response = notion_bp.session.get('/v1/databases', headers=notion_headers)
    if response.ok:
        results = response.json()['results']
        if not results:
            bot.sendMessage(
                chat_id = user.telegram_chat_id,
                text = 'I don\'t have access to any databases, you may need to disconnect this integration within your Notion settings page to allow access to different databases.',
            )
        else:
            def _database_title(database):
                # Databases carry a rich-text title; use its plain-text form.
                return database['title'][0]['plain_text']
            if len(results) == 1:
                bot.sendMessage(
                    chat_id = user.telegram_chat_id,
                    text = 'Found 1 database - %s - and setting that as default' % _database_title(results[0]),
                )
                _choose_database(results[0]['id'])
            else:
                # Bug fix: the %d placeholder was never interpolated before.
                bot.sendMessage(
                    chat_id = user.telegram_chat_id,
                    text = 'I found %d databases, but I can only handle one at the moment. You\'ll have to remove the integration and re-add it in order to choose which databases (pages) you allow me to access.' % len(results),
                )
                # TODO multiple databases requires implementing /database command, and taking a parameter
    else:
        bot.sendMessage(
            chat_id = user.telegram_chat_id,
            text = 'Something went wrong, try again in a few minutes!',
        )
def _track_user_from_message(db, message):
    """Fetch-or-create the TelegramUser row for *message*'s sender.

    Creates a row on first contact; otherwise refreshes the chat id and
    the last-seen timestamp.

    :return: (user, created) where *created* is True only on first sight.
    """
    created = False
    default_language_code = 'en'
    user = db.query(models.TelegramUser).filter(models.TelegramUser.telegram_user_id == message.from_user.id).first()
    if not user:
        created = True
        user = models.TelegramUser(
            telegram_user_id=message.from_user.id,
            telegram_chat_id=message.chat.id,
            first_name=message.from_user.first_name or '',
            last_name=message.from_user.last_name or '',
            username=message.from_user.username or '',
            language_code=message.from_user.language_code or default_language_code,
            first_seen=datetime.now(),
            last_seen=datetime.now(),
        )
    else:
        # Bug fix: these were previously assigned to the db session object
        # instead of the user row, so returning users were never updated.
        user.telegram_chat_id = message.chat_id
        user.last_seen = datetime.now()
    db.add(user)
    db.commit()
    return user, created
def _handle_start(message):
    """Handle /start: kick off the Notion auth flow for this user."""
    _send_auth_message(_telegram_user(), message)
def _handle_help(message):
    """Handle /help by sending the (placeholder) help text."""
    get_bot().sendMessage(
        chat_id=message.chat.id,
        text='Some helpful message goes here about /help',
    )
def _handle_stop(message):
    """Handle /stop (placeholder - not implemented yet)."""
    get_bot().sendMessage(
        chat_id=message.chat.id,
        text='NOT IMPLEMENTED /stop',
    )
def _handle_about(message):
    """Handle /about by sending the (placeholder) about text."""
    get_bot().sendMessage(
        chat_id=message.chat.id,
        text='Some aboutful message goes here about /about',
    )
def _handle_database(message = None):
    """Handle /database: list and/or choose the Notion target database."""
    _send_database_message(_telegram_user(), message)
class SQLAlchemySessionStorage(SQLAlchemyStorage):
    # Flask-Dance storage that resolves its SQLAlchemy session lazily from
    # flask.g (one session per request) instead of holding a fixed session.
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Rebinding the class-level 'session' property on every init is
        # redundant after the first instance, but harmless: '.session'
        # always evaluates to the current request's g.db.
        SQLAlchemySessionStorage.session = property(lambda x: g.db)
# Hook the storage into the Notion OAuth blueprint; token rows are keyed
# by the current Telegram user.
notion_bp.storage = SQLAlchemySessionStorage(models.NotionAuth, None, user=_telegram_user)
def request_source():
    """Return scheme://netloc of the current request URL (no path or query)."""
    parts = urlparse(request.url)
    return urlunparse((parts.scheme, parts.netloc, '', None, None, None))
def full_url_for(*args, **kwargs):
    """Like flask.url_for, but absolute: prefixes the app's external base URL.

    The base may be overridden with the private ``_base_url`` keyword.
    """
    base = kwargs.pop('_base_url', None) or request_source()
    return base + url_for(*args, **kwargs)
def _choose_database(database_id):
    """Fetch a database's metadata from Notion and store it as the user's target."""
    response = notion_bp.session.get('/v1/databases/%s' % database_id, headers=notion_headers)
    assert response.ok, response.text
    notion_auth = _telegram_user().notion_auth
    notion_auth.database = response.json()
    g.db.add(notion_auth)
    g.db.commit()
    g.db.refresh(notion_auth)
def build_app():
    """Create and configure the Flask application.

    Wires up the Notion OAuth blueprint, a per-request SQLAlchemy session
    on flask.g, and the Telegram webhook endpoint.
    """
    app = Flask(__name__)
    app.wsgi_app = ProxyFix(app.wsgi_app)
    app.secret_key = os.environ['SECRET_KEY']
    app.register_blueprint(notion_bp, url_prefix="/login")
    exceptions.init(app)
    @app.before_request
    def pre_request():
        # Fresh DB session per request; torn down again in post_request.
        g.db = SessionLocal()
        g.telegram_user_id = None
    @app.after_request
    def post_request(response):
        g.telegram_user_id = None
        try:
            # Roll back anything uncommitted so the pooled connection is clean.
            g.db.rollback()
        except:
            pass
        finally:
            g.db.close()
            g.db = None
        return response
    @app.route('/all-done')
    def back_to_telegram():
        # Post-OAuth landing page: nudge the user to pick a database.
        _handle_database()
        return render_template('back_to_telegram.html')
    @app.route('/auth-test')
    def auth_test():
        # NOTE(review): stub returns None, which Flask treats as an error;
        # intended behavior unclear - confirm before relying on this route.
        pass
    @app.route('/', methods = [ 'GET', 'POST' ])
    def webhook():
        """Telegram webhook: track the sender, dispatch bot commands, and
        forward plain messages to Notion as new pages.

        Example update payload (abridged):
            {"message": {"chat": {...}, "from": {...},
                         "entities": [{"type": "bot_command",
                                       "offset": 0, "length": 6}],
                         "text": "/start"},
             "update_id": ...}
        """
        data = request.json
        if not data:
            return abort(400)
        bot = get_bot()
        update = telegram.Update.de_json(
            data = request.json,
            bot = bot,
        )
        message = update.message
        # Flow:
        #   first contact        -> welcome message
        #   no Notion creds      -> auth link
        #   no database chosen   -> database picker
        #   bot command          -> dispatch to handler
        #   anything else        -> create a Notion page from the text
        if message and not message.from_user.is_bot:
            user, created = _track_user_from_message(g.db, message)
            notion_auth = user.notion_auth
            if user:
                g.telegram_user_id = user.telegram_user_id
            # slightly hack approach to "is this the first time we've seen this user?"
            if created:
                _send_welcome_message(user, message)
            commands = filter(lambda entity: entity.type == MessageEntity.BOT_COMMAND, message.entities)
            command = next(commands, None)
            if command:
                # Bug fix: the slice end must be offset + length, not length,
                # or a command that does not start the message is truncated.
                command_text = message.text[command.offset:command.offset + command.length]
                command_handlers = {
                    '/start': _handle_start,
                    '/help': _handle_help,
                    '/stop': _handle_stop,
                    '/about': _handle_about,
                    '/database': _handle_database,
                    '/reauth': _reauth_notion,
                }
                # Commands that need a working Notion connection first.
                requires_auth = [
                    '/database',
                ]
                if command_text in command_handlers:
                    if command_text in requires_auth and not notion_auth:
                        bot.sendMessage(
                            chat_id = message.chat.id,
                            text = 'Not ready for that! You need to setup with Notion first',
                        )
                        _send_auth_message(user, message)
                    else:
                        command_handlers[command_text](message)
                else:
                    bot.sendMessage(
                        chat_id = message.chat.id,
                        text = 'You sent a command I don\'t understand - %s' % command_text,
                    )
            elif not notion_auth:
                _send_auth_message(user, message)
            elif not notion_auth.database:
                _send_database_message(user, message)
            else:
                # Plain text: create a page whose title is the message body.
                properties = notion_auth.database['properties']
                title_property = next(filter(lambda key: properties[key]['type'] == 'title', properties.keys()))
                payload = {
                    'parent': {
                        'type': 'database_id',
                        'database_id': notion_auth.database['id'],
                    },
                    'properties': {
                        title_property: {
                            'type': 'title',
                            'title': [
                                {
                                    'type': 'text',
                                    'text': {
                                        'content': message.text,
                                    }
                                }
                            ]
                        }
                    }
                }
                response = notion_bp.session.post('/v1/pages', json=payload, headers=notion_headers)
                msg = 'Done! ✅'
                if not response.ok:
                    msg = 'Error from Notion 😩'
                bot.sendMessage(
                    chat_id = message.chat.id,
                    text = msg,
                )
        return jsonify({ 'ok': 1 })
    @app.route('/notion/<telegram_user_id>')
    def notion_auth(telegram_user_id):
        # Stash the Telegram user id so the OAuth callback can link accounts.
        session['telegram_user_id'] = telegram_user_id
        session.modified = True
        return redirect(url_for('notion.login'))
    @app.route('/ping')
    def ping():
        return jsonify({ 'ok': 1 })
    # can also use curl -F "url=https://<YOURDOMAIN.EXAMPLE>/<WEBHOOKLOCATION>" https://api.telegram.org/bot<YOURTOKEN>/setWebhook
    @app.route('/setup', methods = [ 'GET', 'POST' ])
    def setup_webhook():
        bot = get_bot()
        # Bug fix: this was a plain string literal, so the URL was never
        # interpolated; use lazy %-style logging args instead.
        logger.debug('setting webhook to %s', request.url_root)
        bot.set_webhook(request.url_root)
        return jsonify({ 'ok': 1 })
    return app
# Module-level WSGI entry point (e.g. "gunicorn module:application").
application = build_app()
if __name__ == '__main__':
    # Development server only; production should run behind a WSGI server.
    application.run(
        debug = True,
        port = 6000,
    )
|
from collections import namedtuple
import logging
import os
import signal
import socket
logger = logging.getLogger(__name__)
def bimap_dict(key_f, val_f, d):
    """Return a new dict with *key_f* applied to every key and *val_f* to every value."""
    converted = {}
    for key, value in d.items():
        converted[key_f(key)] = val_f(value)
    return converted
def str2sig(s):
    """Convert a signal spec (number string or name like 'SIGINT') to its int value.

    Raises ValueError when *s* is neither an integer nor a known signal
    name (matching is case-insensitive).
    """
    try:
        return int(s)
    except ValueError:
        pass
    wanted = s.upper()
    for sig in signal.Signals:
        if wanted == sig.name:
            return sig.value
    raise ValueError("unknown signal '{}'".format(s))
class GraphException(Exception):
    """Raised when a pipe-graph description is structurally invalid."""
    pass
class Node(namedtuple('Node', ('command', 'inputs', 'outputs', 'sockets', 'separate_group', 'signals'))):
    """One process in the pipe graph: its command line plus fd wiring."""
    @classmethod
    def from_dict(cls, description):
        """Build a Node from a description dict, warning about unknown keys.

        inputs/outputs/sockets map child fd (int) -> pipe/socket name (str).
        """
        extra = description.keys() - set(cls._fields)
        if len(extra) > 0:
            logger.warning("Unknown keys in node description dict: {}".format(extra))
        return cls(
            command=[str(part) for part in description['command']],
            inputs=bimap_dict(int, str, description.get('inputs', {})),
            outputs=bimap_dict(int, str, description.get('outputs', {})),
            sockets=bimap_dict(int, str, description.get('sockets', {})),
            separate_group=bool(description.get('separate_group', False)),
            signals=[str2sig(str(s)) for s in description.get('signals', [])],
        )
class Graph(namedtuple('Graph', ('inputs', 'outputs', 'sockets', 'nodes'))):
    """Validated description of a process pipe graph.

    inputs/outputs/sockets map global names to fds supplied by the caller;
    nodes is the list of Node records to spawn.
    """
    @classmethod
    def from_dict(cls, description):
        """Build a Graph from a description dict and run all sanity checks.

        Hard structural errors raise GraphException; soft issues only log
        warnings.
        """
        unknown_keys = description.keys() - set(cls._fields)
        if len(unknown_keys) > 0:
            logger.warning("Unknown keys in graph description dict: {}".format(unknown_keys))
        g = cls(
            inputs=bimap_dict(str, int, description.get('inputs', {})),
            outputs=bimap_dict(str, int, description.get('outputs', {})),
            sockets=bimap_dict(str, int, description.get('sockets', {})),
            nodes=list(map(Node.from_dict, description.get('nodes', []))),
        )
        g.check_for_pipe_collisions()
        g.check_pipe_directions()
        g.check_for_fd_collisions()
        g.check_sockets()
        g.check_for_dead_ends()
        return g
    def check_for_pipe_collisions(self):
        """Reject pipes declared as both a global input and a global output."""
        colliding = self.inputs.keys() & self.outputs.keys()
        if len(colliding) > 0:
            raise GraphException("Some pipes specified as both global inputs and outputs: {}".format(colliding))
    def check_pipe_directions(self):
        """Reject nodes that write a global input pipe or read a global output pipe."""
        for node_id, node in enumerate(self.nodes):
            for pipe_name in self.inputs.keys():
                if pipe_name in node.outputs.values():
                    raise GraphException(
                        "Pipe named '{}', definded as global input, "
                        "is used as output in node {}.".format(
                            pipe_name, node_id,
                        )
                    )
            for pipe_name in self.outputs.keys():
                if pipe_name in node.inputs.values():
                    raise GraphException(
                        "Pipe named '{}', definded as global output, "
                        "is used as input in node {}.".format(
                            pipe_name, node_id,
                        )
                    )
    def check_for_fd_collisions(self):
        """Reject nodes that assign more than one pipe/socket to a single child fd."""
        for node_id, node in enumerate(self.nodes):
            colliding_fds = (
                (
                    node.inputs.keys() & node.outputs.keys()
                ) | (
                    node.inputs.keys() & node.sockets.keys()
                ) | (
                    node.outputs.keys() & node.sockets.keys()
                )
            )
            if len(colliding_fds) > 0:
                raise GraphException(
                    "Multiple pipes/sockets specified for single fd. "
                    "I'm sorry, I'm afraid I can't connect that. (node {}, fds {})".format(
                        node_id, colliding_fds,
                    )
                )
    def check_for_dead_ends(self):
        """Warn (not raise) about pipes that are only ever written or only ever read."""
        written_pipes = set(self.inputs.keys())
        read_pipes = set(self.outputs.keys())
        for node in self.nodes:
            written_pipes.update(node.outputs.values())
            read_pipes.update(node.inputs.values())
        only_written = written_pipes - read_pipes
        only_read = read_pipes - written_pipes
        if len(only_written) > 0:
            logger.warning("Some pipes are never read: {}".format(only_written))
        if len(only_read) > 0:
            logger.warning("Some pipes are never written: {}".format(only_read))
    def check_sockets(self):
        """Warn when a socket pair name is used once or more than twice."""
        # dict socket_id -> number of uses
        socket_uses = {}
        for node_id, node in enumerate(self.nodes):
            for socket_id in node.sockets.values():
                n = socket_uses.get(socket_id, 0)
                n += 1
                if n > 2:
                    logger.warning(
                        "Socket name '{}' is used more than two times (node {})."
                        "I can take this. And you can easily make mistake.".format(
                            socket_id, node_id,
                        )
                    )
                socket_uses[socket_id] = n
        for socket_id, n in socket_uses.items():
            if n == 1:
                logger.warning(
                    "Socket name '{}' is used only one time."
                    "The other end will be flapping in the breeze (untill we close it).".format(
                        socket_id,
                    )
                )
def apply_fd_mapping(fd_mapping):
    """ Takes dict target fd -> present fd. Moves fds to match the mapping. """
    def _dup_mapping(fd, new_fd):
        # Record that *fd* now also lives at *new_fd*: every pending entry
        # still pointing at fd is retargeted to the duplicate.
        logger.debug("fd {} duped to {}".format(fd, new_fd))
        for target_fd in fd_mapping.keys():
            if fd_mapping[target_fd] == fd:
                fd_mapping[target_fd] = new_fd
    for target_fd in fd_mapping.keys():
        fd = fd_mapping[target_fd]
        if fd == target_fd:
            # nothing to do
            logger.debug("fd {} already in place".format(fd))
            continue
        # if needed make target fd free
        if target_fd in fd_mapping.values():
            # target_fd is still needed by another entry: park it on a fresh
            # fd before it gets clobbered by dup2 below.
            saved_fd = os.dup(target_fd)
            _dup_mapping(target_fd, saved_fd)
        os.dup2(fd, target_fd, inheritable=False)
        _dup_mapping(fd, target_fd)
class PipeGraphSpawner:
    """Spawns the processes of a Graph, wiring pipes and sockets between them."""
    # Bookkeeping record for a spawned child: its argv and the signals
    # associated with it.
    Process = namedtuple('Process', ('command', 'signals'))
@classmethod
def from_graph(cls, graph):
spawner = cls(
inputs=graph.inputs,
outputs=graph.outputs,
)
for node in graph.nodes:
spawner.spawn(
node.command,
node.inputs, node.outputs, node.sockets,
node.separate_group, node.signals,
)
return spawner
def __init__(self, inputs={}, outputs={}, sockets={}):
self._reading_ends = {}
self._writing_ends = {}
self._socket_other_ends = {}
# collection of running subprocesses. dict pid -> Process
self._processes = {}
def register_fds(our_dict, input_dict):
for id, fd in input_dict.items():
os.set_inheritable(fd, False)
our_dict[id] = fd
register_fds(self._writing_ends, outputs)
register_fds(self._reading_ends, inputs)
register_fds(self._socket_other_ends, sockets)
def spawn(self, command, inputs, outputs, sockets, separate_group, signals):
fd_mapping = {}
fds_to_be_closed_in_parent = []
for subprocess_fd, pipe_id in inputs.items():
assert(subprocess_fd not in fd_mapping)
fd_mapping[subprocess_fd] = self._reading_end_fd(pipe_id)
for subprocess_fd, pipe_id in outputs.items():
assert(subprocess_fd not in fd_mapping)
fd_mapping[subprocess_fd] = self._writing_end_fd(pipe_id)
for subprocess_fd, socket_id in sockets.items():
assert(subprocess_fd not in fd_mapping)
fd = self._get_and_clear_socket_end(socket_id)
fd_mapping[subprocess_fd] = fd
fds_to_be_closed_in_parent.append(fd)
pid = os.fork()
if pid == 0:
# prepare fds
apply_fd_mapping(fd_mapping)
for fd in fd_mapping.keys():
os.set_inheritable(fd, True)
if separate_group:
# create new process group
logger.debug("creating new process group")
os.setpgid(0, 0)
# run target executable
os.execvp(command[0], command)
else:
assert(pid not in self._processes)
self._processes[pid] = self.Process(command=command, signals=signals)
logger.info(
"process %d spawned command=%s fd_mapping=%s",
pid, command, fd_mapping,
)
for fd in fds_to_be_closed_in_parent:
logger.debug("fd {}: closing".format(fd))
os.close(fd)
return pid
def _reading_end_fd(self, pipe_id):
if pipe_id not in self._reading_ends:
self._make_pipe(pipe_id)
return self._reading_ends[pipe_id]
def _writing_end_fd(self, pipe_id):
if pipe_id not in self._writing_ends:
self._make_pipe(pipe_id)
return self._writing_ends[pipe_id]
def _get_and_clear_socket_end(self, socket_id):
""" Behold! This method is unexpectedly unpure!
Calling this method twice will have different results.
Caller is responsible for taking care of retrieved fd. Especially she should close it after use.
"""
if socket_id in self._socket_other_ends:
fd = self._socket_other_ends[socket_id]
del self._socket_other_ends[socket_id]
return fd
else:
def getfd(sock):
fd = sock.detach()
assert(fd >= 0)
return fd
fd_a, fd_b = map(getfd, socket.socketpair())
logger.info("socket pair '{}' created, fds {} <-> {}".format(socket_id, fd_a, fd_b))
self._socket_other_ends[socket_id] = fd_b
return fd_a
def _make_pipe(self, pipe_id):
reading_end, writing_end = os.pipe()
logger.info("pipe '{}' created, fds {} -> {}".format(pipe_id, writing_end, reading_end))
assert(pipe_id not in self._writing_ends)
self._writing_ends[pipe_id] = writing_end
assert(pipe_id not in self._reading_ends)
self._reading_ends[pipe_id] = reading_end
def close_fds(self):
for fd in self._writing_ends.values():
logger.debug("fd {}: closing".format(fd))
os.close(fd)
for fd in self._reading_ends.values():
logger.debug("fd {}: closing".format(fd))
os.close(fd)
for fd in self._socket_other_ends.values():
logger.warning("fd {}: closing (unused socket end)".format(fd))
os.close(fd)
def join(self):
statusses = {}
while len(self._processes) > 0:
pid, code = os.wait()
if pid in self._processes:
status = code // 256 # extract high byte which is exit code
if status != 0:
logger.warning(
"process %d (%s) exited with unsuccessful code %d",
pid, self._processes[pid].command, status,
)
else:
logger.info(
"process %d (%s) exited with status %d",
pid, self._processes[pid].command, status,
)
del self._processes[pid]
statusses[pid] = status
else:
logger.warning("got exit status for unknown process %d", pid)
return statusses
def dispatch_signal(self, sig):
logger.debug("got %s (%d)", signal.Signals(sig).name, sig)
for pid, process in self._processes.items():
if sig in process.signals:
logger.info("killing %d (%s) with %s (%d)", pid, process.command, signal.Signals(sig).name, sig)
os.kill(pid, sig)
|
# coding=utf-8
# Copyright 2021 The Deeplab2 Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for axial_resnet_instances."""
import os
from absl import flags
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from deeplab2.model import test_utils
from deeplab2.model.encoder import axial_resnet_instances
FLAGS = flags.FLAGS
class AxialResnetInstancesTest(tf.test.TestCase, parameterized.TestCase):
  """Shape, parameter-count, checkpoint-naming, atrous-consistency and
  SavedModel-export tests for the models in axial_resnet_instances."""

  # The parameter count does not include the classification head.
  @parameterized.parameters(
      ('resnet50', 1, 23508032),
      ('resnet50_beta', 1, 23631808),  # 123776 more than resnet50
      ('max_deeplab_s_backbone', 1, 41343424),
      ('max_deeplab_l_backbone', 1, 175115392),
      ('axial_resnet_s', 1, 11466912),
      ('axial_resnet_l', 1, 43714048),  # 127872 fewer than axial_deeplab_l
      ('axial_deeplab_s', 1, 11565856),
      ('axial_deeplab_l', 1, 43841920),
      ('swidernet', 1, 109014080),  # SWideRNet-(1,1,1) without SE or SAC
      ('swidernet', 3, 333245504),  # Should be more than 3 x 109014080
      ('swidernet', 4.5, 487453760),  # Rounded down to [13, 27, 13, 13]
      ('axial_swidernet', 1, 136399392),
      ('axial_swidernet', 3, 393935520),
      ('axial_swidernet', 4.5, 570346912),
      )
  def test_model_output_shape_and_num_params(
      self, model_name, backbone_layer_multiplier, expected_num_params):
    """Builds each model; checks res5 output shape and trainable-param count."""
    model = axial_resnet_instances.get_model(
        model_name,
        backbone_layer_multiplier=backbone_layer_multiplier,
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001)
    output = model(tf.keras.Input(shape=(224, 224, 3)))
    # The small axial variants end at 1024 channels; all others at 2048.
    if model_name in ('axial_resnet_s', 'axial_deeplab_s'):
      self.assertListEqual(output['res5'].get_shape().as_list(),
                           [None, 14, 14, 1024])
    else:
      self.assertListEqual(output['res5'].get_shape().as_list(),
                           [None, 14, 14, 2048])
    num_params = np.sum(
        [np.prod(v.get_shape().as_list()) for v in model.trainable_weights])
    self.assertEqual(num_params, expected_num_params)

  def test_resnet50_variable_checkpoint_names(self):
    """Checks variable naming both in the model and in a saved checkpoint."""
    model = axial_resnet_instances.get_model(
        'resnet50',
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001)
    model(tf.keras.Input(shape=(224, 224, 3)))
    variable_names = [w.name for w in model.trainable_weights]
    test_variable_name = 'resnet50/stage4/block6/conv3_bn/batch_norm/beta:0'
    self.assertIn(test_variable_name, variable_names)
    temp_dir = self.create_tempdir()
    temp_path = os.path.join(temp_dir, 'ckpt')
    checkpoint = tf.train.Checkpoint(encoder=model)
    checkpoint.save(temp_path)
    # Checkpoint keys use object-attribute paths, not the layer names above.
    latest_checkpoint = tf.train.latest_checkpoint(temp_dir)
    reader = tf.train.load_checkpoint(latest_checkpoint)
    checkpoint_names = reader.get_variable_to_shape_map().keys()
    test_checkpoint_name = 'encoder/_stage4/_block6/_conv3_bn/_batch_norm/gamma/.ATTRIBUTES/VARIABLE_VALUE'
    self.assertIn(test_checkpoint_name, checkpoint_names)

  def test_max_deeplab_s_output_shape_and_num_params(self):
    """Verifies every MaX-DeepLab-S endpoint shape and its parameter count."""
    model = axial_resnet_instances.get_model(
        'max_deeplab_s',
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001)
    endpoints = model(tf.keras.Input(shape=(65, 65, 3)))
    self.assertListEqual(endpoints['backbone_output'].get_shape().as_list(),
                         [None, 5, 5, 2048])
    self.assertListEqual(
        endpoints['transformer_class_feature'].get_shape().as_list(),
        [None, 128, 256])
    self.assertListEqual(
        endpoints['transformer_mask_feature'].get_shape().as_list(),
        [None, 128, 256])
    self.assertListEqual(endpoints['feature_panoptic'].get_shape().as_list(),
                         [None, 17, 17, 256])
    self.assertListEqual(endpoints['feature_semantic'].get_shape().as_list(),
                         [None, 5, 5, 2048])
    num_params = np.sum(
        [np.prod(v.get_shape().as_list()) for v in model.trainable_weights])
    self.assertEqual(num_params, 61726624)

  def test_max_deeplab_l_output_shape_and_num_params(self):
    """Verifies every MaX-DeepLab-L endpoint shape and its parameter count."""
    model = axial_resnet_instances.get_model(
        'max_deeplab_l',
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001)
    endpoints = model(tf.keras.Input(shape=(65, 65, 3)))
    self.assertListEqual(endpoints['backbone_output'].get_shape().as_list(),
                         [None, 5, 5, 2048])
    self.assertListEqual(
        endpoints['transformer_class_feature'].get_shape().as_list(),
        [None, 128, 512])
    self.assertListEqual(
        endpoints['transformer_mask_feature'].get_shape().as_list(),
        [None, 128, 512])
    self.assertListEqual(endpoints['feature_panoptic'].get_shape().as_list(),
                         [None, 17, 17, 256])
    self.assertListEqual(endpoints['feature_semantic'].get_shape().as_list(),
                         [None, 17, 17, 256])
    num_params = np.sum(
        [np.prod(v.get_shape().as_list()) for v in model.trainable_weights])
    self.assertEqual(num_params, 450523232)

  def test_global_attention_absolute_positional_encoding_names(self):
    """Checks variables created by 1D positional encoding + global attention."""
    model = axial_resnet_instances.get_model(
        'max_deeplab_s_backbone',
        block_group_config={'use_global_beyond_stride': 16,
                            'positional_encoding_type': '1D',
                            'axial_layer_config': {
                                'use_query_rpe_similarity': False,
                                'use_key_rpe_similarity': False,
                                'retrieve_value_rpe': False}},
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001)
    model(tf.keras.Input(shape=(224, 224, 3)))
    variable_names = [w.name for w in model.trainable_weights]
    test_variable_name1 = 'max_deeplab_s_backbone/stage4/add_absolute_positional_encoding/height_axis_embeddings:0'
    test_variable_name2 = 'max_deeplab_s_backbone/stage4/block2/attention/global/qkv_kernel:0'
    self.assertIn(test_variable_name1, variable_names)
    self.assertIn(test_variable_name2, variable_names)

  @parameterized.product(
      (dict(model_name='resnet50', backbone_layer_multiplier=1),
       dict(model_name='resnet50_beta', backbone_layer_multiplier=1),
       dict(model_name='wide_resnet41', backbone_layer_multiplier=1),
       dict(model_name='swidernet', backbone_layer_multiplier=2)),
      output_stride=[4, 8, 16, 32])
  def test_model_atrous_consistency_with_output_stride_four(
      self, model_name, backbone_layer_multiplier, output_stride):
    """Dense extraction + subsampling must match direct atrous extraction."""
    tf.random.set_seed(0)
    # Create the input.
    pixel_inputs = test_utils.create_test_input(1, 225, 225, 3)
    # Create the model and the weights.
    model_1 = axial_resnet_instances.get_model(
        model_name,
        backbone_layer_multiplier=backbone_layer_multiplier,
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001,
        output_stride=4)
    # Create the weights.
    model_1(pixel_inputs, training=False)
    # Set the batch norm gamma as non-zero so that the 3x3 convolution affects
    # the output.
    for weight in model_1.trainable_weights:
      if '/gamma:0' in weight.name:
        weight.assign(tf.ones_like(weight))
    # Dense feature extraction followed by subsampling.
    pixel_outputs = model_1(pixel_inputs, training=False)['res5']
    downsampling_stride = output_stride // 4
    expected = pixel_outputs[:, ::downsampling_stride, ::downsampling_stride, :]
    # Feature extraction at the nominal network rate.
    model_2 = axial_resnet_instances.get_model(
        model_name,
        backbone_layer_multiplier=backbone_layer_multiplier,
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001,
        output_stride=output_stride)
    # Create the weights.
    model_2(pixel_inputs, training=False)
    # Make the two networks use the same weights.
    model_2.set_weights(model_1.get_weights())
    output = model_2(pixel_inputs, training=False)['res5']
    # Normalize the outputs. Since we set batch_norm gamma to 1, the output
    # activations can explode to a large standard deviation, which sometimes
    # cause numerical errors beyond the tolerances.
    normalizing_factor = tf.math.reduce_std(expected)
    # Compare normalized outputs.
    self.assertAllClose(output / normalizing_factor,
                        expected / normalizing_factor,
                        atol=1e-4, rtol=1e-4)

  @parameterized.parameters(
      ('resnet50',),
      ('resnet50_beta',),
      ('max_deeplab_s_backbone',),
      ('max_deeplab_l_backbone',),
      ('axial_resnet_s',),
      ('axial_resnet_l',),
      ('axial_deeplab_s',),
      ('axial_deeplab_l',),
      ('swidernet',),
      ('axial_swidernet',),
      )
  def test_model_export(self, model_name):
    """Every instance must be exportable as a SavedModel."""
    model = axial_resnet_instances.get_model(
        model_name,
        output_stride=16,
        backbone_layer_multiplier=1.0,
        bn_layer=tf.keras.layers.BatchNormalization,
        conv_kernel_weight_decay=0.0001,
        # Disable drop path as it is not compatible with model exporting.
        block_group_config={'drop_path_keep_prob': 1.0})
    model(tf.keras.Input([257, 257, 3], batch_size=1), training=False)
    export_dir = os.path.join(
        FLAGS.test_tmpdir, 'test_model_export', model_name)
    model.save(export_dir)
# Standard TensorFlow test entry point: discover and run the cases above.
if __name__ == '__main__':
  tf.test.main()
|
<reponame>rocky-roll-call/rrc-backend<gh_stars>0
# stdlib
from datetime import datetime
from shutil import rmtree
# django
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import reverse
# library
from rest_framework import status
from rest_framework.test import APIClient
# app
from users.tests.test_user_photo import make_image
from ..models import Cast
class CastModelTestCase(TestCase):
    """
    Tests the Cast model directly
    """

    def setUp(self):
        # One profile and one cast are enough for every lifecycle test below.
        self.profile = User.objects.create_user(
            username="test", email="<EMAIL>", password="<PASSWORD>"
        ).profile
        self.cast = Cast.objects.create(name="Test Cast")

    def test_details(self):
        """Check that features were created"""
        # Slug and timestamps are generated automatically on create.
        self.assertEqual(self.cast.slug, "test-cast")
        self.assertIsInstance(self.cast.created, datetime)
        self.assertIsInstance(self.cast.modified, datetime)

    def _add_check_remove(self, fadd, fcheck, fremv):
        """Runs lifecycle checks on a user

        Verifies: absent -> add -> present -> double-add raises ->
        remove -> absent -> double-remove raises.
        """
        self.assertFalse(fcheck(self.profile))
        fadd(self.profile)
        self.assertTrue(fcheck(self.profile))
        with self.assertRaises(ValueError):
            fadd(self.profile)
        fremv(self.profile)
        self.assertFalse(fcheck(self.profile))
        with self.assertRaises(ValueError):
            fremv(self.profile)

    def test_managers(self):
        """Tests manager lifecycle"""
        # Requires membership first
        with self.assertRaises(ValueError):
            self.cast.add_manager(self.profile)
        self.cast.add_member(self.profile)
        self._add_check_remove(
            self.cast.add_manager, self.cast.is_manager, self.cast.remove_manager
        )
        self.cast.remove_member(self.profile)

    def test_members(self):
        """Tests membership lifecycle"""
        self._add_check_remove(
            self.cast.add_member, self.cast.is_member, self.cast.remove_member
        )

    def test_member_add_removes_request(self):
        """Tests adding a member removes an existing request"""
        self.cast.add_member_request(self.profile)
        self.assertTrue(self.cast.has_requested_membership(self.profile))
        self.assertFalse(self.cast.is_member(self.profile))
        self.cast.add_member(self.profile)
        self.assertTrue(self.cast.is_member(self.profile))
        self.assertFalse(self.cast.has_requested_membership(self.profile))

    def test_member_add_if_blocked(self):
        """Can't add member if blocked"""
        self.cast.block_user(self.profile)
        self.assertTrue(self.cast.is_blocked(self.profile))
        with self.assertRaises(ValueError):
            self.cast.add_member(self.profile)
        # The failed add must leave both states untouched.
        self.assertFalse(self.cast.is_member(self.profile))
        self.assertTrue(self.cast.is_blocked(self.profile))
        self.cast.unblock_user(self.profile)

    def test_member_remove_if_manager(self):
        """Cannot remove member if a manager"""
        self.cast.add_member(self.profile)
        self.cast.add_manager(self.profile)
        self.assertTrue(self.cast.is_manager(self.profile))
        with self.assertRaises(ValueError):
            self.cast.remove_member(self.profile)
        self.assertTrue(self.cast.is_manager(self.profile))
        # Demote first, then member removal succeeds.
        self.cast.remove_manager(self.profile)
        self.cast.remove_member(self.profile)

    def test_requests(self):
        """Tests membership request lifecycle"""
        self._add_check_remove(
            self.cast.add_member_request,
            self.cast.has_requested_membership,
            self.cast.remove_member_request,
        )

    def test_request_if_member(self):
        """Can't request if already a member"""
        self.cast.add_member(self.profile)
        self.assertTrue(self.cast.is_member(self.profile))
        with self.assertRaises(ValueError):
            self.cast.add_member_request(self.profile)
        self.assertFalse(self.cast.has_requested_membership(self.profile))
        self.cast.remove_member(self.profile)

    def test_request_if_blocked(self):
        """Can't request if blocked"""
        self.cast.block_user(self.profile)
        self.assertFalse(self.cast.has_requested_membership(self.profile))
        with self.assertRaises(ValueError):
            self.cast.add_member_request(self.profile)
        self.assertFalse(self.cast.has_requested_membership(self.profile))
        self.cast.unblock_user(self.profile)

    def test_blocked(self):
        """Tests blocked user lifecycle"""
        self._add_check_remove(
            self.cast.block_user, self.cast.is_blocked, self.cast.unblock_user
        )

    def test_block_if_manager(self):
        """Can't block a manager"""
        self.cast.add_member(self.profile)
        self.cast.add_manager(self.profile)
        self.assertTrue(self.cast.is_manager(self.profile))
        with self.assertRaises(ValueError):
            self.cast.block_user(self.profile)
        self.assertFalse(self.cast.is_blocked(self.profile))
        self.assertTrue(self.cast.is_manager(self.profile))
        self.cast.remove_manager(self.profile)
        self.cast.remove_member(self.profile)

    def test_block_removes_members(self):
        """Blocking an existing member should remove them"""
        self.cast.add_member(self.profile)
        self.assertTrue(self.cast.is_member(self.profile))
        self.cast.block_user(self.profile)
        self.assertTrue(self.cast.is_blocked(self.profile))
        self.assertFalse(self.cast.is_member(self.profile))
class CastAPITestCase(TestCase):
    """
    Test the Cast API

    Covers list/create/retrieve/update/delete plus logo upload, using an
    authenticated client that manages cast1 but not cast2.
    """

    def setUp(self):
        # Wipe media so uploaded logos from earlier runs cannot interfere.
        rmtree(settings.MEDIA_ROOT, ignore_errors=True)
        user = User.objects.create_user(
            username="test", email="<EMAIL>", password="<PASSWORD>"
        )
        self.profile = user.profile
        self.cast1 = Cast.objects.create(name="Test Cast")
        self.cast2 = Cast.objects.create(name="Another Cast")
        # The authenticated user manages cast1 only.
        self.cast1.add_member(self.profile)
        self.cast1.add_manager(self.profile)
        self.client = APIClient()
        self.client.force_authenticate(user=user)

    def tearDown(self):
        rmtree(settings.MEDIA_ROOT, ignore_errors=True)

    def test_list(self):
        """Tests calling cast list"""
        response = self.client.get(reverse("casts"))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data), 2)

    def test_create(self):
        """Tests creating a new cast"""
        name, desc, email = "New Cast", "A new cast", "<EMAIL>"
        response = self.client.post(
            reverse("casts"), {"name": name, "description": desc, "email": email}
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        cast = Cast.objects.get(pk=response.data["id"])
        self.assertEqual(cast.name, name)
        self.assertEqual(cast.description, desc)
        self.assertEqual(cast.email, email)
        self.assertEqual(cast.slug, "new-cast")
        # The creator automatically becomes member and manager.
        self.assertTrue(cast.is_member(self.profile))
        self.assertTrue(cast.is_manager(self.profile))

    def test_unique_name(self):
        """Casts must have a unique name because of the generated url slug"""
        response = self.client.post(reverse("casts"), {"name": self.cast1.name})
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_retrieve(self):
        """Tests cast detail request"""
        response = self.client.get(reverse("cast", kwargs={"pk": self.cast1.pk}))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("name", response.data)

    def test_update(self):
        """Tests updating cast details"""
        self.assertEqual(self.cast1.name, "Test Cast")
        name, slug = "Updated Cast", "updated-cast"
        response = self.client.patch(
            reverse("cast", kwargs={"pk": self.cast1.pk}), data={"name": name}
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["name"], name)
        self.assertEqual(response.data["slug"], slug)
        # Re-fetch to confirm the change was persisted, not just serialized.
        cast = Cast.objects.get(pk=self.cast1.pk)
        self.assertEqual(cast.name, name)
        self.assertEqual(cast.slug, slug)

    def test_forbidden_update(self):
        """Prohibit updates to other casts"""
        response = self.client.patch(reverse("cast", kwargs={"pk": self.cast2.pk}))
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_update_image(self):
        """Tests updating a cast logo"""
        self.assertEqual(self.cast1.logo, "")
        tmpim = make_image()
        with open(tmpim.name, "rb") as data:
            response = self.client.patch(
                reverse("cast", kwargs={"pk": self.cast1.pk}),
                {"logo": data},
                format="multipart",
            )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertTrue(response.data["logo"].endswith(".jpg"))
        self.assertIn("logo", response.data["logo"])
        cast = Cast.objects.get(pk=self.cast1.pk)
        self.assertTrue(cast.logo.path.endswith(".jpg"))
        self.assertIn("logo", cast.logo.path)
        self.assertIn(self.cast1.slug, cast.logo.path)

    def test_delete(self):
        """Tests that a manager can delete their casts but not others"""
        response = self.client.delete(reverse("cast", kwargs={"pk": self.cast2.pk}))
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        # Delete should fail if the cast has more than one manager
        profile = User.objects.create_user(
            username="mctest", email="<EMAIL>", password="<PASSWORD>"
        ).profile
        self.cast1.add_member(profile)
        self.cast1.add_manager(profile)
        response = self.client.delete(reverse("cast", kwargs={"pk": self.cast1.pk}))
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.cast1.remove_manager(profile)
        response = self.client.delete(reverse("cast", kwargs={"pk": self.cast1.pk}))
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        # A second delete of the same cast is a 404.
        response = self.client.delete(reverse("cast", kwargs={"pk": self.cast1.pk}))
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
class CastListAPITestCase(TestCase):
    """
    Test the Cast API

    Exercises the per-cast list endpoints (members, managers, requests,
    blocked) through a shared add/check/remove helper.
    """

    def setUp(self):
        user = User.objects.create_user(
            username="test", email="<EMAIL>", password="<PASSWORD>"
        )
        self.profile1 = user.profile
        # A second profile is the target of every list operation.
        self.profile2 = User.objects.create_user(
            username="mctest", email="<EMAIL>", password="<PASSWORD>"
        ).profile
        self.cast1 = Cast.objects.create(name="Test Cast")
        self.cast2 = Cast.objects.create(name="Another Cast")
        # The authenticated user manages cast1 only.
        self.cast1.add_member(self.profile1)
        self.cast1.add_manager(self.profile1)
        self.client = APIClient()
        self.client.force_authenticate(user=user)

    def _list_add_check_remove(self, url_name: str, fcheck: str):
        """Tests list endpoint's add, check, remove, and dne events

        `fcheck` is the name of the Cast predicate method used to verify
        the server-side state after each call.
        """
        url = reverse(url_name, kwargs={"pk": self.cast1.pk, "pid": self.profile2.pk})
        # Cannot remove non-added profile
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertFalse(getattr(self.cast1, fcheck)(self.profile2))
        # Add profile
        response = self.client.post(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertTrue(getattr(self.cast1, fcheck)(self.profile2))
        # Cannot re-add profile
        response = self.client.post(url)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # Remove profile
        response = self.client.delete(url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertFalse(getattr(self.cast1, fcheck)(self.profile2))
        # Bad cast
        response = self.client.delete(
            reverse(url_name, kwargs={"pk": 0, "pid": self.profile2.pk})
        )
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        # Bad profile
        response = self.client.delete(
            reverse(url_name, kwargs={"pk": self.cast1.pk, "pid": 0})
        )
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        # Not allowed for other casts
        response = self.client.post(
            reverse(url_name, kwargs={"pk": self.cast2.pk, "pid": self.profile2.pk})
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_member_list(self):
        """Tests member add/remove endpoint"""
        self._list_add_check_remove("cast-member", "is_member")

    def test_manager_list(self):
        """Tests manager add/remove endpoint"""
        # Cannot add a non-member
        response = self.client.post(
            reverse(
                "cast-manager", kwargs={"pk": self.cast1.pk, "pid": self.profile2.pk}
            )
        )
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.cast1.add_member(self.profile2)
        self._list_add_check_remove("cast-manager", "is_manager")

    def test_request_list(self):
        """Tests member request add/remove endpoint"""
        self._list_add_check_remove("cast-member-request", "has_requested_membership")

    def test_blocked_list(self):
        """Tests blocked user add/remove endpoint"""
        self._list_add_check_remove("cast-blocked", "is_blocked")
|
<gh_stars>1-10
import os
from typing import List, Dict, Optional, Tuple
import numpy as np
import ray
from pyarrow.csv import ConvertOptions
# File formats the loader can ingest, mapping file extension -> format key
# dispatched on in FSRayDataSets.read_data. The commented-out entries are
# planned but not yet implemented.
__SUPPORT_FILE_FORMAT__ = {
    "csv": "csv",
    "parquet": "parquet",
    # "json": "json",
    # "npy": "npy",
}
from modin.pandas import DataFrame
class FSRayDataSets:
    """Ray-backed tabular dataset loader for feature-selection workloads.

    Reads csv/parquet sources via ray.data and can convert the result to a
    modin DataFrame for pandas-style, column-wise access.
    """

    def __init__(self, label_col_name):
        """
        Args:
            label_col_name: name of the label column
        """
        # label column name; always stripped from the discovered schema
        self.label_col_name = label_col_name
        # the loaded dataset (ray Dataset, or modin DataFrame after
        # read_data(..., transformer="modin"))
        self.ds = None
        # list of all feature column names (label removed)
        self.schemas = None
        # detected format key from __SUPPORT_FILE_FORMAT__
        self.file_format = None
        # normalized list of source paths
        self.data_source = None

    def get_data_info(self, data_source):
        """Discover the feature columns by probing the first source file.

        Normalizes `data_source` to a list, reads the first file, records
        the schema minus the label column, then drops the probe dataset.
        Returns the list of feature column names.
        """
        self.file_format = self.__check_data_source_format(data_source)
        if isinstance(data_source, str):
            data_source = [data_source]
        self.data_source = data_source
        self.read_data(self.data_source[0])
        self.schemas = self.ds.schema().names
        self.schemas.remove(self.label_col_name)
        # free the probe dataset; only the schema is kept
        del self.ds
        return self.schemas

    def read_data(self, data_source, *, filesystem: Optional["pyarrow.fs.FileSystem"] = None,
                  columns: Optional[List[str]] = None, parallelism: int = 200,
                  _tensor_column_schema: Optional[Dict[str, Tuple[np.dtype, Tuple[int, ...]]]] = None,
                  transformer: str = None,
                  **read_args):
        """Load `data_source` into self.ds.

        With transformer="modin" the result is a modin DataFrame; otherwise
        the raw ray Dataset is stored. `columns` restricts csv reading to a
        subset of columns via pyarrow ConvertOptions.
        """
        if self.file_format is not None:
            # NOTE(review): this guard looks inverted. When file_format is
            # still None (read_data called before get_data_info) it is never
            # computed here, so the dispatch below falls through to
            # NotImplementedError. `is None` (or computing unconditionally)
            # was probably intended -- confirm against callers.
            self.file_format = self.__check_data_source_format(data_source)
        if self.file_format == __SUPPORT_FILE_FORMAT__["csv"]:
            if columns is not None:
                # restrict csv parsing to the requested columns
                if read_args is None:
                    # NOTE(review): **read_args is always a dict, never None;
                    # this branch appears to be dead code.
                    read_args = {
                        "convert_options": ConvertOptions(include_columns=columns)}
                else:
                    read_args["convert_options"] = ConvertOptions(include_columns=columns)
                ds = ray.data.read_csv(data_source, filesystem=filesystem, parallelism=parallelism, **read_args)
            else:
                ds = ray.data.read_csv(data_source, filesystem=filesystem, parallelism=parallelism, **read_args)
        elif self.file_format == __SUPPORT_FILE_FORMAT__["parquet"]:
            ds = ray.data.read_parquet(data_source, filesystem=filesystem, columns=columns, parallelism=parallelism,
                                       _tensor_column_schema=_tensor_column_schema, **read_args)
        else:
            raise NotImplementedError("数据格式:{} 暂不支持!".format(data_source))
        if self.schemas is not None:
            # refresh the cached feature list from the newly read dataset.
            # NOTE(review): schemas is only refreshed once it has been set by
            # get_data_info; a first direct read_data call leaves it None.
            self.schemas = ds.schema().names
            # remove the label column from the cached column names
            self.schemas.remove(self.label_col_name)
        if transformer is None:
            self.ds = ds
        elif transformer == "modin":
            # materialize as a modin DataFrame for pandas-style access
            self.ds = ds.to_modin()

    def __check_data_source_format(self, data_source):
        """Map the source's file extension to a supported format key.

        Accepts a single path or a list of paths (only the first element is
        probed). Raises ValueError for unsupported extensions; silently
        returns None for unsupported argument types.
        """
        if isinstance(data_source, str):
            file_format = data_source[data_source.rindex(".") + 1:]
            try:
                return __SUPPORT_FILE_FORMAT__[file_format]
            except KeyError:
                raise ValueError("文件格式不支持,仅支持从以下文件中读取:{}".format(__SUPPORT_FILE_FORMAT__.keys()))
        # NOTE(review): isinstance against typing.List works, but the builtin
        # `list` is the conventional runtime check.
        if isinstance(data_source, List):
            return self.__check_data_source_format(data_source[0])
        else:
            pass

    def get_column_by_name(self, column):
        """Return one column's values from the already-loaded modin DataFrame."""
        print("进程:{} 正在获取列:{} 的数据".format(os.getpid(), column))
        if self.ds is not None and isinstance(self.ds, DataFrame):
            return self.ds[column].values

    def get_column_by_name_from_data_source(self, column):
        """Return one column's values, re-reading only that column from the
        source files when a data source is known; otherwise fall back to the
        already-loaded DataFrame."""
        print("进程:{} 正在获取列:{} 的数据".format(os.getpid(), column))
        if self.data_source is not None:
            print("从原始文件中读取列数据...")
            # re-read just this column as a modin DataFrame
            self.read_data(self.data_source, columns=[column], transformer="modin")
            return self.ds[column].values
        if self.ds is not None and isinstance(self.ds, DataFrame):
            return self.ds[column].values
if __name__ == '__main__':
    # Smoke-test driver: exercise column-wise and whole-file reading paths.
    dataset = FSRayDataSets("Response")
    data_sources_ = ["../data/final_test.csv"]
    # Column-by-column read: fetch each feature column from the source files.
    schemas_ = dataset.get_data_info(data_sources_)
    for s in schemas_:
        s_data = dataset.get_column_by_name_from_data_source(s)
        # Bug fix: `s` is the column *name* (a str, which has no .shape);
        # the fetched column data is `s_data`.
        print(s_data.shape)
    # Full read: load the whole dataset as a modin DataFrame.
    dataset.read_data(data_sources_, transformer="modin")
    ds = dataset.ds
    print(ds.shape[0])
    print(ds.iloc[:1000, :].shape)
    col = dataset.get_column_by_name(dataset.label_col_name)
    print(len(col))
|
<gh_stars>1-10
import datetime
import dash_html_components as html
import dash_core_components as dcc
from dash_labs import Input, State, Output
from ..fixtures import test_template
def test_dropdown_builder(test_template):
    """The dropdown builder wraps a dcc.Dropdown in an Input bound to 'value'."""
    choices = [{"label": letter.upper(), "value": letter} for letter in ("a", "b", "c")]
    dep = test_template.dropdown_input(
        id="test-dropdown", options=choices, value="b", opts=dict(disabled=True)
    )
    # The builder must hand back an Input dependency on the value property.
    assert isinstance(dep, Input)
    assert dep.component_property == "value"
    # The wrapped component carries through everything we supplied.
    dropdown = dep.component_id
    assert isinstance(dropdown, dcc.Dropdown)
    assert dropdown.id == "test-dropdown"
    assert dropdown.value == "b"
    assert dropdown.options == choices
    assert dropdown.disabled is True
def test_slider_builder(test_template):
    """Slider builder returns an Input on 'value' and enables tooltips by default.

    Also checks that `kind=State` and `opts=dict(tooltip=None)` override the
    defaults.
    """
    # Renamed locals: the originals shadowed the builtins min/max/id, and the
    # previous `step` local was never passed to the builder (unused).
    lo, hi, val, slider_id = 1, 10, 5, "test-slider"
    component_dep = test_template.slider_input(
        lo, hi, id=slider_id, value=val, opts=dict(disabled=True)
    )
    assert isinstance(component_dep, Input)
    assert component_dep.component_property == "value"
    component = component_dep.component_id
    assert isinstance(component, dcc.Slider)
    assert component.id == "test-slider"
    assert component.min == lo
    assert component.max == hi
    assert component.value == val
    assert component.disabled is True
    # Template enables persistent tooltips by default
    assert isinstance(component.tooltip, dict)
    # But can be overridden with tooltip argument, and can override kind to State
    component_dep = test_template.slider_input(
        lo,
        hi,
        id=slider_id,
        value=val,
        kind=State,
        opts=dict(tooltip=None),
    )
    assert isinstance(component_dep, State)
    assert component_dep.component_property == "value"
    component = component_dep.component_id
    assert getattr(component, "tooltip", None) is None
def test_input_builder(test_template):
    """Textbox builder yields an Input dependency on 'value' over dcc.Input."""
    dep = test_template.textbox_input(
        "Starting", id="test-input", opts=dict(disabled=True)
    )
    assert isinstance(dep, Input)
    assert dep.component_property == "value"
    # The underlying component reflects the initial value and extra opts.
    textbox = dep.component_id
    assert isinstance(textbox, dcc.Input)
    assert textbox.id == "test-input"
    assert textbox.value == "Starting"
    assert textbox.disabled is True
def test_checklist_builder(test_template):
    """Checklist builder expands bare strings into label/value option dicts."""
    raw_options = ["a", "b", "c"]
    dep = test_template.checklist_input(
        raw_options,
        value=["b", "c"],
        id="test-checklist",
        opts=dict(className="checklist-class"),
    )
    assert isinstance(dep, Input)
    assert dep.component_property == "value"
    checklist = dep.component_id
    assert isinstance(checklist, dcc.Checklist)
    assert checklist.id == "test-checklist"
    # Each bare string should have become {"label": s, "value": s}.
    assert checklist.options == [{"label": s, "value": s} for s in raw_options]
    assert checklist.value == ["b", "c"]
    assert checklist.className == "checklist-class"
def test_button_builder(test_template):
    """Button builder binds the Input dependency to n_clicks, not value."""
    dep = test_template.button_input(
        "Hello, world", id="test-button", opts=dict(disabled=True)
    )
    assert isinstance(dep, Input)
    # Buttons fire on clicks rather than carrying a value.
    assert dep.component_property == "n_clicks"
    button = dep.component_id
    assert isinstance(button, html.Button)
    assert button.id == "test-button"
    assert button.children == "Hello, world"
    assert button.disabled is True
def test_markdown_builder(test_template):
    """Markdown builder produces an Output targeting the children property."""
    dep = test_template.markdown_output(
        "Hello, world", id="test-markdown", opts=dict(dedent=False)
    )
    assert isinstance(dep, Output)
    assert dep.component_property == "children"
    markdown = dep.component_id
    assert isinstance(markdown, dcc.Markdown)
    assert markdown.id == "test-markdown"
    assert markdown.children == "Hello, world"
    # Extra opts must pass straight through to the component.
    assert markdown.dedent is False
def test_graph_builder(test_template):
    """Graph builder returns an Output on 'figure' wrapping a dcc.Graph."""
    figure = dict(
        data=[dict(y=[1, 3, 2])], layout=dict(title=dict(text="Figure Title"))
    )
    config = dict(config_prop="config-val")
    component_dep = test_template.graph_output(
        figure=figure, id="test-graph", config=config
    )
    assert isinstance(component_dep, Output)
    assert component_dep.component_property == "figure"
    component = component_dep.component_id
    assert isinstance(component, dcc.Graph)
    # Added for consistency with every sibling builder test: the id passed
    # to the builder must be applied to the component.
    assert component.id == "test-graph"
    assert component.figure == figure
    assert component.config == config
def test_date_picker_single_builder(test_template):
    """Single date picker binds the Input to 'date' and ISO-formats the value."""
    today = datetime.date.today()
    dep = test_template.date_picker_single_input(
        today, id="test-datepicker", opts=dict(month_format="MM YY")
    )
    assert isinstance(dep, Input)
    assert dep.component_property == "date"
    picker = dep.component_id
    assert isinstance(picker, dcc.DatePickerSingle)
    # The date object is serialized to its ISO string form on the component.
    assert picker.date == today.isoformat()
    assert picker.id == "test-datepicker"
    assert picker.month_format == "MM YY"
def test_date_picker_range_builder(test_template):
    """The range date-picker builder yields an Input over ('start_date', 'end_date').

    Fix: the end date's year was mistyped as 200 (third century); use 2000 so
    the fixture represents the intended two-week range after ``start_date``.
    The assertions compare against ``isoformat()`` so they passed either way,
    but the fixture data was clearly wrong.
    """
    start_date = datetime.date(2000, 1, 1)
    end_date = datetime.date(2000, 1, 15)
    component_dep = test_template.date_picker_range_input(
        start_date,
        end_date,
        id="test-daterangepicker",
        opts=dict(month_format="MM YY"),
    )
    assert isinstance(component_dep, Input)
    assert component_dep.component_property == ("start_date", "end_date")
    component = component_dep.component_id
    assert isinstance(component, dcc.DatePickerRange)
    assert component.start_date == start_date.isoformat()
    assert component.end_date == end_date.isoformat()
    assert component.id == "test-daterangepicker"
    assert component.month_format == "MM YY"
|
from pathlib import Path
from typing import Union, List, Optional
import numpy as np
import torch
from torch.utils.data import DataLoader
from torchvision import transforms
from src.datamodules.RotNet.utils.image_analytics import get_analytics_data
from src.datamodules.RotNet.datasets.cropped_dataset import CroppedRotNet, ROTATION_ANGLES
from src.datamodules.RotNet.utils.misc import validate_path_for_self_supervised
from src.datamodules.utils.wrapper_transforms import OnlyImage
from src.datamodules.base_datamodule import AbstractDatamodule
from src.utils import utils
log = utils.get_logger(__name__)
class RotNetDivaHisDBDataModuleCropped(AbstractDatamodule):
    """Datamodule for self-supervised RotNet pre-training on cropped DivaHisDB pages.

    Classes are the entries of ROTATION_ANGLES (one class per rotation);
    per-channel mean/std for normalisation are obtained via get_analytics_data
    from the training images.

    :param data_dir: root folder that must contain the train/val/test layout
        expected by validate_path_for_self_supervised
    :param data_folder_name: name of the sub-folder holding the image data
    :param selection_train: optional subset selection (count or list of names)
        forwarded to CroppedRotNet for the train split
    :param selection_val: same, for the val split
    :param selection_test: same, for the test split
    :param crop_size: side length of the random square crop (also fixes `dims`)
    :param num_workers: DataLoader worker count
    :param batch_size: per-process batch size
    :param shuffle: whether train/val loaders shuffle
    :param drop_last: whether train/val loaders drop the last partial batch
    """
    def __init__(self, data_dir: str, data_folder_name: str,
                 selection_train: Optional[Union[int, List[str]]] = None,
                 selection_val: Optional[Union[int, List[str]]] = None,
                 selection_test: Optional[Union[int, List[str]]] = None,
                 crop_size: int = 256, num_workers: int = 4, batch_size: int = 8,
                 shuffle: bool = True, drop_last: bool = True):
        super().__init__()
        self.data_folder_name = data_folder_name
        # Dataset-wide channel statistics computed from the training images.
        analytics_data = get_analytics_data(input_path=Path(data_dir), data_folder_name=self.data_folder_name,
                                            get_gt_data_paths_func=CroppedRotNet.get_gt_data_paths)
        self.mean = analytics_data['mean']
        self.std = analytics_data['std']
        # One class per rotation angle; uniform weights since every rotation
        # is equally likely in the self-supervised task.
        self.class_encodings = np.array(ROTATION_ANGLES)
        self.num_classes = len(self.class_encodings)
        self.class_weights = torch.as_tensor([1 / self.num_classes for _ in range(self.num_classes)])
        # ToTensor -> Normalize -> RandomCrop, applied to the image only
        # (OnlyImage skips the label part of the sample).
        self.image_transform = OnlyImage(transforms.Compose([transforms.ToTensor(),
                                                             transforms.Normalize(mean=self.mean, std=self.std),
                                                             transforms.RandomCrop(size=crop_size)]))
        self.num_workers = num_workers
        self.batch_size = batch_size
        self.shuffle = shuffle
        self.drop_last = drop_last
        # Validates the expected folder layout and returns the root as a Path.
        self.data_dir = validate_path_for_self_supervised(data_dir=data_dir, data_folder_name=self.data_folder_name)
        self.selection_train = selection_train
        self.selection_val = selection_val
        self.selection_test = selection_test
        # (channels, height, width) of one sample after RandomCrop.
        self.dims = (3, crop_size, crop_size)
        # Check default attributes using base_datamodule function
        self._check_attributes()
    def setup(self, stage: Optional[str] = None):
        """Instantiate the datasets required for the given Lightning stage.

        NOTE(review): `stage` is not forwarded to `super().setup()` — confirm
        the AbstractDatamodule signature actually takes no stage argument.
        """
        super().setup()
        if stage == 'fit' or stage is None:
            self.train = CroppedRotNet(**self._create_dataset_parameters('train'), selection=self.selection_train)
            log.info(f'Initialized train dataset with {len(self.train)} samples.')
            self._check_min_num_samples(num_samples=len(self.train), data_split='train',
                                        drop_last=self.drop_last)
            self.val = CroppedRotNet(**self._create_dataset_parameters('val'), selection=self.selection_val)
            log.info(f'Initialized val dataset with {len(self.val)} samples.')
            self._check_min_num_samples(num_samples=len(self.val), data_split='val',
                                        drop_last=self.drop_last)
        if stage == 'test':
            self.test = CroppedRotNet(**self._create_dataset_parameters('test'), selection=self.selection_test)
            log.info(f'Initialized test dataset with {len(self.test)} samples.')
            # self._check_min_num_samples(num_samples=len(self.test), data_split='test',
            #                             drop_last=False)
    def _check_min_num_samples(self, num_samples: int, data_split: str, drop_last: bool):
        """Raise (drop_last=True) or warn (drop_last=False) when a split holds
        fewer samples than one global batch (num_processes * batch_size),
        because drop_last would then discard the entire split."""
        num_processes = self.trainer.num_processes
        batch_size = self.batch_size
        if drop_last:
            if num_samples < (self.trainer.num_processes * self.batch_size):
                log.error(
                    f'#samples ({num_samples}) in "{data_split}" smaller than '
                    f'#processes({num_processes}) times batch size ({batch_size}). '
                    f'This only works if drop_last is false!')
                raise ValueError()
        else:
            if num_samples < (self.trainer.num_processes * self.batch_size):
                log.warning(
                    f'#samples ({num_samples}) in "{data_split}" smaller than '
                    f'#processes ({num_processes}) times batch size ({batch_size}). '
                    f'This works due to drop_last=False, however samples might occur multiple times. '
                    f'Check if this behavior is intended!')
    def train_dataloader(self, *args, **kwargs) -> DataLoader:
        """DataLoader over the train split (shuffle/drop_last as configured)."""
        return DataLoader(self.train,
                          batch_size=self.batch_size,
                          num_workers=self.num_workers,
                          shuffle=self.shuffle,
                          drop_last=self.drop_last,
                          pin_memory=True)
    def val_dataloader(self, *args, **kwargs) -> Union[DataLoader, List[DataLoader]]:
        """DataLoader over the val split.

        NOTE(review): uses the same shuffle/drop_last settings as training,
        which is unusual for validation — confirm this is intentional.
        """
        return DataLoader(self.val,
                          batch_size=self.batch_size,
                          num_workers=self.num_workers,
                          shuffle=self.shuffle,
                          drop_last=self.drop_last,
                          pin_memory=True)
    def test_dataloader(self, *args, **kwargs) -> Union[DataLoader, List[DataLoader]]:
        """DataLoader over the test split: never shuffled, never dropping batches."""
        return DataLoader(self.test,
                          batch_size=self.batch_size,
                          num_workers=self.num_workers,
                          shuffle=False,
                          drop_last=False,
                          pin_memory=True)
    def _create_dataset_parameters(self, dataset_type: str = 'train'):
        """Build the kwargs dict for CroppedRotNet for the given split name."""
        is_test = dataset_type == 'test'
        return {'path': self.data_dir / dataset_type,
                'data_folder_name': self.data_folder_name,
                'image_transform': self.image_transform,
                'classes': self.class_encodings,
                'is_test': is_test}
|
import os
import re
import hashlib
import argparse
CODE_HEADER = "// Auto-generated\n\n"
class Instruction:
    """Base class for a capsule instruction parsed out of a C source file.

    Class attributes carry the parsing/validation metadata shared by all
    subclasses:
      argument          -- False, "optional", or "required"
      occurrence        -- allowed count: None (any), an int, or [min, max]
      closing_instr     -- name of the instruction that must follow, if any
      opening_instr     -- name of the instruction that must precede, if any
      keep_content      -- whether the marker text is kept in the rewritten module
      strip_whitespaces -- whether surrounding whitespace is trimmed on output
    """

    argument = False
    occurrence = None
    opening_instr = False
    closing_instr = False
    keep_content = True
    strip_whitespaces = False

    def __init__(self, start, end, arg):
        # Character span of the instruction inside the source, plus raw argument.
        self.start = start
        self.end = end
        self.arg = arg

    def execute(self, context):
        """Hook invoked during generation; the base instruction emits nothing."""
class NameInstruction(Instruction):
    """`name` instruction: overrides the generated module/API macro name.

    The argument must be a full C identifier. The original check used
    ``re.match(r"[a-zA-Z_]", arg)``, which validates only the first character
    and let values such as ``foo-bar`` through, producing invalid ``#define``
    names downstream.
    """
    name = "name"
    argument = "required"
    occurrence = [0, 1]

    def __init__(self, start, end, arg):
        # fullmatch: the whole argument must be an identifier, not just char 0.
        if not re.fullmatch(r"[a-zA-Z_]\w*", arg):
            raise ValueError(f"Invalid '{self.name}' argument", arg)
        super().__init__(start, end, arg)
class OutputInstruction(Instruction):
    """`output` instruction: path of the capsule header to generate.

    The original check used ``re.match(r"[\\w_/-]", arg)``, which validates
    only the first character; require the whole argument to consist of the
    allowed path characters instead (the marker regex captures the same
    character class, so no previously-parsable input is rejected).
    """
    name = "output"
    argument = "required"
    occurrence = [0, 1]

    def __init__(self, start, end, arg):
        if not re.fullmatch(r"[\w/-]+", arg):
            raise ValueError(f"Invalid '{self.name}' argument", arg)
        super().__init__(start, end, arg)
class ExportInstruction(Instruction):
    """`export` instruction: emits the API function table into the module.

    The optional argument names the export table macro; it must be a full C
    identifier (the original check validated only the first character).
    """
    name = "export"
    occurrence = 1
    argument = "optional"
    strip_whitespaces = True

    def __init__(self, start, end, arg):
        if arg and not re.fullmatch(r"[a-zA-Z_]\w*", arg):
            raise ValueError(f"Invalid '{self.name}' argument", arg)
        super().__init__(start, end, arg)

    def execute(self, context):
        # Default macro name when no explicit argument was given.
        output_variable = self.arg or f"{context.module_key}_EXPORT"
        # Emit the generated export block, wrapped in the internal
        # __exportblock__/__endexportblock__ markers so a later run can
        # locate and replace it.
        context.out_module_source += [
            f"\n/*[capsule:__exportblock__]*/\n",
            f"#define {context.module_key} {context.api_key}\n",
            f"#define {output_variable} {{\\\n"
        ] + [
            f"    [{index}] = {func.func_name},\\\n"
            for index, func in enumerate(context.functions)
        ] + [
            "}\n",
            f"/*[capsule:__endexportblock__]*/\n\n"
        ]
class CopyInstruction(Instruction):
    """`copy` instruction: copies the region up to the matching `endcopy`
    verbatim into the generated capsule header.

    content_start/content_end are filled in by parse_c_file once the
    copy/endcopy pair has been matched.
    """
    name = "copy"
    closing_instr = "endcopy"
    content_start = None
    content_end = None

    def execute(self, context):
        snippet = context.module_source[self.content_start:self.content_end]
        context.out_capsule_source.append(snippet.strip() + "\n\n")
class EndCopyInstruction(Instruction):
    """`endcopy` marker: terminates a preceding `copy` instruction."""
    name = "endcopy"
    opening_instr = "copy"
class ExportBlockInstruction(Instruction):
    """Internal marker opening a previously generated export block; its
    content is discarded (keep_content=False) so the block is regenerated."""
    name = "__exportblock__"
    closing_instr = "__endexportblock__"
    keep_content = False
    strip_whitespaces = True
class EndExportBlockInstruction(Instruction):
    """Internal marker closing a previously generated export block."""
    name = "__endexportblock__"
    opening_instr = "__exportblock__"
    keep_content = False
class FunctionInstruction(Instruction):
    """A ``CAPSULE_API(ret) name(args)`` declaration found in the module.

    Parses the return type, function name and argument list, and on execute
    emits the function-id ``#define`` plus the call-through macro into the
    capsule header.
    """
    name = "function"

    def __init__(self, start, end, match, index):
        ret, decl = [x.strip() for x in match]
        match = re.match(r"(.*)\s*\(([\s\S]*)\)", decl)
        if not match:
            raise ValueError("Invalid declaration", decl)
        func_name, func_args = match.groups()
        self.func_name = func_name.strip()
        self.func_ret = ret
        # Drop empty fragments: a zero-argument declaration previously split
        # into [""], which made has_args truthy in execute() and generated a
        # bogus variadic macro for argument-less functions.
        self.func_args = [x.strip() for x in func_args.split(",") if x.strip()]
        self.func_index = index
        super().__init__(start, end, "")

    def execute(self, context):
        # Stable slot id for this function inside the API table.
        func_id = self.func_name.upper() + "_ID"
        context.out_capsule_source += [
            f"#define {func_id} {self.func_index}\n"
        ]
        # "_ctx_var" is a pseudo-argument: it becomes an opaque void* context
        # that the macro fills in automatically.
        has_state = "_ctx_var" in self.func_args
        func_args = list(self.func_args)
        if has_state:
            func_args.remove("_ctx_var")
        has_args = bool(func_args)
        if has_state:
            func_args.insert(0, "void*")
        # Variadic macro only when the function actually takes caller args.
        if has_args:
            context.out_capsule_source += [
                f"#define {self.func_name}(...) \\\n"
            ]
        else:
            context.out_capsule_source += [
                f"#define {self.func_name}() \\\n"
            ]
        func_varargs = []
        if has_state:
            func_varargs.append(f"_ctx->{context.api_key}__CTX")
        if has_args:
            func_varargs.append("__VA_ARGS__")
        func_args = ", ".join(func_args)
        func_varargs = ", ".join(func_varargs)
        # Cast the table slot to the right function-pointer type and call it.
        context.out_capsule_source += [
            f"    (({self.func_ret} (*) ({func_args}))({context.api_key}__API[{func_id}]))( \\\n",
            f"        {func_varargs})\n\n"
        ]
# Registry of marker instructions, keyed by instruction name.
# FunctionInstruction is intentionally absent: it is produced by the
# CAPSULE_API regex, not by /*[capsule:...]*/ markers.
INSTRUCTIONS = {
    x.name: x
    for x in [
        NameInstruction,
        OutputInstruction,
        ExportInstruction,
        CopyInstruction,
        EndCopyInstruction,
        ExportBlockInstruction,
        EndExportBlockInstruction
    ]
}
class Context:
    """Mutable state shared by the instruction `execute` hooks during generation."""

    def __init__(self, module_name, module_source, module_key, api_key, functions):
        # Inputs describing the module being processed.
        self.module_name = module_name
        self.module_source = module_source
        self.module_key = module_key
        self.api_key = api_key
        self.functions = functions
        # Output accumulators: fragments of the rewritten module and of the
        # generated capsule header, joined at the end.
        self.out_module_source = []
        self.out_capsule_source = []
def main(params=None):
    """Walk the given root directory and generate capsule headers for every .c file."""
    parser = argparse.ArgumentParser()
    parser.add_argument("root")
    options = parser.parse_args(params)
    for path, _subdirs, filenames in os.walk(options.root):
        for entry in filenames:
            stem, extension = os.path.splitext(entry)
            if extension == ".c":
                generate_capsule(stem, os.path.join(path, entry), path)
def generate_capsule(module, module_path, dirname):
    """Parse one C module, rewrite it in place, and (re)generate its capsule header.

    module      -- module name (the filename without extension)
    module_path -- full path to the .c file
    dirname     -- directory containing the module; the capsule path is
                   resolved relative to it

    Prints a per-file report. Returns early (after printing the errors) when
    parsing failed, and silently when the file contains no capsule markers.
    """
    with open(module_path, "rt") as f:
        module_source = f.read()
    instructions, errors = parse_c_file(module_source)
    if errors:
        for msg, line, extra in errors:
            extra = f": {extra}" if extra else ""
            print(f"  Line {line}: {msg}{extra}")
        return
    if not instructions:
        return
    print(f"{module_path}:")
    # Module macro name: explicit 'name' instruction, else derived from filename.
    name = [instr for instr in instructions if instr.name == "name"]
    if name:
        module_name = name[0].arg.upper()
    else:
        module_name = module.upper().replace(".", "_").replace("-", "_") + "_API"
    # Capsule header path: explicit 'output' instruction, else capsule/<module>.h.
    output = [instr for instr in instructions if instr.name == "output"]
    if output:
        capsule_filename = output[0].arg
        if not os.path.basename(capsule_filename):
            # Argument is a directory ("foo/"): keep the default file name.
            capsule_filename = os.path.join(capsule_filename, f"{module}.h")
        else:
            # BUG FIX: str has no .splitext() method — the original
            # `capsule_filename.splitext()[0]` raised AttributeError whenever
            # the 'output' argument had a basename.
            capsule_filename = os.path.splitext(capsule_filename)[0] + ".h"
    else:
        capsule_filename = f"capsule/{module}.h"
    assert len(instructions)
    functions = [
        instr for instr in instructions if isinstance(instr, FunctionInstruction)
    ]
    # The API key embeds a hash of all function signatures, so any ABI change
    # yields a new key.
    api_key = module_name.lower() + "_" + hashlib.md5(
        repr([(x.func_name, x.func_args) for x in functions]).encode("utf-8")
    ).hexdigest()
    context = Context(module_name, module_source, module_name, api_key, functions)
    context.out_capsule_source += [
        CODE_HEADER,
        f"#ifndef CAPSULE_{module_name}\n",
        f"#define CAPSULE_{module_name}\n\n",
        f"static int {api_key}__LOADED = 0;\n",
        f"static void *{api_key}__API[{len(functions)}];\n\n",
        f"#define {module_name} {api_key}\n\n",
    ]
    # Stitch the rewritten module back together around every instruction span.
    context.out_module_source.append(module_source[:instructions[0].start])
    prev_instr = None
    for instr in instructions:
        if prev_instr:
            context.out_module_source.append(module_source[prev_instr.end:instr.start])
        if instr.keep_content:
            data = module_source[instr.start:instr.end]
            if instr.strip_whitespaces:
                data = data.strip() + "\n"
            context.out_module_source.append(data)
        instr.execute(context)
        prev_instr = instr
    context.out_module_source.append(module_source[instructions[-1].end:])
    context.out_capsule_source.append("#endif\n")
    # Only rewrite files whose content actually changed.
    has_changes = False
    out_module_source = "".join(context.out_module_source)
    out_capsule_source = "".join(context.out_capsule_source)
    if out_module_source != module_source:
        print(f"  Rewrite {module_path}")
        with open(module_path, "wt") as f:
            f.write(out_module_source)
        has_changes = True
    capsule_source = None
    capsule_path = os.path.join(dirname, capsule_filename)
    if os.path.exists(capsule_path):
        with open(capsule_path, "rt") as f:
            capsule_source = f.read()
    if out_capsule_source != capsule_source:
        os.makedirs(os.path.dirname(capsule_path), exist_ok=True)
        print(f"  Rewrite {capsule_path}")
        with open(capsule_path, "wt") as f:
            f.write(out_capsule_source)
        has_changes = True
    if not has_changes:
        print("  No changes")
def parse_c_file(source):
    """Scan C source for CAPSULE_API declarations and /*[capsule:...]*/ markers.

    Returns ``(instructions, errors)``: a position-sorted list of Instruction
    objects with open/close pairs merged into the opener, or ``(None, errors)``
    where each error is a ``(message, line_number, extra)`` tuple.
    """
    errors = []
    instructions = []
    # Function declarations: CAPSULE_API(ret) name(args), up to '{' or ';'.
    for index, match in enumerate(re.finditer(r"CAPSULE_API\s*\((.*)\)([^{;]*)", source)):
        start, end = match.span()
        try:
            instructions.append(
                FunctionInstruction(start, end, match.groups(), index)
            )
        except ValueError as e:
            errors.append((
                e.args[0],
                len(source[:start].splitlines()) + 1,
                e.args[1]
            ))
    # Marker comments: /*[capsule:kind arg]*/ .
    for match in re.finditer(r"/\*\s*\[capsule:(\w*)\s*([\w_/-]*)?\s*]\s*\*/\s*", source):
        start, end = match.span()
        kind, args = match.groups()
        if kind not in INSTRUCTIONS:
            errors.append((
                "Invalid instruction",
                len(source[:start].splitlines()) + 1,
                kind
            ))
            continue
        instr = INSTRUCTIONS[kind]
        if args and not instr.argument:
            errors.append((
                f"Unexpected '{instr.name}' argument",
                len(source[:start].splitlines()) + 1,
                args
            ))
            continue
        if not args and instr.argument == "required":
            errors.append((
                f"'{instr.name}' missing required argument",
                len(source[:start].splitlines()) + 1,
                args
            ))
            continue
        try:
            instructions.append(instr(start, end, args))
        except ValueError as e:
            errors.append((
                e.args[0],
                len(source[:start].splitlines()) + 1,
                e.args[1]
            ))
            continue
    if not instructions or errors:
        return None, errors
    instructions.sort(key=lambda x: x.start)
    # Pairing checks: an opener/closer must be immediately adjacent to its mate.
    for index, instr in enumerate(instructions):
        if instr.opening_instr:
            # BUG FIX: the original indexed `instructions[index - 1]`, which at
            # index 0 silently wraps to the LAST instruction and could mask a
            # missing opener; guard the boundary explicitly instead.
            if index == 0 or instructions[index - 1].name != instr.opening_instr:
                errors.append((
                    f"Missing opening '{instr.opening_instr}' instruction",
                    len(source[:instr.start].splitlines()) + 1,
                    None
                ))
        if instr.closing_instr:
            if index + 1 >= len(instructions) or instructions[index + 1].name != instr.closing_instr:
                errors.append((
                    f"Missing closing '{instr.closing_instr}' instruction",
                    len(source[:instr.start].splitlines()) + 1,
                    None
                ))
    # Occurrence checks: an int means "exactly N", a pair means [min, max].
    for instr_class in INSTRUCTIONS.values():
        if instr_class.occurrence:
            items = [instr for instr in instructions if type(instr) is instr_class]
            if isinstance(instr_class.occurrence, int):
                min_occurrence = max_occurrence = instr_class.occurrence
            else:
                min_occurrence, max_occurrence = instr_class.occurrence
            if len(items) < min_occurrence:
                errors.append((
                    f"Missing required '{instr_class.name}' instruction",
                    1,
                    None
                ))
            if len(items) > max_occurrence:
                if max_occurrence == 1:
                    for item in items[max_occurrence:]:
                        errors.append((
                            f"Only one '{instr_class.name}' instruction is allowed",
                            len(source[:item.start].splitlines()) + 1,
                            None
                        ))
                else:
                    for item in items[max_occurrence:]:
                        errors.append((
                            f"Too much '{instr_class.name}' instructions",
                            len(source[:item.start].splitlines()) + 1,
                            None
                        ))
    if errors:
        return None, errors
    # Merge open/close pairs: the opener absorbs the span up to its closer,
    # and the closer itself is dropped from the final list.
    final_instructions = []
    for index, instr in enumerate(instructions):
        if instr.closing_instr:
            instr.content_start = instr.end
            instr.content_end = instructions[index + 1].start
            instr.end = instructions[index + 1].end
        elif instr.opening_instr:
            continue
        final_instructions.append(instr)
    return final_instructions, None
if __name__ == "__main__":
main()
|
# biblepaycentral/proposal/models.py
import uuid
from django.db import models
from django.urls import reverse
# Proposal expense categories: (stored value, human-readable label).
EXPENSE_TYPES = (
    ('unknown', 'UNKNOWN'),
    ('charity', 'Charity'),
    ('pr', 'PR'),
    ('p2p', 'P2P'),
    ('it', 'IT'),
)
class Proposal(models.Model):
    """A budget proposal voted on by the masternodes (sanctuaries).

    Mirrors a governance object from the BiblePay blockchain; the raw object
    is kept in `hex_string` so fields missed during decoding can be recovered
    later.
    """
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # time when the proposal was first added to our database
    inserted_at = models.DateTimeField(auto_now_add=True)
    # governance object id in the blockchain
    # see "biblepay-cli gobject get [gobjectid]" for details about this proposal
    gobjectid = models.CharField(max_length=100)
    # all details about the proposal in form of a hex-encoded string.
    # You will find all this information decoded below. We keep the string
    # in case we missed decoding any information, so that we can decode it later
    hex_string = models.TextField(default="")
    # which network this proposal belongs to
    network = models.CharField(max_length=20, default="unknown")
    # name of the proposal, chosen by the user
    name = models.CharField(max_length=250, default="")
    # the user's biblepay address, used to send the coins to when the proposal is accepted
    receive_address = models.CharField(max_length=100, default="")
    # amount requested by the user. Can not be changed later
    amount = models.DecimalField(max_digits=14, decimal_places=4, default=0)
    # discussion/detail url for this proposal, every proposal should have one
    url = models.CharField(max_length=250, default="")
    # the expense type: one of EXPENSE_TYPES
    expense_type = models.CharField(max_length=10, choices=EXPENSE_TYPES, default="unknown")
    # in theory, proposals could start and end at different times, but we don't use that
    # right now
    unix_starttime = models.IntegerField(default=0)
    unix_endtime = models.IntegerField(default=0)
    # times of the main pool related to the submission of the proposal
    prepare_time = models.DateTimeField(null=True, default=None)
    submit_time = models.DateTimeField(null=True, default=None)
    trigger_time = models.DateTimeField(null=True, default=None)
    # when the proposal was paid by the sanctuaries
    paid_time = models.DateTimeField(null=True, default=None)
    # unclear, always empty
    funded_time = models.DateTimeField(null=True, default=None)
    # unclear
    prepare_txid = models.CharField(max_length=100, default="")
    # unclear, seems to be a copy of the gobjectid
    submit_txid = models.CharField(max_length=100, default="")
    # id of the new height/block that is the superblock
    # that paid the proposal. Is empty for not-paid proposals
    superblock_txid = models.CharField(max_length=100, default="")
    # the height of the superblock that paid the proposal
    height = models.IntegerField(null=True, default=None)
    # unclear
    trigger_txid = models.CharField(max_length=100)
    # whether the proposal was committed from the main pool
    # to the blockchain
    prepared = models.BooleanField(default=False)
    submitted = models.BooleanField(default=False)
    # how many sanctuaries voted, and what they voted
    yes_count = models.IntegerField(default=0)
    no_count = models.IntegerField(default=0)
    abstain_count = models.IntegerField(default=0)
    # yes_count - no_count = absolute_yes_count
    absolute_yes_count = models.IntegerField(default=0)
    # masternode count at the time of this proposal, relevant for the
    # absolute_yes_count, as you need to have > 10% count of the
    # masternode_count as absolute_yes_count, or the proposal is not
    # accepted in the next superblock
    masternode_count = models.IntegerField(default=0)
    # used to disable entries that got removed from the main pool, but that we
    # want to keep
    active = models.BooleanField(default=True)
    def get_absolute_url(self):
        """Return the URL of the proposal list view."""
        return reverse('proposals')
    def __str__(self):
        """Human-readable identifier: name plus expense type."""
        return '%s (%s)'% (self.name, self.expense_type)
    def is_fundable(self):
        """ returns true if the amount of absolute_yes_count is at least 10%
        of the max vote count (masternode_count) """
        if self.absolute_yes_count >= (self.masternode_count / 100) * 10:
            return True
        return False
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.words.service}.
"""
import time
from twisted.cred import checkers, credentials, portal
from twisted.internet import address, defer, reactor
from twisted.internet.defer import Deferred, DeferredList, maybeDeferred, succeed
from twisted.spread import pb
from twisted.test import proto_helpers
from twisted.trial import unittest
from twisted.words import ewords, service
from twisted.words.protocols import irc
class RealmTests(unittest.TestCase):
    """Tests for L{service.InMemoryWordsRealm} user and group management."""
    def _entityCreationTest(self, kind):
        """Shared creation/retrieval test, parameterized over "User" or "Group"."""
        # Kind is "user" or "group"
        realm = service.InMemoryWordsRealm("realmname")
        name = "test" + kind.lower()
        # Resolve the kind-specific realm methods and ewords exceptions by name.
        create = getattr(realm, "create" + kind.title())
        get = getattr(realm, "get" + kind.title())
        flag = "create" + kind.title() + "OnRequest"
        dupExc = getattr(ewords, "Duplicate" + kind.title())
        noSuchExc = getattr(ewords, "NoSuch" + kind.title())
        # Creating should succeed
        p = self.successResultOf(create(name))
        self.assertEqual(name, p.name)
        # Creating the same user again should not
        self.failureResultOf(create(name)).trap(dupExc)
        # Getting a non-existent user should succeed if createUserOnRequest is True
        setattr(realm, flag, True)
        p = self.successResultOf(get("new" + kind.lower()))
        self.assertEqual("new" + kind.lower(), p.name)
        # Getting that user again should return the same object
        newp = self.successResultOf(get("new" + kind.lower()))
        self.assertIdentical(p, newp)
        # Getting a non-existent user should fail if createUserOnRequest is False
        setattr(realm, flag, False)
        self.failureResultOf(get("another" + kind.lower())).trap(noSuchExc)
    def testUserCreation(self):
        """Users can be created once, and on-request creation honours the flag."""
        return self._entityCreationTest("User")
    def testGroupCreation(self):
        """Groups can be created once, and on-request creation honours the flag."""
        return self._entityCreationTest("Group")
    def testUserRetrieval(self):
        """getUser and lookupUser both return the originally created object."""
        realm = service.InMemoryWordsRealm("realmname")
        # Make a user to play around with
        user = self.successResultOf(realm.createUser("testuser"))
        # Make sure getting the user returns the same object
        retrieved = self.successResultOf(realm.getUser("testuser"))
        self.assertIdentical(user, retrieved)
        # Make sure looking up the user also returns the same object
        lookedUp = self.successResultOf(realm.lookupUser("testuser"))
        self.assertIdentical(retrieved, lookedUp)
        # Make sure looking up a user who does not exist fails
        (self.failureResultOf(realm.lookupUser("nosuchuser")).trap(ewords.NoSuchUser))
    def testUserAddition(self):
        """A manually constructed User added via addUser is returned unchanged."""
        realm = service.InMemoryWordsRealm("realmname")
        # Create and manually add a user to the realm
        p = service.User("testuser")
        user = self.successResultOf(realm.addUser(p))
        self.assertIdentical(p, user)
        # Make sure getting that user returns the same object
        retrieved = self.successResultOf(realm.getUser("testuser"))
        self.assertIdentical(user, retrieved)
        # Make sure looking up that user returns the same object
        lookedUp = self.successResultOf(realm.lookupUser("testuser"))
        self.assertIdentical(retrieved, lookedUp)
    def testGroupRetrieval(self):
        """getGroup returns created groups and fails for unknown names."""
        realm = service.InMemoryWordsRealm("realmname")
        group = self.successResultOf(realm.createGroup("testgroup"))
        retrieved = self.successResultOf(realm.getGroup("testgroup"))
        self.assertIdentical(group, retrieved)
        (self.failureResultOf(realm.getGroup("nosuchgroup")).trap(ewords.NoSuchGroup))
    def testGroupAddition(self):
        """A manually constructed Group added via addGroup can be fetched back."""
        realm = service.InMemoryWordsRealm("realmname")
        p = service.Group("testgroup")
        self.successResultOf(realm.addGroup(p))
        # NOTE(review): fetched as "testGroup" although added as "testgroup" —
        # presumably the realm case-normalizes names; confirm.
        group = self.successResultOf(realm.getGroup("testGroup"))
        self.assertIdentical(p, group)
    def testGroupUsernameCollision(self):
        """
        Try creating a group with the same name as an existing user and
        assert that it succeeds, since users and groups should not be in the
        same namespace and collisions should be impossible.
        """
        realm = service.InMemoryWordsRealm("realmname")
        self.successResultOf(realm.createUser("test"))
        self.successResultOf(realm.createGroup("test"))
    def testEnumeration(self):
        """itergroups yields every created group exactly once."""
        realm = service.InMemoryWordsRealm("realmname")
        self.successResultOf(realm.createGroup("groupone"))
        self.successResultOf(realm.createGroup("grouptwo"))
        groups = self.successResultOf(realm.itergroups())
        n = [g.name for g in groups]
        n.sort()
        self.assertEqual(n, ["groupone", "grouptwo"])
class TestCaseUserAgg:
    """Bundles a realm user with a connected IRC protocol and a string
    transport, so tests can write raw IRC lines and inspect the output."""
    def __init__(
        self,
        user,
        realm,
        factory,
        address=address.IPv4Address("TCP", "127.0.0.1", 54321),
    ):
        self.user = user
        self.transport = proto_helpers.StringTransportWithDisconnection()
        self.protocol = factory.buildProtocol(address)
        self.transport.protocol = self.protocol
        # Attach the protocol as the user's mind, then open the connection
        # last, once transport and protocol are fully wired together.
        self.user.mind = self.protocol
        self.protocol.makeConnection(self.transport)
    def write(self, stuff):
        """Feed raw data to the protocol as if received from the client."""
        self.protocol.dataReceived(stuff)
class IRCProtocolTests(unittest.TestCase):
STATIC_USERS = [
"useruser",
"otheruser",
"someguy",
"firstuser",
"username",
"userone",
"usertwo",
"userthree",
"userfour",
b"userfive",
"someuser",
]
    def setUp(self):
        """Create realm/checker/portal/factory and register the static users."""
        self.realm = service.InMemoryWordsRealm("realmname")
        self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
        self.portal = portal.Portal(self.realm, [self.checker])
        self.factory = service.IRCFactory(self.realm, self.portal)
        c = []
        for nick in self.STATIC_USERS:
            # STATIC_USERS deliberately contains one bytes entry; normalise it
            # so every account is created with a str nick.
            if isinstance(nick, bytes):
                nick = nick.decode("utf-8")
            c.append(self.realm.createUser(nick))
            # Convention used throughout these tests: password is "<nick>_password".
            self.checker.addUser(nick, nick + "_password")
        return DeferredList(c)
    def _assertGreeting(self, user):
        """
        The user has been greeted with the four messages that are (usually)
        considered to start an IRC session.
        Asserts that the required responses were received.
        """
        # Make sure we get 1-4 at least
        response = self._response(user)
        expected = [irc.RPL_WELCOME, irc.RPL_YOURHOST, irc.RPL_CREATED, irc.RPL_MYINFO]
        # Tick off each expected reply code as it is seen; anything left over
        # in `expected` was missing from the greeting.
        for (prefix, command, args) in response:
            if command in expected:
                expected.remove(command)
        self.assertFalse(expected, f"Missing responses for {expected!r}")
def _login(self, user, nick, password=None):
if password is None:
password = nick + <PASSWORD>"
user.write(f"PASS {password}\<PASSWORD>")
user.write(f"NICK {nick} extrainfo\r\n")
    def _loggedInUser(self, name):
        """Return a TestCaseUserAgg for *name*, already logged in via PASS/NICK."""
        user = self.successResultOf(self.realm.lookupUser(name))
        agg = TestCaseUserAgg(user, self.realm, self.factory)
        self._login(agg, name)
        return agg
def _response(self, user, messageType=None):
"""
Extracts the user's response, and returns a list of parsed lines.
If messageType is defined, only messages of that type will be returned.
"""
response = user.transport.value()
if bytes != str and isinstance(response, bytes):
response = response.decode("utf-8")
response = response.splitlines()
user.transport.clear()
result = []
for message in map(irc.parsemsg, response):
if messageType is None or message[1] == messageType:
result.append(message)
return result
    def testPASSLogin(self):
        """Logging in with PASS then NICK produces the standard IRC greeting."""
        user = self._loggedInUser("firstuser")
        self._assertGreeting(user)
    def test_nickServLogin(self):
        """
        Sending NICK without PASS will prompt the user for their password.
        When the user sends their password to NickServ, it will respond with a
        Greeting.
        """
        firstuser = self.successResultOf(self.realm.lookupUser("firstuser"))
        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
        user.write("NICK firstuser extrainfo\r\n")
        # NickServ prompts via a single PRIVMSG before any greeting is sent.
        response = self._response(user, "PRIVMSG")
        self.assertEqual(len(response), 1)
        self.assertEqual(response[0][0], service.NICKSERV)
        self.assertEqual(response[0][1], "PRIVMSG")
        self.assertEqual(response[0][2], ["firstuser", "Password?"])
        user.transport.clear()
        # Supplying the correct password to NickServ completes the login.
        user.write("PRIVMSG nickserv firstuser_password\r\n")
        self._assertGreeting(user)
def testFailedLogin(self):
firstuser = self.successResultOf(self.realm.lookupUser("firstuser"))
user = TestCaseUserAgg(firstuser, self.realm, self.factory)
self._login(user, "firstuser", "<PASSWORD>")
response = self._response(user, "PRIVMSG")
self.assertEqual(len(response), 1)
self.assertEqual(response[0][2], ["firstuser", "Login failed. Goodbye."])
    def testLogout(self):
        """QUIT triggers the protocol's logout callback."""
        logout = []
        firstuser = self.successResultOf(self.realm.lookupUser("firstuser"))
        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
        self._login(user, "firstuser")
        # Stub the logout hook so we can observe that QUIT invokes it.
        user.protocol.logout = lambda: logout.append(True)
        user.write("QUIT\r\n")
        self.assertEqual(logout, [True])
    def testJoin(self):
        """Joining a channel greets the joiner with names/topic replies and
        notifies the members already present."""
        firstuser = self.successResultOf(self.realm.lookupUser("firstuser"))
        somechannel = self.successResultOf(self.realm.createGroup("somechannel"))
        somechannel.meta["topic"] = "some random topic"
        # Bring in one user, make sure he gets into the channel sanely
        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
        self._login(user, "firstuser")
        user.transport.clear()
        user.write("JOIN #somechannel\r\n")
        response = self._response(user)
        self.assertEqual(len(response), 5)
        # Join message
        self.assertEqual(response[0][0], "firstuser!firstuser@realmname")
        self.assertEqual(response[0][1], "JOIN")
        self.assertEqual(response[0][2], ["#somechannel"])
        # User list (353 = RPL_NAMREPLY, 366 = RPL_ENDOFNAMES)
        self.assertEqual(response[1][1], "353")
        self.assertEqual(response[2][1], "366")
        # Topic (or lack thereof, as the case may be)
        # (332 = RPL_TOPIC, 333 = topic author/date)
        self.assertEqual(response[3][1], "332")
        self.assertEqual(response[4][1], "333")
        # Hook up another client! It is a CHAT SYSTEM!!!!!!!
        other = self._loggedInUser("otheruser")
        other.transport.clear()
        user.transport.clear()
        other.write("JOIN #somechannel\r\n")
        # At this point, both users should be in the channel
        response = self._response(other)
        event = self._response(user)
        # The first member only sees the newcomer's JOIN.
        self.assertEqual(len(event), 1)
        self.assertEqual(event[0][0], "otheruser!otheruser@realmname")
        self.assertEqual(event[0][1], "JOIN")
        self.assertEqual(event[0][2], ["#somechannel"])
        self.assertEqual(response[1][0], "realmname")
        self.assertEqual(response[1][1], "353")
        # Name ordering in RPL_NAMREPLY is unspecified; accept either order.
        self.assertIn(
            response[1][2],
            [
                ["otheruser", "=", "#somechannel", "firstuser otheruser"],
                ["otheruser", "=", "#somechannel", "otheruser firstuser"],
            ],
        )
    def test_joinTopicless(self):
        """
        When a user joins a group without a topic, no topic information is
        sent to that user.
        """
        firstuser = self.successResultOf(self.realm.lookupUser("firstuser"))
        self.successResultOf(self.realm.createGroup("somechannel"))
        # Bring in one user, make sure he gets into the channel sanely
        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
        self._login(user, "firstuser")
        user.transport.clear()
        user.write("JOIN #somechannel\r\n")
        response = self._response(user)
        # 332/333 are the topic replies; neither may appear without a topic.
        responseCodes = [r[1] for r in response]
        self.assertNotIn("332", responseCodes)
        self.assertNotIn("333", responseCodes)
    def testLeave(self):
        """PART notifies both the leaver and the remaining members, with the
        default "leaving" reason or an explicit part message."""
        user = self._loggedInUser("useruser")
        self.successResultOf(self.realm.createGroup("somechannel"))
        user.write("JOIN #somechannel\r\n")
        user.transport.clear()
        other = self._loggedInUser("otheruser")
        other.write("JOIN #somechannel\r\n")
        user.transport.clear()
        other.transport.clear()
        # PART without a message: the default reason "leaving" is used.
        user.write("PART #somechannel\r\n")
        response = self._response(user)
        event = self._response(other)
        self.assertEqual(len(response), 1)
        self.assertEqual(response[0][0], "useruser!useruser@realmname")
        self.assertEqual(response[0][1], "PART")
        self.assertEqual(response[0][2], ["#somechannel", "leaving"])
        self.assertEqual(response, event)
        # Now again, with a part message
        user.write("JOIN #somechannel\r\n")
        user.transport.clear()
        other.transport.clear()
        user.write("PART #somechannel :goodbye stupidheads\r\n")
        response = self._response(user)
        event = self._response(other)
        self.assertEqual(len(response), 1)
        self.assertEqual(response[0][0], "useruser!useruser@realmname")
        self.assertEqual(response[0][1], "PART")
        self.assertEqual(response[0][2], ["#somechannel", "goodbye stupidheads"])
        self.assertEqual(response, event)
        # Same exchange once more with bytes input, exercising the protocol's
        # bytes code path in dataReceived.
        user.write(b"JOIN #somechannel\r\n")
        user.transport.clear()
        other.transport.clear()
        user.write(b"PART #somechannel :goodbye stupidheads1\r\n")
        response = self._response(user)
        event = self._response(other)
        self.assertEqual(len(response), 1)
        self.assertEqual(response[0][0], "useruser!useruser@realmname")
        self.assertEqual(response[0][1], "PART")
        self.assertEqual(response[0][2], ["#somechannel", "goodbye stupidheads1"])
        self.assertEqual(response, event)
    def testGetTopic(self):
        """Joining a channel with topic metadata replays it (332/333), and an
        explicit TOPIC query returns the same information."""
        user = self._loggedInUser("useruser")
        group = service.Group("somechannel")
        group.meta["topic"] = "This is a test topic."
        group.meta["topic_author"] = "some_fellow"
        group.meta["topic_date"] = 77777777
        self.successResultOf(self.realm.addGroup(group))
        user.transport.clear()
        user.write("JOIN #somechannel\r\n")
        response = self._response(user)
        self.assertEqual(response[3][0], "realmname")
        self.assertEqual(response[3][1], "332")
        # XXX Sigh. irc.parsemsg() is not as correct as one might hope.
        self.assertEqual(
            response[3][2], ["useruser", "#somechannel", "This is a test topic."]
        )
        self.assertEqual(response[4][1], "333")
        self.assertEqual(
            response[4][2], ["useruser", "#somechannel", "some_fellow", "77777777"]
        )
        user.transport.clear()
        # Explicit TOPIC query returns the same 332/333 pair.
        user.write("TOPIC #somechannel\r\n")
        response = self._response(user)
        self.assertEqual(response[0][1], "332")
        self.assertEqual(
            response[0][2], ["useruser", "#somechannel", "This is a test topic."]
        )
        self.assertEqual(response[1][1], "333")
        self.assertEqual(
            response[1][2], ["useruser", "#somechannel", "some_fellow", "77777777"]
        )
def testSetTopic(self):
    """Setting a topic broadcasts TOPIC to all members; a later bare TOPIC
    query reports setter and timestamp via 332/333; TOPIC on a missing
    channel yields ERR_NOSUCHCHANNEL (403)."""
    user = self._loggedInUser("useruser")
    somechannel = self.successResultOf(self.realm.createGroup("somechannel"))
    user.write("JOIN #somechannel\r\n")
    other = self._loggedInUser("otheruser")
    other.write("JOIN #somechannel\r\n")
    user.transport.clear()
    other.transport.clear()
    other.write("TOPIC #somechannel :This is the new topic.\r\n")
    response = self._response(other)
    event = self._response(user)
    # Setter and observer both see the identical TOPIC broadcast.
    self.assertEqual(response, event)
    self.assertEqual(response[0][0], "otheruser!otheruser@realmname")
    self.assertEqual(response[0][1], "TOPIC")
    self.assertEqual(response[0][2], ["#somechannel", "This is the new topic."])
    other.transport.clear()
    # Pin the timestamp so the 333 reply is deterministic.
    somechannel.meta["topic_date"] = 12345
    other.write("TOPIC #somechannel\r\n")
    response = self._response(other)
    self.assertEqual(response[0][1], "332")
    self.assertEqual(
        response[0][2], ["otheruser", "#somechannel", "This is the new topic."]
    )
    self.assertEqual(response[1][1], "333")
    self.assertEqual(
        response[1][2], ["otheruser", "#somechannel", "otheruser", "12345"]
    )
    other.transport.clear()
    # A nonexistent channel produces 403.
    other.write("TOPIC #asdlkjasd\r\n")
    response = self._response(other)
    self.assertEqual(response[0][1], "403")
def testGroupMessage(self):
    """A PRIVMSG to a channel is delivered to the other members but is not
    echoed back to the sender."""
    user = self._loggedInUser("useruser")
    self.successResultOf(self.realm.createGroup("somechannel"))
    user.write("JOIN #somechannel\r\n")
    other = self._loggedInUser("otheruser")
    other.write("JOIN #somechannel\r\n")
    user.transport.clear()
    other.transport.clear()
    user.write("PRIVMSG #somechannel :Hello, world.\r\n")
    response = self._response(user)
    event = self._response(other)
    # The sender receives no echo of its own message.
    self.assertFalse(response)
    self.assertEqual(len(event), 1)
    self.assertEqual(event[0][0], "useruser!useruser@realmname")
    # BUG FIX: a stray positional ``-1`` was previously passed here; it was
    # silently consumed as assertEqual's *msg* argument and served no purpose.
    self.assertEqual(event[0][1], "PRIVMSG")
    self.assertEqual(event[0][2], ["#somechannel", "Hello, world."])
def testPrivateMessage(self):
    """A direct PRIVMSG reaches only the target; messaging an unknown nick
    yields ERR_NOSUCHNICK (401)."""
    user = self._loggedInUser("useruser")
    other = self._loggedInUser("otheruser")
    user.transport.clear()
    other.transport.clear()
    user.write("PRIVMSG otheruser :Hello, monkey.\r\n")
    response = self._response(user)
    event = self._response(other)
    # No echo to the sender; exactly one PRIVMSG delivered to the target.
    self.assertFalse(response)
    self.assertEqual(len(event), 1)
    self.assertEqual(event[0][0], "useruser!useruser@realmname")
    self.assertEqual(event[0][1], "PRIVMSG")
    self.assertEqual(event[0][2], ["otheruser", "Hello, monkey."])
    # Unknown recipient: the realm answers with numeric 401.
    user.write("PRIVMSG nousernamedthis :Hello, monkey.\r\n")
    response = self._response(user)
    self.assertEqual(len(response), 1)
    self.assertEqual(response[0][0], "realmname")
    self.assertEqual(response[0][1], "401")
    self.assertEqual(
        response[0][2], ["useruser", "nousernamedthis", "No such nick/channel."]
    )
def testOper(self):
    """An OPER attempt is refused with ERR_NOOPERHOST (491)."""
    client = self._loggedInUser("useruser")
    client.transport.clear()
    client.write("OPER user pass\r\n")
    replies = self._response(client)
    self.assertEqual(len(replies), 1)
    self.assertEqual(replies[0][1], "491")
def testGetUserMode(self):
    """Querying one's own user mode yields RPL_UMODEIS (221) with no flags."""
    client = self._loggedInUser("useruser")
    client.transport.clear()
    client.write("MODE useruser\r\n")
    replies = self._response(client)
    self.assertEqual(len(replies), 1)
    prefix, command, params = replies[0]
    self.assertEqual(prefix, "realmname")
    self.assertEqual(command, "221")
    self.assertEqual(params, ["useruser", "+"])
def testSetUserMode(self):
    """Attempting to set a user mode is rejected with numeric 472."""
    client = self._loggedInUser("useruser")
    client.transport.clear()
    client.write("MODE useruser +abcd\r\n")
    replies = self._response(client)
    self.assertEqual(len(replies), 1)
    self.assertEqual(replies[0][1], "472")
def testGetGroupMode(self):
    """MODE on a joined channel yields RPL_CHANNELMODEIS (324)."""
    client = self._loggedInUser("useruser")
    self.successResultOf(self.realm.createGroup("somechannel"))
    client.write("JOIN #somechannel\r\n")
    client.transport.clear()
    client.write("MODE #somechannel\r\n")
    replies = self._response(client)
    self.assertEqual(len(replies), 1)
    self.assertEqual(replies[0][1], "324")
def testSetGroupMode(self):
    """Attempting to set channel modes is rejected with numeric 472."""
    client = self._loggedInUser("useruser")
    self.successResultOf(self.realm.createGroup("groupname"))
    client.write("JOIN #groupname\r\n")
    client.transport.clear()
    client.write("MODE #groupname +abcd\r\n")
    replies = self._response(client)
    self.assertEqual(len(replies), 1)
    self.assertEqual(replies[0][1], "472")
def testWho(self):
    """WHO for a channel returns one RPL_WHOREPLY (352) per member, in any
    order, followed by RPL_ENDOFWHO (315), and only to the requester."""
    group = service.Group("groupname")
    self.successResultOf(self.realm.addGroup(group))
    users = []
    for nick in "userone", "usertwo", "userthree":
        u = self._loggedInUser(nick)
        users.append(u)
        users[-1].write("JOIN #groupname\r\n")
    for user in users:
        user.transport.clear()
    users[0].write("WHO #groupname\r\n")
    r = self._response(users[0])
    # Other members receive nothing for someone else's WHO.
    self.assertFalse(self._response(users[1]))
    self.assertFalse(self._response(users[2]))
    wantusers = ["userone", "usertwo", "userthree"]
    # Everything but the final reply is a 352 member line.
    for (prefix, code, stuff) in r[:-1]:
        self.assertEqual(prefix, "realmname")
        self.assertEqual(code, "352")
        # NOTE: this unpack rebinds ``group`` from the Group object to the
        # channel-name string for the remainder of the loop body.
        (
            myname,
            group,
            theirname,
            theirhost,
            theirserver,
            theirnick,
            flag,
            extra,
        ) = stuff
        self.assertEqual(myname, "userone")
        self.assertEqual(group, "#groupname")
        self.assertTrue(theirname in wantusers)
        self.assertEqual(theirhost, "realmname")
        self.assertEqual(theirserver, "realmname")
        # Removing from wantusers ensures each nick appears exactly once.
        wantusers.remove(theirnick)
        self.assertEqual(flag, "H")
        self.assertEqual(extra, "0 " + theirnick)
    self.assertFalse(wantusers)
    # The last reply terminates the listing.
    prefix, code, stuff = r[-1]
    self.assertEqual(prefix, "realmname")
    self.assertEqual(code, "315")
    myname, channel, extra = stuff
    self.assertEqual(myname, "userone")
    self.assertEqual(channel, "#groupname")
    self.assertEqual(extra, "End of /WHO list.")
def testList(self):
    """LIST with a channel argument returns one RPL_LIST (322) line plus
    RPL_LISTEND (323); a bare LIST enumerates every group the same way."""
    user = self._loggedInUser("someuser")
    user.transport.clear()
    somegroup = self.successResultOf(self.realm.createGroup("somegroup"))
    # Stub size() so the member count in the reply is deterministic.
    somegroup.size = lambda: succeed(17)
    somegroup.meta["topic"] = "this is the topic woo"
    # Test one group
    user.write("LIST #somegroup\r\n")
    r = self._response(user)
    self.assertEqual(len(r), 2)
    resp, end = r
    self.assertEqual(resp[0], "realmname")
    self.assertEqual(resp[1], "322")
    self.assertEqual(resp[2][0], "someuser")
    self.assertEqual(resp[2][1], "somegroup")
    self.assertEqual(resp[2][2], "17")
    self.assertEqual(resp[2][3], "this is the topic woo")
    self.assertEqual(end[0], "realmname")
    self.assertEqual(end[1], "323")
    self.assertEqual(end[2][0], "someuser")
    self.assertEqual(end[2][1], "End of /LIST")
    user.transport.clear()
    # Test all groups
    user.write("LIST\r\n")
    r = self._response(user)
    self.assertEqual(len(r), 2)
    fg1, end = r
    self.assertEqual(fg1[1], "322")
    self.assertEqual(fg1[2][1], "somegroup")
    self.assertEqual(fg1[2][2], "17")
    self.assertEqual(fg1[2][3], "this is the topic woo")
    self.assertEqual(end[1], "323")
def testWhois(self):
    """WHOIS returns 311 (user), 312 (server), 317 (idle), 319 (channels)
    and 318 (end of WHOIS), in that order."""
    user = self._loggedInUser("someguy")
    otherguy = service.User("otherguy")
    # Stub group membership and timing data for deterministic replies.
    otherguy.itergroups = lambda: iter(
        [service.Group("groupA"), service.Group("groupB")]
    )
    otherguy.signOn = 10
    # 15 seconds of idle time relative to "now".
    otherguy.lastMessage = time.time() - 15
    self.successResultOf(self.realm.addUser(otherguy))
    user.transport.clear()
    user.write("WHOIS otherguy\r\n")
    r = self._response(user)
    self.assertEqual(len(r), 5)
    wuser, wserver, idle, channels, end = r
    self.assertEqual(wuser[0], "realmname")
    self.assertEqual(wuser[1], "311")
    self.assertEqual(wuser[2][0], "someguy")
    self.assertEqual(wuser[2][1], "otherguy")
    self.assertEqual(wuser[2][2], "otherguy")
    self.assertEqual(wuser[2][3], "realmname")
    self.assertEqual(wuser[2][4], "*")
    self.assertEqual(wuser[2][5], "otherguy")
    self.assertEqual(wserver[0], "realmname")
    self.assertEqual(wserver[1], "312")
    self.assertEqual(wserver[2][0], "someguy")
    self.assertEqual(wserver[2][1], "otherguy")
    self.assertEqual(wserver[2][2], "realmname")
    self.assertEqual(wserver[2][3], "Hi mom!")
    self.assertEqual(idle[0], "realmname")
    self.assertEqual(idle[1], "317")
    self.assertEqual(idle[2][0], "someguy")
    self.assertEqual(idle[2][1], "otherguy")
    self.assertEqual(idle[2][2], "15")
    self.assertEqual(idle[2][3], "10")
    self.assertEqual(idle[2][4], "seconds idle, signon time")
    self.assertEqual(channels[0], "realmname")
    self.assertEqual(channels[1], "319")
    self.assertEqual(channels[2][0], "someguy")
    self.assertEqual(channels[2][1], "otherguy")
    self.assertEqual(channels[2][2], "#groupA #groupB")
    self.assertEqual(end[0], "realmname")
    self.assertEqual(end[1], "318")
    self.assertEqual(end[2][0], "someguy")
    self.assertEqual(end[2][1], "otherguy")
    self.assertEqual(end[2][2], "End of WHOIS list.")
class TestMind(service.PBMind):
    """PBMind stub that records every remote event it receives, so tests can
    inspect what the server pushed to the client.

    NOTE(review): __init__ accepts *a/**kw but does not forward them to
    service.PBMind.__init__ -- presumably fine for a test stub; confirm the
    base class needs no initialization.
    """

    def __init__(self, *a, **kw):
        # Event logs inspected by the tests.
        self.joins = []
        self.parts = []
        self.messages = []
        self.meta = []

    def remote_userJoined(self, user, group):
        self.joins.append((user, group))

    def remote_userLeft(self, user, group, reason):
        self.parts.append((user, group, reason))

    def remote_receive(self, sender, recipient, message):
        self.messages.append((sender, recipient, message))

    def remote_groupMetaUpdate(self, group, meta):
        self.meta.append((group, meta))
# Tell Perspective Broker how to deserialize server-side references to
# TestMind instances.
pb.setUnjellyableForClass(TestMind, service.PBMindReference)
class PBProtocolTests(unittest.TestCase):
def setUp(self):
    """Wire a PB server (words realm + in-memory credential checker) to a
    PB client over a real TCP connection on an ephemeral port."""
    self.realm = service.InMemoryWordsRealm("realmname")
    self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
    self.portal = portal.Portal(self.realm, [self.checker])
    self.serverFactory = pb.PBServerFactory(self.portal)
    # Substitute our factory so tearDown can wait for the server protocol
    # to disconnect.
    self.serverFactory.protocol = self._protocolFactory
    self.serverFactory.unsafeTracebacks = True
    self.clientFactory = pb.PBClientFactory()
    self.clientFactory.unsafeTracebacks = True
    # Port 0: let the OS pick a free port, then connect the client to it.
    self.serverPort = reactor.listenTCP(0, self.serverFactory)
    self.clientConn = reactor.connectTCP(
        "127.0.0.1", self.serverPort.getHost().port, self.clientFactory
    )
def _protocolFactory(self, *args, **kw):
    """Build the server-side Broker, stashing it so tearDown can wait for
    its disconnection.

    NOTE(review): the incoming *args/**kw are dropped; pb.Broker(0) is
    constructed unconditionally -- confirm this matches the factory's
    expected protocol signature.
    """
    self._serverProtocol = pb.Broker(0)
    return self._serverProtocol
def tearDown(self):
    """Stop listening, drop the client connection, and wait until the
    server-side protocol has actually disconnected."""
    d3 = Deferred()
    self._serverProtocol.notifyOnDisconnect(lambda: d3.callback(None))
    # The test ends only when all three shutdown steps complete.
    return DeferredList(
        [
            maybeDeferred(self.serverPort.stopListening),
            maybeDeferred(self.clientConn.disconnect),
            d3,
        ]
    )
def _loggedInAvatar(self, name, password, mind):
    """Register *name* with the checker and realm, then log a PB client in.

    Returns a Deferred firing with the logged-in avatar reference.
    """
    encoded = name.encode("ascii") if isinstance(name, str) else name
    self.checker.addUser(encoded, password)
    creds = credentials.UsernamePassword(encoded, password)
    deferred = self.realm.createUser(name)
    deferred.addCallback(lambda _: self.clientFactory.login(creds, mind))
    return deferred
@defer.inlineCallbacks
def testGroups(self):
mindone = TestMind()
one = yield self._loggedInAvatar("one", b"p1", mindone)
mindtwo = TestMind()
two = yield self._loggedInAvatar("two", b"p2", mindtwo)
mindThree = TestMind()
three = yield self._loggedInAvatar(b"three", b"p3", mindThree)
yield self.realm.createGroup("foobar")
yield self.realm.createGroup(b"barfoo")
groupone = yield one.join("foobar")
grouptwo = yield two.join(b"barfoo")
yield two.join("foobar")
yield two.join(b"barfoo")
yield three.join("foobar")
yield groupone.send({b"text": b"hello, monkeys"})
yield groupone.leave()
yield grouptwo.leave()
|
<gh_stars>1-10
import threading, sys
import random, string
import socket
#p=21 q=109 n=2289 e=7 d=1543
def encrypt(data):
    """RSA-style encrypt *data* (a str) into a comma-separated digit string.

    Toy parameters from the header comment: e=7, n=2289.  Each character's
    code point is raised to e modulo n, so the scheme only round-trips for
    code points below n.
    """
    e, n = 7, 2289
    print("encrypting : " + data)
    # pow(x, e, n) performs modular exponentiation directly instead of
    # materializing the large intermediate x ** e and then reducing it.
    crypteddata = [pow(ord(ch), e, n) for ch in data]
    return ",".join(map(str, crypteddata))
def decrypt(data):
    """Inverse of encrypt(): decode a comma-separated digit string to text.

    d=1543 satisfies e*d ≡ 1 (mod lambda(2289)), so decrypt(encrypt(s)) == s
    for characters whose code points are below n.
    """
    d, n = 1543, 2289
    print("decrypting : " + data)
    # 3-arg pow avoids computing the astronomically large x ** 1543.
    decrypteddata = [pow(int(tok), d, n) for tok in data.split(",")]
    return "".join(chr(x) for x in decrypteddata)
class BreakRoom:
    """UDP job master for a distributed hash-cracking "break room".

    Listens for datagrams of the form ``<op>:<payload>``.  Ops handled:
    ``job`` (hand out the next charset job), ``res`` (print the result and
    stop the comm loop), ``auth`` (reply with the RSA-decrypted payload).
    """

    def __init__(self, host, port, code):
        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.server_socket.bind((host,port))
        # The sha1 hash (or hash-file reference) slaves are asked to crack.
        self.code=code
        self.the_queue = []
        self._generate_new_queue()
        # Background thread serving requests; runs until a 'res' arrives.
        threading.Thread(name="comm", target=self.comm).start()

    def gettask(self):
        """Pop the next charset job, or return a no-work message (bytes)."""
        if not len(self.the_queue):
            return b'Thanks no work rn'
        job=self.the_queue.pop()
        print("Sending job "+job)
        return ("job"+":"+self.code+":"+job).encode()

    def comm(self):
        """Datagram dispatch loop; exits after the first 'res' message."""
        while True:
            m, address = self.server_socket.recvfrom(1024)
            #print("What"+str(message)+" "+str(address))
            # Messages are colon-separated: op[:payload...]
            m=m.decode().split(":")
            if m[0] == 'job':
                self.server_socket.sendto(self.gettask(),address)
                print(address)
            elif m[0] == 'res':
                print("GOT RESULT : {}".format(m[1]))
                break
            elif m[0] == 'auth':
                #handle auth right here
                self.server_socket.sendto(("auth:"+decrypt(m[1])).encode(),address)

    def _generate_new_queue(self):
        """Fill the queue with candidate charsets.

        The list is reversed at the end so that pop() serves charsets in the
        order they were appended (plain lowercase first).
        NOTE(review): several charsets are appended twice (lowercase+nums+
        space, uppercase+nums+space, letters+nums+space); confirm whether
        the duplicates are intentional.
        """
        nums = '1234567890'
        special = '`~!@#$%^&*()_+-='
        space = ' '
        self.the_queue.append(str(string.ascii_lowercase))
        self.the_queue.append(str(string.ascii_letters))
        self.the_queue.append(str(string.ascii_lowercase + nums))
        self.the_queue.append(str(string.ascii_letters + nums))
        self.the_queue.append(str(string.ascii_uppercase))
        self.the_queue.append(str(string.ascii_uppercase + nums))
        self.the_queue.append(str(string.ascii_lowercase + nums + special))
        self.the_queue.append(str(string.ascii_lowercase + special))
        self.the_queue.append(str(string.ascii_letters + special))
        self.the_queue.append(str(string.ascii_letters + nums + special))
        self.the_queue.append(str(string.ascii_uppercase + nums + special))
        self.the_queue.append(str(string.ascii_uppercase + special))
        self.the_queue.append(str(string.ascii_lowercase + nums + space))
        self.the_queue.append(str(string.ascii_uppercase + nums + space))
        self.the_queue.append(str(string.ascii_letters + nums + space))
        self.the_queue.append(str(string.ascii_lowercase + nums + space))
        self.the_queue.append(str(string.ascii_uppercase + nums + space))
        self.the_queue.append(str(string.ascii_letters + nums + space))
        self.the_queue.reverse()
if __name__ == '__main__':
    # Exactly three positional arguments are accepted: host, port, hash.
    # NOTE(review): the usage text advertises an optional slave count, but
    # len(sys.argv) == 4 rejects it -- confirm the intended CLI.
    if len(sys.argv) == 4:
        BreakRoom(sys.argv[1], int(sys.argv[2]), sys.argv[3])
    else:
        print("syntax : python3 master.py [local ip addr] [port] [sha1 code /sha1 file] optional[no of localslaves]\n "
              "eg : python3 master.py 192.168.1.105 9987 e5acb1a96e34cd7f263aada0b69aa58bdd722a03 3 ")
|
import numpy as np
import pandas as pd
from scipy.stats import beta
from src.base.sampler import Sampler
class BinaryPAL_ACS_Sampler(Sampler):
    """Probabilistic Active Learning sampler for Active Class Selection
    (PAL-ACS) on binary-labelled data.

    Keyword arguments (popped before delegating to the base Sampler):
      n_p       -- number of pseudo-instances evaluated per class (default 25)
      M         -- number of hypothetical samples per class (default 1)
      step_size -- step size (default 0.01; not referenced elsewhere in
                   this class)
    """

    def __init__(self, *args, **kwargs):
        self.n_p = kwargs.pop("n_p") if "n_p" in kwargs else 25
        self.M = kwargs.pop("M") if "M" in kwargs else 1
        self.step_size = kwargs.pop("step_size") if "step_size" in kwargs else 0.01
        super().__init__(*args, **kwargs)

    def inform(self, *args, **kwargs):
        """Delegate to the base class, then cache the feature count."""
        super().inform(*args, **kwargs)
        self.n_features = len(self.dataset.get_cf_names())

    def sample(self):
        """Pick the class whose next label is expected to improve performance
        most, and sample an instance of that class.

        Falls back to initial_sample() until initial_batch_size labelled
        instances exist (both inherited from the base Sampler).
        """
        self.update()
        if len(self.dataset.complete.index) < self.initial_batch_size:
            return self.initial_sample()
        known_cf = self.dataset.complete[self.dataset.get_cf_names()]
        labels = [0, 1]
        # NOTE(review): this compares every cell of the frame against the
        # label value; presumably the intent is to filter on the label
        # column -- verify against the dataset layout.
        class_instances = [
            self.dataset.complete[self.dataset.complete == label].index
            for label in labels
        ]
        # NOTE(review): Index.__and__ (set intersection) is deprecated in
        # newer pandas; .intersection() is the supported spelling.
        n_known = [
            len(self.dataset.complete.index & class_instances[label])
            for label in labels
        ]
        # NOTE(review): precedence gives mean(n_known) + (1 / (i + 1));
        # confirm (mean(n_known) + 1) / (i + 1) was not intended.
        weights = [np.mean(n_known) + 1 / (i + 1) for i in n_known]
        # Pseudo-instances used to evaluate each class's expected gain.
        evaluation_instances = np.array(
            [
                self.sample_from_label(class_instances[label], self.n_p)
                for label in labels
            ]
        )
        # Hypothetical new samples per class, used for the "what if we
        # labelled M more of this class" estimate.
        sampled_instances = np.array(
            [self.sample_from_label(class_instances[label], self.M) for label in labels]
        )
        known_data = [
            known_cf[known_cf.index.isin(class_instances[label])] for label in labels
        ]
        current_performance = np.array(
            [
                [self.estimate_exp_perf(i, known_data) for i in instances]
                for label, instances in zip(labels, evaluation_instances)
            ]
        )
        expected_performance = np.array(
            [
                [
                    self.estimate_exp_perf(
                        i,
                        known_data,
                        sampled_data=sampled_instances[label],
                        # labels is [0, 1], so index(label) == label here.
                        sampled_label=list(labels).index(label),
                    )
                    for i in instances
                ]
                for label, instances in zip(labels, evaluation_instances)
            ]
        )
        diff = expected_performance - current_performance
        gain = [diff[i] * weights[i] for i in labels]
        # Normalize by the number of hypothetical samples per class.
        pgain = [np.mean(gain[i]) / self.M for i in labels]
        return self.sample_class(np.argmax(pgain))

    def sample_from_label(self, label, n):
        """Samples n pseudo-instances from the distribution of the given class."""
        # With probability n_known/(n_known+2) draw near a known instance
        # (Parzen kernel), otherwise draw uniformly over the feature bounds.
        n_known = len(self.dataset.complete.index & label)
        ratio = n_known / (n_known + 2)
        samples = []
        for _ in range(n):
            if self.rng.random() <= ratio:
                mu = self.dataset.complete[self.dataset.get_cf_names()].loc[
                    self.rng.choice(self.dataset.complete.index & label)
                ]
                samples.append(
                    [
                        self.rng.normal(mu[i], self.parzen_window[i])
                        for i in range(self.n_features)
                    ]
                )
            else:
                samples.append(
                    [
                        self.rng.uniform(self.lower_bound[i], self.upper_bound[i])
                        for i in range(self.n_features)
                    ]
                )
        return np.array(samples)

    def estimate_exp_perf(
        self, p_instance, known_data, sampled_data=None, sampled_label=None
    ):
        """Estimates the expected performance for a given pseudo-instance and its assigned
        label, given the already known data."""
        n, k = self.get_label_stats(
            p_instance,
            known_data,
            sampled_data=sampled_data,
            sampled_label=sampled_label,
        )
        # Mean of a Beta(max_k + 1, n - max_k + 1) posterior.
        max_k = max(k)
        return beta.mean(max_k + 1, n - max_k + 1)

    def get_label_stats(
        self, p_instance, known_data, sampled_data=None, sampled_label=None
    ):
        """Determines the label statistics (n and k) for a given pseudo-instance and its
        assigned label, given the already known data."""
        n, k = 0, [0, 0]
        for i, label in enumerate(known_data):
            freq = 0
            if sampled_data is not None and sampled_label == i:
                # NOTE(review): DataFrame.append was removed in pandas 2.0;
                # pd.concat is the forward-compatible replacement.
                label = label.append(
                    pd.DataFrame(
                        sampled_data,
                        columns=self.dataset.get_cf_names(),
                        index=[f"s{i}" for i in range(self.M)],
                    )
                )
            for instance in label.iterrows():
                # NOTE(review): the exponent is positive; a Parzen/Gaussian
                # kernel usually uses exp(-d^2 / (2*sigma^2)) -- confirm
                # the sign is intended.
                freq += np.exp(
                    (np.linalg.norm(instance[1] - p_instance) ** 2)
                    / (2 * np.mean(self.parzen_window) ** 2)
                )
            n += freq
            k[i] = freq
        return n, k

    def update(self):
        """Updates the relevant parameters to the PAL-ACS method."""
        # Per-feature bounds over the labelled data; the Parzen bandwidth is
        # a tenth of each feature's range.
        self.upper_bound = [
            self.dataset.complete[cf].max() for cf in self.dataset.get_cf_names()
        ]
        self.lower_bound = [
            self.dataset.complete[cf].min() for cf in self.dataset.get_cf_names()
        ]
        self.parzen_window = [
            (self.upper_bound[i] - self.lower_bound[i]) / 10
            for i in range(self.n_features)
        ]

    def sample_class(self, lbl):
        """Sample an instance from the given (binary) class."""
        if not (lbl == 0 or lbl == 1):
            raise ValueError
        # Prefer an unlabelled instance of the requested class; fall back to
        # any unlabelled instance when none remain.
        return (
            self.rng.choice(
                self.dataset.incomplete[
                    self.dataset.incomplete[self.dataset.get_y_name()] == lbl
                ].index
            )
            if len(
                self.dataset.incomplete[
                    self.dataset.incomplete[self.dataset.get_y_name()] == lbl
                ].index
            )
            > 0
            else self.rng.choice(self.dataset.incomplete.index)
        )

    def get_init(self):
        # Two initial samples: one per class.
        return 2

    def to_string(self):
        return "binary-pal-acs"
|
<reponame>augusto-herrmann/frictionless-py
import os
import pkgutil
from collections import OrderedDict
from importlib import import_module
from .exception import FrictionlessException
from .helpers import cached_property
from .control import Control
from .dialect import Dialect
from .file import File
from . import errors
# NOTE:
# On the next iteration we can improve the plugin system to provide prioritization
# Also, we might cosider having plugin.name although module based naming might be enough
class System:
    """System representation

    API      | Usage
    -------- | --------
    Public   | `from frictionless import system`

    This class provides an ability to make system Frictionless calls.
    It's available as `frictionless.system` singletone.
    """

    def __init__(self):
        # Plugins registered at runtime via register(), in insertion order.
        self.__dynamic_plugins = OrderedDict()

    def register(self, name, plugin):
        """Register a plugin

        Parameters:
            name (str): plugin name
            plugin (Plugin): plugin to register
        """
        self.__dynamic_plugins[name] = plugin
        # Invalidate whichever cached properties exist so the new plugin is
        # picked up.  (Previously "plugins" was only dropped when "methods"
        # happened to be cached too, which could leave a stale plugin list.)
        self.__dict__.pop("plugins", None)
        self.__dict__.pop("methods", None)

    # Actions

    # Hook names a plugin may implement; used to build the dispatch table.
    actions = [
        "create_check",
        "create_control",
        "create_dialect",
        "create_error",
        "create_file",
        "create_loader",
        "create_parser",
        "create_server",
        "create_step",
        "create_storage",
        "create_type",
    ]

    def create_check(self, descriptor):
        """Create checks

        Parameters:
            descriptor (dict): check descriptor

        Returns:
            Check: check
        """
        code = descriptor.get("code", "")
        # Plugins get the first chance; fall back to the builtin checks.
        for func in self.methods["create_check"].values():
            check = func(descriptor)
            if check is not None:
                return check
        for Class in vars(import_module("frictionless.checks")).values():
            if getattr(Class, "code", None) == code:
                return Class(descriptor)
        note = f'cannot create check "{code}". Try installing "frictionless-{code}"'
        raise FrictionlessException(errors.CheckError(note=note))

    def create_control(self, resource, *, descriptor):
        """Create control

        Parameters:
            resource (Resource): control resource
            descriptor (dict): control descriptor

        Returns:
            Control: control
        """
        control = None
        for func in self.methods["create_control"].values():
            control = func(resource, descriptor=descriptor)
            if control is not None:
                return control
        # No plugin claimed it: use the generic Control.
        return Control(descriptor)

    def create_dialect(self, resource, *, descriptor):
        """Create dialect

        Parameters:
            resource (Resource): dialect resource
            descriptor (dict): dialect descriptor

        Returns:
            Dialect: dialect
        """
        dialect = None
        for func in self.methods["create_dialect"].values():
            dialect = func(resource, descriptor=descriptor)
            if dialect is not None:
                return dialect
        return Dialect(descriptor)

    def create_error(self, descriptor):
        """Create errors

        Parameters:
            descriptor (dict): error descriptor

        Returns:
            Error: error
        """
        code = descriptor.get("code", "")
        for func in self.methods["create_error"].values():
            error = func(descriptor)
            if error is not None:
                return error
        for Class in vars(import_module("frictionless.errors")).values():
            if getattr(Class, "code", None) == code:
                return Class(descriptor)
        note = f'cannot create error "{code}". Try installing "frictionless-{code}"'
        raise FrictionlessException(errors.Error(note=note))

    def create_file(self, source, **options):
        """Create file

        Parameters:
            source (any): file source
            options (dict): file options

        Returns:
            File: file
        """
        file = File(source, **options)
        # A plugin may wrap/replace the generic File; otherwise keep it.
        for func in self.methods["create_file"].values():
            plugin_file = func(file)
            if plugin_file is not None:
                return plugin_file
        return file

    def create_loader(self, resource):
        """Create loader

        Parameters:
            resource (Resource): loader resource

        Returns:
            Loader: loader
        """
        loader = None
        name = resource.scheme
        for func in self.methods["create_loader"].values():
            loader = func(resource)
            if loader is not None:
                return loader
        note = f'cannot create loader "{name}". Try installing "frictionless-{name}"'
        raise FrictionlessException(errors.SchemeError(note=note))

    def create_parser(self, resource):
        """Create parser

        Parameters:
            resource (Resource): parser resource

        Returns:
            Parser: parser
        """
        parser = None
        name = resource.format
        for func in self.methods["create_parser"].values():
            parser = func(resource)
            if parser is not None:
                return parser
        note = f'cannot create parser "{name}". Try installing "frictionless-{name}"'
        raise FrictionlessException(errors.FormatError(note=note))

    def create_server(self, name, **options):
        """Create server

        Parameters:
            name (str): server name
            options (str): server options

        Returns:
            Server: server
        """
        server = None
        for func in self.methods["create_server"].values():
            server = func(name, **options)
            if server is not None:
                return server
        note = f'cannot create server "{name}". Try installing "frictionless-{name}"'
        raise FrictionlessException(errors.GeneralError(note=note))

    def create_step(self, descriptor):
        """Create steps

        Parameters:
            descriptor (dict): step descriptor

        Returns:
            Step: step
        """
        code = descriptor.get("code", "")
        for func in self.methods["create_step"].values():
            step = func(descriptor)
            if step is not None:
                return step
        for Class in vars(import_module("frictionless.steps")).values():
            if getattr(Class, "code", None) == code:
                return Class(descriptor)
        # BUG FIX: this message previously said 'cannot create check'
        # (copy-paste from create_check).
        note = f'cannot create step "{code}". Try installing "frictionless-{code}"'
        raise FrictionlessException(errors.StepError(note=note))

    def create_storage(self, name, source, **options):
        """Create storage

        Parameters:
            name (str): storage name
            options (str): storage options

        Returns:
            Storage: storage
        """
        for func in self.methods["create_storage"].values():
            storage = func(name, source, **options)
            if storage is not None:
                return storage
        note = f'cannot create storage "{name}". Try installing "frictionless-{name}"'
        raise FrictionlessException(errors.GeneralError(note=note))

    def create_type(self, field):
        """Create checks

        Parameters:
            field (Field): corresponding field

        Returns:
            Type: type
        """
        code = field.type
        # Renamed from `type` to avoid shadowing the builtin.
        for func in self.methods["create_type"].values():
            candidate = func(field)
            if candidate is not None:
                return candidate
        for Class in vars(import_module("frictionless.types")).values():
            if getattr(Class, "code", None) == code:
                return Class(field)
        note = f'cannot create type "{code}". Try installing "frictionless-{code}"'
        raise FrictionlessException(errors.FieldError(note=note))

    # Methods

    @cached_property
    def methods(self):
        # Map each action name to {plugin_name: bound hook} for the plugins
        # that actually override that action.
        methods = {}
        for action in self.actions:
            methods[action] = OrderedDict()
            for name, plugin in self.plugins.items():
                if action in vars(type(plugin)):
                    func = getattr(plugin, action, None)
                    methods[action][name] = func
        return methods

    # Plugins

    @cached_property
    def plugins(self):
        # External "frictionless_*" distributions plus the builtin
        # frictionless.plugins.* modules; dynamic registrations win first.
        modules = OrderedDict()
        for item in pkgutil.iter_modules():
            if item.name.startswith("frictionless_"):
                module = import_module(item.name)
                modules[item.name] = module
        module = import_module("frictionless.plugins")
        for _, name, _ in pkgutil.iter_modules([os.path.dirname(module.__file__)]):
            module = import_module(f"frictionless.plugins.{name}")
            modules[name] = module
        plugins = OrderedDict(self.__dynamic_plugins)
        for name, module in modules.items():
            Plugin = getattr(module, f"{name.capitalize()}Plugin", None)
            if Plugin:
                plugin = Plugin()
                plugins[name] = plugin
        return plugins
# Module-level singleton exposed to users as `frictionless.system`.
system = System()
|
# -*- coding: utf-8 -*-
import sys
import webbrowser
try:
import urllib.parse as urlparse
except ImportError:
import urlparse as urlparse
import tkinter as tk
def open_browsers(event):
    """Open one browser tab per selected second-hand site, searching for the
    product name currently in the entry field.

    Bound to the Search button's <Button-1> event; *event* is unused.
    Improvements: removed a pointless ``global window`` statement (the
    function never assigns ``window``), replaced ``== True`` comparisons
    with direct truth tests, and hoisted the repeated URL-quoting.
    """
    product = productNameEntry.get()
    quoted = urlparse.quote(product.encode('utf-8'))
    sort_newest = newestSort.get()
    searchedArray = []
    if surugayaCheck.get():
        surugayaURL = "https://www.suruga-ya.jp/search?category=&search_word=" + quoted
        if sort_newest:
            surugayaURL = surugayaURL + "&rankBy=modificationTime%3Adescending"
        searchedArray.append(surugayaURL)
    if mandarakeCheck.get():
        mandarakeURL = "https://order.mandarake.co.jp/order/listPage/list?keyword=" + quoted
        if sort_newest:
            mandarakeURL = mandarakeURL + "&sort=arrival&sortOrder=1"
        searchedArray.append(mandarakeURL)
    if mercariCheck.get():
        mercariURL = "https://www.mercari.com/jp/search/?keyword=" + quoted
        if sort_newest:
            mercariURL = mercariURL + "&sort_order=created_desc"
        searchedArray.append(mercariURL)
    if yahooCheck.get():
        yahooURL = ("https://auctions.yahoo.co.jp/search/search?auccat=&tab_ex=commerce"
                    "&ei=utf-8&aq=-1&oq=&sc_i=&exflg=1&p=" + quoted + "&x=0&y=0")
        if sort_newest:
            yahooURL = yahooURL + "&new=1"
        searchedArray.append(yahooURL)
    if otamartCheck.get():
        otamartURL = "https://otamart.com/search/?keyword=" + quoted
        searchedArray.append(otamartURL)
    for searchPage in searchedArray:
        webbrowser.open_new(searchPage)
# --- Module-level GUI construction ---
window = tk.Tk()
window.title('Second Hand')
# window.geometry('500x500')
# Row 0: product-name entry plus the search trigger.
productLabel = tk.Label(window, text='商品名')
productLabel.grid(row=0, column=0, sticky='w')
productNameEntry = tk.Entry(window)
productNameEntry.grid(row=0, column=1)
searchButton = tk.Button(window, text='Search')
searchButton.bind('<Button-1>', open_browsers)
searchButton.grid(row=0, column=2)
# One checkbox per marketplace; all start selected.
surugayaCheck = tk.BooleanVar()
surugayaCheckButton = tk.Checkbutton(
    window, text="Suruga-ya", variable=surugayaCheck, onvalue=True, offvalue=False, height=0, width=0)
surugayaCheckButton.select()
surugayaCheckButton.grid(row=1, column=0, sticky='w')
mandarakeCheck = tk.BooleanVar()
mandarakeCheckButton = tk.Checkbutton(
    window, text="Mandarake", variable=mandarakeCheck, onvalue=True, offvalue=False, height=0, width=0)
mandarakeCheckButton.select()
mandarakeCheckButton.grid(row=1, column=1)
mercariCheck = tk.BooleanVar()
mercariCheckButton = tk.Checkbutton(
    window, text="Mercari", variable=mercariCheck, onvalue=True, offvalue=False, height=0, width=0)
mercariCheckButton.select()
mercariCheckButton.grid(row=2, column=0, sticky='w')
yahooCheck = tk.BooleanVar()
yahooCheckButton = tk.Checkbutton(
    window, text="Yahoo Auction", variable=yahooCheck, onvalue=True, offvalue=False, height=0, width=0)
yahooCheckButton.select()
yahooCheckButton.grid(row=2, column=1)
otamartCheck = tk.BooleanVar()
otamartCheckButton = tk.Checkbutton(
    window, text="Otamart", variable=otamartCheck, onvalue=True, offvalue=False, height=0, width=0)
otamartCheckButton.select()
otamartCheckButton.grid(row=2, column=2, sticky='w')
# "Sort by newest" toggle applied to the sites that support it.
newestSort = tk.BooleanVar()
newestSortButton = tk.Checkbutton(
    window, text="新品顺", variable=newestSort, onvalue=True, offvalue=False, height=0, width=0)
newestSortButton.select()
newestSortButton.grid(row=3, column=0, sticky='w')
window.mainloop()
|
import datetime
import time
from collections import deque
from loguru import logger
from cat import Cat
from classifier import Classifier
from feeder import Feeder, FOOD_PACKAGE_SIZE
from notifications import (
send_boot_up_notifications,
send_visitation_info,
PHOTO_COOLDOWN,
)
from stream import start_stream, capture_frame
# Delay between video-frame classifications, in seconds.
SECONDS_BETWEEN_FRAMES = 1
# Number of identical consecutive classifications required before acting.
CONSECUTIVE_MATCHES = 4
CLASSIFIER_THRESHOLD = 0.8  # in [0., 1.]
VALIDATION_COOLDOWN = 10  # seconds
# Daily opening hours (start, end) as local times.
BANK_HOURS = (datetime.time(9, 30), datetime.time(15, 30))
class Bank:
    """Camera-driven cat food "bank": classifies frames from a video stream
    and dispenses food to the recognized cat during opening hours."""

    def __init__(self):
        self.feeder = Feeder()
        self.classifier = Classifier()
        self.video_stream = start_stream()
        # Known customers, keyed by the classifier's label strings.
        self.cats = {
            "nadir": Cat("Nadir"),
            "zenith": Cat("Zenith"),
        }
        self.bank_hours = BANK_HOURS
        # Rolling window of the latest classifier labels; feeding requires
        # CONSECUTIVE_MATCHES identical entries.
        self.short_term_match_history = deque(
            [None] * CONSECUTIVE_MATCHES, maxlen=CONSECUTIVE_MATCHES
        )
        # Timestamp of the last visitation photo, for rate limiting.
        self.last_photo = None

    def run(self) -> None:
        """Main loop: classify frames and trigger feeding; never returns."""
        send_boot_up_notifications()
        while True:
            # effectively pause program during off hours
            if not self._is_open():
                self._closing_procedure()
                continue
            # run classifier on frame
            frame = capture_frame(self.video_stream)
            match, certainty = self.classifier.evaluate(frame)
            logger.debug(f"Detecting {match} ({certainty})")
            if certainty > CLASSIFIER_THRESHOLD:
                self.short_term_match_history.append(match)
            # check for consecutive cat matches
            if match not in self.cats or len(set(self.short_term_match_history)) != 1:
                time.sleep(SECONDS_BETWEEN_FRAMES)
                continue
            # NOTE(review): Cat.feed presumably returns True when the cat is
            # allowed to eat (balance available) -- confirm in cat.py.
            if self.cats[match].feed(FOOD_PACKAGE_SIZE):
                self._feeding_procedure(match)
            # NOTE(review): timedelta.seconds ignores the days component;
            # fine while PHOTO_COOLDOWN is well under 24h.
            if (
                self.last_photo is None
                or (datetime.datetime.now() - self.last_photo).seconds > PHOTO_COOLDOWN
            ):
                send_visitation_info(frame, self.cats[match].name)
                self.last_photo = datetime.datetime.now()
            # control video capture cycle length
            time.sleep(
                SECONDS_BETWEEN_FRAMES
            )  # TODO: continue with while loop with condition instead

    def _is_open(self) -> bool:
        """Return True while the local time is inside bank_hours."""
        start_time, end_time = self.bank_hours
        if start_time < datetime.datetime.now().time() < end_time:
            return True
        return False

    def _feeding_procedure(self, match) -> None:
        """Dispense food, then re-check who is actually eating and amend
        balances if the original match was wrong."""
        self.feeder.dispense_food()
        time.sleep(VALIDATION_COOLDOWN)
        feeding_frame = capture_frame(self.video_stream)
        validation_match = self.classifier.validate(feeding_frame, match)
        if match != validation_match:
            logger.warning("Customer validation failed - amending error...")
            self.cats[match].amend_balance_up()
            # NOTE(review): raises KeyError if validate() can return a label
            # that is not a known cat -- confirm its contract.
            self.cats[validation_match].amend_balance_down()

    def _closing_procedure(self) -> None:
        """Reset daily balances, release the camera, and sleep until the
        next opening time."""
        logger.info("Food bank is now closing for today")
        logger.info("Resetting daily food balance and sending daily reports")
        for cat in self.cats.values():
            cat.reset_balance()
            cat.send_daily_report()
        opening_time, closing_time = BANK_HOURS
        # After closing time we reopen tomorrow; before opening time, today.
        if datetime.datetime.now().time() > closing_time:
            opening_day = datetime.date.today() + datetime.timedelta(days=1)
        else:
            opening_day = datetime.date.today()
        sleep_duration = (
            datetime.datetime.combine(opening_day, opening_time)
            - datetime.datetime.now()
        )
        logger.info(
            f"Food bank opens again at {opening_time} "
            f"(in {sleep_duration.seconds / 3600:.2f} hours)"
        )
        self.video_stream.release()
        # NOTE(review): .seconds drops any whole-days component of the gap;
        # fine while the closure is always shorter than 24 hours.
        time.sleep(sleep_duration.seconds)
        logger.info("Reopening the bank now")
        self.video_stream = start_stream()

    def stop(self) -> None:
        """Release hardware resources for a clean shutdown."""
        logger.info("Gracefully stopping program")
        self.feeder.shut_down()
        self.video_stream.release()
|
# coding: UTF-8
import time
import torch
import numpy as np
from torch import nn
from torch.nn.parallel import DistributedDataParallel
from torch.testing._internal.data.network1 import Net
from torch.utils.data import DataLoader, DistributedSampler
from models.FastText import Model
from train_eval import train, init_network
from importlib import import_module
import argparse
import torch.distributed as dist
from torch.nn.parallel import DistributedDataParallel
# CLI definition for the text-classification trainer.
parser = argparse.ArgumentParser(description='Chinese Text Classification')
parser.add_argument('--model', type=str, required=False, help='choose a model: TextCNN, TextRNN, FastText, TextRCNN, TextRNN_Att, DPCNN, Transformer')
parser.add_argument('--embedding', default='pre_trained', type=str, help='random or pre_trained')
parser.add_argument('--word', default=False, type=bool, help='True for word, False for char')
parser.add_argument('--device_ids',type=str,default='2',help="Training Devices")
parser.add_argument('--local_rank',type=int,default=-1,help="DDP parameter,do not modify")
args = parser.parse_args()
if __name__ == '__main__':
    dataset = 'THUCNews'  # dataset directory name
    # Sogou news: embedding_SougouNews.npz, Tencent: embedding_Tencent.npz, random init: random
    embedding = 'embedding_SougouNews.npz'
    if args.embedding == 'random':
        embedding = 'random'
    model_name = args.model  # 'TextRCNN'  # TextCNN, TextRNN, FastText, TextRCNN, TextRNN_Att, DPCNN, Transformer
    # NOTE(review): hard-coded override below makes the --model flag a no-op — confirm intended.
    model_name = 'Transformer'
    #model_name = 'FastText'
    # FastText needs its own n-gram aware data pipeline and random embeddings.
    if model_name == 'FastText':
        from utils_fasttext import build_dataset, build_iterator, get_time_dif
        embedding = 'random'
    else:
        from utils import build_dataset, build_iterator, get_time_dif
    # Dynamically load models/<model_name>.py, which defines Config and Model.
    x = import_module('models.' + model_name)
    config = x.Config(dataset, embedding)
    # Fix every RNG so successive runs are reproducible.
    np.random.seed(1)
    torch.manual_seed(1)
    torch.cuda.manual_seed_all(1)
    torch.backends.cudnn.deterministic = True  # make results repeatable across runs
    start_time = time.time()
    # --------------------------------------
    print("Loading data...")
    vocab, train_data, dev_data, test_data = build_dataset(config, args.word)
    # --------------------------------------
    train_iter = build_iterator(train_data, config)
    dev_iter = build_iterator(dev_data, config)
    test_iter = build_iterator(test_data, config)
    time_dif = get_time_dif(start_time)
    print("Time usage:", time_dif)
    # train
    config.n_vocab = len(vocab)
    # single machine, single GPU
    model = x.Model(config).to(config.device)  # original form
    # single machine, single process, multiple GPUs
    #gpus = [2,3]
    #model = nn.DataParallel(model.cuda(), device_ids=gpus, output_device=gpus[0])
    '''
    #单机多进程多卡GPU python -m torch.distributed.launch --nproc_per_node 4 --master_port 8005 run.py --device_ids=4,5,6,7
    device_ids = list(map(int,args.device_ids.split(',')))
    dist.init_process_group(backend='nccl',init_method='env://')
    device = torch.device('cuda:{}'.format(device_ids[args.local_rank]))
    torch.cuda.set_device(device)
    model = x.Model(config).to(device)
    model = DistributedDataParallel(model,device_ids=[device_ids[args.local_rank]],output_device=device_ids[args.local_rank])
    '''
    # multiple GPUs via DataParallel
    #model = x.Model(config)#.to(config.device)
    #model = torch.nn.DataParallel(model, device_ids=[0, 1, 2, 3]).cuda()
    # Transformer uses its own initialisation; others get the shared init.
    if model_name != 'Transformer':
        init_network(model)
    print(model.parameters)
    train(config, model, train_iter, dev_iter, test_iter)
|
<filename>pytprot/pytprot.py
#!/usr/bin/python3
# Standard library
import sys

# Project-local modules
import parser as args # argparser script
import inputfunctions, chainfunctions, modelfunctions
# Import the argparser object with the necessary arguments
# Import the argparser object with the necessary arguments
options = args.output_argparser()

################
# INPUT PDBs
# Processing the input PDB files
################

# Cache the verbosity flag once so downstream calls can reuse it.
verb = False
if options.verbose:  # If the verbose flag is activated
    verb = True
    print("\tReading PDBs\n")

pdblist = args.check_infile_names(options.infile)  # List of PATHS to the pdb files correctly named

### MACROCOMPLEX INPUT
# If only 1 PDB, containing a full model is provided
if options.macrocomplex and len(pdblist) == 1:  # -m flag activated, only 1 input file
    pdb_macro = pdblist[0]  # Convert to string
    if options.verbose:
        print("\n\nSplitting and re-combining input chains into a pdb_dict...")
    # Parse the macrocomplex into a Structure with interacting pairs and return
    # a pdblist-type dictionary.
    strdict = inputfunctions.macrocomplex_parser(pdb_macro, interact_dist=options.contact_distance, interact_contacts=options.contact_num, redundant_dist=1.9, verbose=verb)

### INTERACTING PAIRS INPUT
# A list of PDB chain pairs is provided
else:
    # -m given together with several files: warn and fall through to pair mode.
    if options.macrocomplex:
        if options.verbose:
            # BUGFIX: `sys` was used here but never imported, so this warning
            # path crashed with NameError (import added at the top of the file).
            sys.stderr.write("-m flag wrongly indicated. The input is a list of PDB files")
            sys.stderr.flush()
    # Convert input file list to pdblist type dictionary
    # NOTE(review): verbose=True ignores the -v flag here — confirm whether
    # verbose=verb was intended, as everywhere else in this script.
    strdict = inputfunctions.pdb_parser(pdblist, verbose=True)

################
# PROCESSING CHAINS
# Re-format list of interacting chains to properly construct the model
################

##### EQUIVALENT CHAIN ID
### Obtain the equivalence between chain id in numbers and letters
##### PDB DICT
### Obtain the same dict as strdict but with the chain id changed to numbers
equivalent_chains, pdb_dict = inputfunctions.chain_processing(strdict)

##### DIFFERENT CHAINS
### Count the "different" chains of a macrocomplex (debugging purposes).
if options.macrocomplex:
    diff_chains = set()
    # `structure`, not `str`: do not shadow the builtin.
    for structure in pdb_dict.values():
        for chain in structure.get_chains():
            diff_chains.add(chain)
    if options.verbose:
        print(f"We have {len(diff_chains)} different chains.")

##### SIMILAR SEQUENCE CHAINS
### Obtain high-sequence (>95%) similarity chains from the interacting pairs
similar_chains_prot = chainfunctions.similar_chains(pdb_dict, "Protein", verbose=verb)  # Protein
similar_chains_dna = chainfunctions.similar_chains(pdb_dict, "DNA", verbose=verb)  # Nucleic acids
# Merge both dictionaries
similar_chains = similar_chains_prot
similar_chains.update(similar_chains_dna)
if options.verbose:
    print(f"Similar chains: ")
    for x, y in similar_chains.items():
        print(f"{x}\t{y}")
    print("\n")

##### UNIQUE-COMMON CHAINS
### Build specific type of dictionary, necessary for model building
unicommon = chainfunctions.unique_common_chains(similar_chains, verbose=verb)
if options.verbose:
    print("UNIQUE-COMMON Chains:")
    for x, y in sorted(unicommon.items(), key=lambda x: x[0]):
        print(f"UNIQUE CHAIN: {x}\tSIMILAR CHAINS: {y}")
    print("\n")

################
# STOICHIOMETRY CHECK
# Read stoichiometry file
################
if options.stoichiometry:
    if options.verbose:
        print(f"Reading stechiometry...")
    # Fetch stoichiometry from input
    stoichiometry_input = inputfunctions.stoichiometry_parser(options.stoichiometry)
    # Complete unicommons with every distinct chain seen in the input.
    diff_chains = set()
    for structure in pdb_dict.values():
        for chain in structure.get_chains():
            diff_chains.add(chain)
    unicommon = chainfunctions.unicommon_completer(unicommon, stoichiometry_input, diff_chains)
else:
    if options.verbose:
        print("No stoichiometry provided... The model will add as many input chains as possible.")

## model construction
if options.stoichiometry:
    final_model = modelfunctions.model_construction(unicommon, pdb_dict, equivalent_chains, stoichiometry_input=stoichiometry_input, verbose=verb, forcing=True)
else:
    final_model = modelfunctions.model_construction(unicommon, pdb_dict, equivalent_chains, verbose=verb, forcing=True)

modelfunctions.save_model(final_model, pdblist, outdir=options.outdir, verbose=verb, macrocomplex=options.macrocomplex)
|
from typing import Optional
from tatsu.ast import AST
from wewcompiler.objects.base import (CompileContext, ExpressionObject,
Scope, StatementObject,
with_ctx)
from wewcompiler.objects.ir_object import (Jump, JumpTarget, Register,
Resize, Return, SaveVar,
Epilog)
from wewcompiler.objects.literals import ArrayLiteral
from wewcompiler.objects.types import Pointer, Type, Array, Function, Void
class VariableDecl(StatementObject):
    """Statement declaring a variable, optionally with an initialiser expression."""

    __slots__ = ("name", "_type", "val")

    def __init__(self, name: str, type: Type, val: Optional[ExpressionObject] = None, *, ast: Optional[AST] = None):
        super().__init__(ast=ast)
        self.name = name
        self._type = type  # may be None / "infer" until resolved by `type`
        self.val = val

    @property
    async def type(self) -> Type:
        # Lazily infer the type from the initialiser when not declared explicitly.
        if self._type is None or self._type == "infer":
            if self.val is None:
                raise self.error(f"Variable {self.name} has no initialiser or type.")
            self._type = await self.val.type
        return self._type

    @property
    def identifier(self) -> str:
        return self.name

    @with_ctx
    async def compile(self, ctx: CompileContext):
        """Declare the variable in *ctx* and emit IR for the initialiser, if any."""
        my_type = await self.type
        if isinstance(my_type, Void):
            raise self.error("Cannot create variables with void type.")
        if self.val is None:  # just a declaration, no types so exit here
            var = ctx.declare_variable(self.name, my_type)
            if isinstance(my_type, Array):
                # Arrays decay: the variable's value is the address of element 0.
                var.type = Pointer(my_type.to)
                var.lvalue_is_rvalue = True
            return
        if isinstance(self.val, ArrayLiteral):
            await self.val.insert_type(my_type)
            await self.val.check_types(my_type)
            # copy back the type of the literal to retrieve the size info
            my_type = await self.val.type
            var = ctx.declare_variable(self.name, my_type)
            if isinstance(my_type, Array):
                # setup storage location for the array
                var.lvalue_is_rvalue = True
                self.val.var = var
                await self.val.compile(ctx)
            else:
                reg: Register = await self.val.compile(ctx)
                if reg.size != var.size:
                    # Widen/narrow the value register to the variable's size.
                    reg0 = reg.resize(var.size, var.type.signed)
                    ctx.emit(Resize(reg, reg0))
                    reg = reg0
                ctx.emit(SaveVar(var, reg))
            # Decay the stored type to a pointer for subsequent accesses.
            var.type = Pointer(var.type.to)
            # var size is now set to the size of the array
        elif isinstance(self.val, ExpressionObject):
            val_type = await self.val.type
            if isinstance(val_type, Void):
                raise self.val.error("Cannot create variables with void type.")
            if not val_type.implicitly_casts_to(my_type):
                raise self.error(f"Specified type {my_type} does not match value type {val_type}")
            var = ctx.declare_variable(self.name, my_type)
            reg: Register = await self.val.compile(ctx)
            if reg.size != var.size:
                reg0 = reg.resize(var.size, var.type.signed)
                ctx.emit(Resize(reg, reg0))
                reg = reg0
            ctx.emit(SaveVar(var, reg))
class ReturnStmt(StatementObject):
    """`return` statement: unwinds inner scopes, then emits a Return IR op."""

    __slots__ = ("expr",)

    def __init__(self, expr: Optional[ExpressionObject] = None, *, ast: Optional[AST] = None):
        super().__init__(ast=ast)
        self.expr = expr  # None for a bare `return` in a void function

    @with_ctx
    async def compile(self, ctx: CompileContext):
        fn_type: Function = ctx.top_function.type
        if self.expr is None:
            # Bare return is only legal in a void function.
            if not isinstance(fn_type.returns, Void):
                raise self.error(f"Void return in function of return type: '{fn_type.returns}'")
            # all scopes but the function scope
            for i in reversed(ctx.scope_stack[1:]):
                ctx.emit(Epilog(i))
            ctx.emit(Return(ctx.top_function))
        else:
            expr_type = await self.expr.type
            # NOTE(review): cast direction looks inverted — one would expect
            # expr_type.implicitly_casts_to(fn_type.returns); confirm against
            # the Type.implicitly_casts_to contract.
            if not fn_type.returns.implicitly_casts_to(expr_type):
                raise self.error(f"Return type '{expr_type}' cannot be casted to '{fn_type.returns}'.")
            reg = await self.expr.compile(ctx)
            # all scopes but the function scope
            for i in reversed(ctx.scope_stack[1:]):
                ctx.emit(Epilog(i))
            if reg.size != fn_type.returns.size:
                # Resize the result register to the declared return size.
                reg0 = reg.resize(fn_type.returns.size, fn_type.returns.signed)
                ctx.emit(Resize(reg, reg0))
                reg = reg0
            ctx.emit(Return(ctx.top_function, reg))
class IFStmt(StatementObject):
    """Conditional statement with an optional else branch."""

    __slots__ = ("cond", "body", "else_")

    def __init__(self, cond: ExpressionObject, body: Scope,
                 else_: Optional[Scope] = None, *, ast: Optional[AST] = None):
        super().__init__(ast=ast)
        self.cond = cond
        self.body = body
        self.else_ = else_

    @with_ctx
    async def compile(self, ctx: CompileContext):
        condition: Register = await self.cond.compile(ctx)
        if self.else_:
            # Lay out the else branch first and have the truthy jump skip over
            # it, so the condition register never needs inverting.
            after_if, truth_target = JumpTarget(), JumpTarget()
            ctx.emit(Jump(truth_target, condition))
            await self.else_.compile(ctx)
            ctx.emit(Jump(after_if))
            ctx.emit(truth_target)
            await self.body.compile(ctx)
            ctx.emit(after_if)
        else:
            # No else: truthy jump enters the body, the fall-through jump exits.
            truth_target, after_if = JumpTarget(), JumpTarget()
            ctx.emit(Jump(truth_target, condition))
            ctx.emit(Jump(after_if))
            ctx.emit(truth_target)
            await self.body.compile(ctx)
            ctx.emit(after_if)
class LoopStmt(StatementObject):
    """While-style loop: the condition is re-evaluated before every iteration."""

    __slots__ = ("cond", "body")

    def __init__(self, cond: ExpressionObject, body: Scope, *, ast: Optional[AST] = None):
        super().__init__(ast=ast)
        self.cond = cond
        self.body = body

    @with_ctx
    async def compile(self, ctx: CompileContext):
        loop_head = JumpTarget()
        body_target = JumpTarget()
        loop_exit = JumpTarget()
        # The condition check lives at the top of the loop.
        ctx.emit(loop_head)
        condition: Register = await self.cond.compile(ctx)
        # A nonzero condition jumps over the exit jump into the body; this is
        # one op (Jump) instead of three (Test, Set, Jump) for an inverted test.
        ctx.emit(Jump(body_target, condition))
        ctx.emit(Jump(loop_exit))
        ctx.emit(body_target)
        await self.body.compile(ctx)
        ctx.emit(Jump(loop_head))
        ctx.emit(loop_exit)
|
import netCDF4
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import LinearSegmentedColormap
import matplotlib.dates as mdates
import pandas as pd
import xarray as xr
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
import seaborn as sns
sns.set()
import utils
import time
sed_crit = 0.1
import glob
def plt_part(df, p_part, col, axis, norm):
    """Plot depth-vs-time trajectories for one plant-part type; return sedimentation depths.

    Args:
        df: xarray Dataset with `plantpart`, `trajectory`, `z`, `time`,
            `sea_floor_depth_below_sea_level` variables (assumed — confirm against caller).
        p_part: plant-part code selecting which particles to plot.
        col: matplotlib colour for the trajectory lines.
        axis: matplotlib Axes to draw on.
        norm: when True, shift every trajectory to start at a common date.

    Returns:
        List of sedimentation depths, one per settled trajectory.
    """
    startdate = np.datetime64('2000-01-01T00:00:00')
    d = df.where(df.plantpart == p_part, drop=True)
    d['dif_depth'] = d.sea_floor_depth_below_sea_level - d.z
    grp = d.groupby('trajectory')
    # Call get_start_sed_depth once per trajectory (the original called it twice).
    candidates = ((utils.get_start_sed_depth(traj_ds), name, traj_ds) for name, traj_ds in grp)
    loop = [item for item in candidates if item[0] != (None, None, None)]
    starts = [item[0][0] for item in loop]
    seds = [item[0][1] for item in loop]
    sed_depths = [item[0][2] for item in loop]
    ds_all = [item[2] for item in loop]
    for k, ds in enumerate(ds_all):  # loop over trajectories
        start, sed = starts[k], seds[k]
        if start != sed:
            if norm:
                # Shift this trajectory so it starts at `startdate`,
                # making lifetimes directly comparable.
                dif = ds.time[start] - startdate
                x = ds.time[start:sed + 1] - dif
                z = ds.z[start:sed + 1]
            else:
                # BUGFIX: previously indexed the whole filtered dataset `d`
                # with per-trajectory indices; must index this trajectory's `ds`.
                x = ds.time[start:sed + 1]
                z = ds.z[start:sed + 1]
            axis.plot(x, z, '-', color=col, linewidth=0.3, alpha=0.5, zorder=9)
            # Mark the sedimentation point.
            axis.plot(x[-1], z[-1].values, 'ko', markersize=0.5, zorder=10)
    if norm:
        axis.set_title('Distribution of particles (type {}), normalized by time'.format(p_part))
        # BUGFIX: '%M' formats minutes; month-day ('%m-%d') was intended.
        frmt = '%m-%d'
    else:
        axis.set_title('Distribution of particles (type {})'.format(p_part))
        frmt = '%b/%d'
    axis.xaxis.set_major_formatter(mdates.DateFormatter(frmt))
    axis.set_ylabel('Depth, m')
    axis.set_xlabel('Month,day of the release')
    axis.set_ylim(350, 0)
    axis.set_xlim(startdate, '2000-02-15T00:00:00')
    return sed_depths
def call_make_plot_mf(paths, experiment, normalize):
    """Build the 3x2 figure of trajectories (left) and sedimentation histograms (right).

    One row per plant-part type (1, 2, 4); `paths` are netCDF files merged along time.
    """
    fig = plt.figure(figsize=(11.69 , 8.27), dpi=100,
                        facecolor='white')
    gs = gridspec.GridSpec(3,2, width_ratios=[3, 1])
    gs.update(left=0.08, right=0.98 ,top = 0.96,bottom = 0.08,
                wspace=0.13 ,hspace=0.37)
    ax1 = fig.add_subplot(gs[0])
    ax1_1 = fig.add_subplot(gs[1])
    ax2 = fig.add_subplot(gs[2])
    ax2_1 = fig.add_subplot(gs[3])
    ax3 = fig.add_subplot(gs[4])
    ax3_1 = fig.add_subplot(gs[5])
    # NOTE(review): bare concat_dim without combine= is deprecated in newer xarray — confirm version.
    with xr.open_mfdataset(paths,concat_dim='time') as ds: #
        df = ds.load()
    #df = xr.open_mfdataset(paths,concat_dim='time')
    # Keep only active particles; flip z so depth is positive downward.
    df = df.where(df.status > -1, drop = True)
    df['z'] = df['z'] * -1.
    sed_depths1 = plt_part(df,1,'#d65460',ax1,normalize)
    sed_depths2 = plt_part(df,2,'g',ax2,normalize)
    sed_depths4 = plt_part(df,4,'#006080',ax3,normalize)
    bins = np.arange(1,200,10)
    ax1_1.hist(sed_depths1,bins = bins,density = True,color = 'k')
    ax2_1.hist(sed_depths2,bins = bins,density = True,color = 'k')
    ax3_1.hist(sed_depths4,bins = bins,density = True,color = 'k')
    for axis2 in (ax1_1,ax2_1,ax3_1):
        axis2.set_title('Sedimentation depths')
        axis2.set_xlim(0,200)
    #if normalize == True:
    #    plt.savefig('Figures/Kelp_trajectories_and_sedimentation_norm_experiment{}.png'.format(experiment),format = 'png')
    #else:
    #    plt.savefig('Figures/Kelp_trajectories_and_sedimentation.png',format = 'png')
    # NOTE(review): relies on module-global start_time set in the __main__ guard;
    # calling this from an import raises NameError — confirm intended usage.
    print("--- It took %s seconds to run the script ---" % (time.time() - start_time))
    plt.show()
if __name__ == '__main__':
    # start_time is read by call_make_plot_mf for its timing printout.
    start_time = time.time()
    pol = utils.get_polygons()
    experiments = (1,2,3,4,5)
    #paths = utils.get_paths(polygons,experiment = 1)
    allpaths = (glob.glob("Data/*.nc"))
    #for exp in experiments:
    #    call_make_plot_mf(utils.get_paths(pol,experiment = exp),experiment = exp,normalize =True)
    #call_make_plot_mf(utils.get_paths(pol,experiment = 2),experiment = 2,normalize =True)
    # Only experiment 5 is currently plotted; other runs are commented out above.
    call_make_plot_mf(utils.get_paths(pol,experiment = 5),experiment = 5,normalize =True)
<gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019-08-22 17:09
# @Author : Yangxiaofei
# @File : rds.py
# @Role : 获取阿里云RDS信息入库
import json
from aliyunsdkcore.client import AcsClient
from aliyunsdkrds.request.v20140815.DescribeDBInstancesRequest import DescribeDBInstancesRequest
from aliyunsdkrds.request.v20140815.DescribeDBInstanceAttributeRequest import DescribeDBInstanceAttributeRequest
from libs.db_context import DBContext
from models.db import DB
from models.server import AssetConfigs, model_to_dict
from libs.web_logs import ins_log
from opssdk.operate import MyCryptV2
import fire
class RdsApi():
    """Fetches Aliyun (Alibaba Cloud) RDS instance data and syncs it into the CMDB."""

    def __init__(self, access_id, access_key, region):
        self.idc = '阿里云'
        self.region = region
        self.access_id = access_id
        self.access_key = access_key
        self.page_number = 1  # page number of the instance list; starts at 1 (API default: 1)
        self.page_size = 5  # rows per page; API maximum: 100, default: 30
        self.client = self.create_client()

    def create_client(self):
        # Every request is authenticated with this account's credentials and region.
        client = AcsClient(self.access_id, self.access_key, self.region)
        return client

    def set_desc_request(self):
        """Build a paginated DescribeDBInstances request with the current page settings."""
        request = DescribeDBInstancesRequest()
        request.set_accept_format('json')
        request.set_PageNumber(self.page_number)
        request.set_PageSize(self.page_size)
        return request

    def get_desc_response(self):
        """
        Execute the DescribeDBInstances request.

        :return: tuple (response_data dict, err) — err is None on success,
                 and response_data is {} on failure
        """
        response_data = {}
        err = None
        request = self.set_desc_request()
        try:
            response = self.client.do_action_with_exception(request)
            response_data = json.loads(str(response, encoding="utf8"))
        except Exception as e:
            err = e
        # print('Reponse:{}'.format(response_data))
        return response_data, err

    def get_tocal_rds_instanceid_list(self):
        # NOTE(review): dead stub — the string below is NOT a docstring (it is
        # not the first statement), and the real logic is commented out; the
        # method just returns None. Consider removing or implementing it.
        pass
        """
        获取所有RDS实例ID列表
        :return:
        """
        # i = 1
        # while True:
        #     response_data, err = self.get_desc_response()
        #     if err != None: break
        #     if 'Items' not in response_data:break
        #     if 'DBInstance' not in response_data:break
        #     i += 1
        #     rds_data = response_data['Items']['DBInstance']
        #
        #
        #     if rds_data:
        #         ins_log.read_log('info', 'Region:{region} - PageNumber:{page}'.format(region=self.region, page=self.page_size))
        #         print(rds_data)
        #         yield rds_data
        #         res = list(map(self.get_attribute_rds, rds_data))
        #     else:
        #         break

    def get_rds_count(self):
        """
        Return the total number of RDS instances (TotalRecordCount), or False on error.
        """
        response_data, err = self.get_desc_response()
        if err != None:
            ins_log.read_log('error', err)
            return False
        count = response_data['TotalRecordCount']
        # print('RdsCount: {count}'.format(count=count))
        return count

    def get_db_instance_id(self):
        """
        Return the list of DB instance IDs on the current page, or False on error / empty page.
        """
        response_data, err = self.get_desc_response()
        if err != None: return False
        rds_data = response_data['Items']['DBInstance']
        if not rds_data: return False
        db_instanceid_list = []
        for i in rds_data:
            db_instanceid_list.append(i.get('DBInstanceId'))
        # print('InstanceIDList: {}'.format(db_instanceid_list))
        return db_instanceid_list

    def get_attribute_response(self):
        """
        Fetch detailed attributes for every instance on the current page.

        :return: list of raw attribute dicts, or False on API error
        :raises TypeError: when get_db_instance_id() returned False (error/empty)
        """
        instance_id_list = self.get_db_instance_id()
        if not isinstance(instance_id_list, list):
            raise TypeError
        rds_attribute_data_list = []
        try:
            request = DescribeDBInstanceAttributeRequest()
            request.set_accept_format('json')
            # The same request object is reused; only the instance id changes per call.
            for instance_id in instance_id_list:
                request.set_DBInstanceId(instance_id)
                response = self.client.do_action_with_exception(request)
                response_data = json.loads(str(response, encoding="utf8"))
                rds_attribute_data = response_data['Items']['DBInstanceAttribute'][0]
                rds_attribute_data_list.append(rds_attribute_data)
            return rds_attribute_data_list
        except Exception as e:
            print(e)
            return False

    def get_rds_info(self):
        """
        Project the raw attribute dicts down to just the fields stored in the CMDB.

        :return: list of flat asset dicts, or False when nothing was fetched
        """
        rds_attribute_data_list = self.get_attribute_response()
        if not rds_attribute_data_list:
            ins_log.read_log('error', 'Not fount rds attribute info...')
            return False
        rds_list = []
        for i in rds_attribute_data_list:
            asset_data = dict()
            asset_data['db_instance_id'] = i.get('DBInstanceId')
            asset_data['db_code'] = i.get('DBInstanceDescription')
            asset_data['db_class'] = i.get('DBInstanceClass')
            asset_data['db_host'] = i.get('ConnectionString')
            asset_data['db_port'] = i.get('Port')
            asset_data['db_disk'] = i.get('DBInstanceStorage')
            asset_data['db_type'] = i.get('Engine')
            asset_data['db_version'] = i.get('EngineVersion')
            asset_data['state'] = i.get('DBInstanceStatus')
            asset_data['db_mark'] = i.get('DBInstanceType')
            asset_data['db_region'] = i.get('ZoneId')
            asset_data['db_detail'] = i.get('DBInstanceDescription')
            rds_list.append(asset_data)
        return rds_list

    def sync_cmdb(self):
        """
        Upsert the fetched RDS rows into the DB table (update existing by db_code, insert new).

        :return: False when nothing was fetched, otherwise None
        """
        rds_info_list = self.get_rds_info()
        if not rds_info_list:
            ins_log.read_log('error', '[Error]: Not get redis info...')
            return False
        with DBContext('w') as session:
            for rds in rds_info_list:
                ins_log.read_log('info', 'RDS信息:{}'.format(rds))
                db_code = rds.get('db_code')
                try:
                    db_user = rds['user']
                except KeyError:
                    # The Aliyun API does not expose a User field; default to root.
                    db_user = 'root'
                exist_rds = session.query(DB).filter(DB.db_code == db_code).first()
                if exist_rds:
                    session.query(DB).filter(DB.db_code == db_code).update({
                        DB.idc: self.idc, DB.db_class: rds.get('db_class'), DB.db_host: rds.get('db_host'),
                        DB.db_port: rds.get('db_port'), DB.db_user: db_user,
                        DB.db_disk: rds.get('db_disk'), DB.db_region: rds.get('db_region'),
                        DB.db_type: rds.get('db_type'), DB.db_version: rds.get('db_version'),
                        DB.state: rds.get('state'), DB.db_mark: rds.get('db_mark'),
                        DB.db_instance_id: rds.get('db_instance_id'), DB.db_detail: rds.get('db_detail')})
                else:
                    new_db = DB(idc=self.idc, db_code=db_code, db_class=rds.get('db_class'), db_host=rds.get('db_host'),
                                db_port=rds.get('db_port'), db_user=db_user, db_disk=rds.get('db_disk'),
                                db_region=rds.get('db_region'), db_type=rds.get('db_type'), db_mark=rds.get('db_mark'),
                                db_version=rds.get('db_version'), state=rds.get('state'),
                                db_instance_id=rds.get('db_instance_id'), db_detail=rds.get('db_detail'))
                    session.add(new_db)
            session.commit()

    def test_auth(self):
        # Minimal one-row request used to verify the credentials are valid.
        self.page_number = '1'
        self.page_size = '1'
        request = self.set_desc_request()
        response = self.client.do_action_with_exception(request)
        response_data = json.loads(str(response, encoding="utf8"))
        return response_data

    def index(self):
        """
        Aliyun caps PageSize at 100, so sync page by page: count/100 pages (+1 for a remainder).
        """
        count = self.get_rds_count()
        print('Tocal:{}'.format(count))
        self.page_size = 100
        mod = count % self.page_size
        if mod:
            total_page_number = int(count / self.page_size) + 1
        else:
            total_page_number = int(count / self.page_size)
        for cur_page_number in range(1, total_page_number + 1):
            self.page_number = cur_page_number
            ins_log.read_log('info', '开始同步阿里云区域:{}第{}页的{}台数据库'.format(self.region, self.page_number, self.page_size))
            self.sync_cmdb()

    # def index(self):
    #     """
    #     若机器超过100台需要进行通过offset+limit获取
    #     :return:
    #     """
    #     count = self.get_rds_count()
    #     # print('Tocal:{}'.format(count))
    #     for c in range(0, count, 100):
    #         self.page_number = str(c)
    #         if (c + 100) > count:
    #             self.page_size = str(count)
    #         else:
    #             self.page_size = str(c + 100)
    #         ins_log.read_log('info',
    #                          '开始同步阿里云区域:{}的第{}--{}台RDS数据库'.format(self.region, self.page_number, self.page_size))
    #         self.sync_cmdb()
def get_configs():
    """Load the enabled Aliyun asset configurations (id / key / region) from the database.

    :return: list of config dicts with timestamps rendered as strings
    """
    configs = []
    with DBContext('r') as session:
        rows = session.query(AssetConfigs).filter(AssetConfigs.account == '阿里云',
                                                  AssetConfigs.state == 'true').all()
        for row in rows:
            record = model_to_dict(row)
            # Timestamps must be stringified so the dicts are JSON-friendly.
            record['create_time'] = str(record['create_time'])
            record['update_time'] = str(record['update_time'])
            configs.append(record)
    return configs
def main():
    """Fetch the enabled Aliyun configs and sync RDS info for each configured region.

    :return: False when no configuration is available, otherwise None
    """
    crypt = MyCryptV2()
    configs = get_configs()
    if not configs:
        ins_log.read_log('error', '没有获取到阿里云资产配置信息,跳过')
        return False
    for cfg in configs:
        # The access key is stored encrypted; decrypt it before use.
        api = RdsApi(cfg.get('access_id'),
                     crypt.my_decrypt(cfg.get('access_key')),
                     cfg.get('region'))
        api.index()


if __name__ == '__main__':
    fire.Fire(main)
|
<gh_stars>0
# vim: set et sw=4 sts=4 fileencoding=utf-8:
#
# An alternate Python Minecraft library for the Rasperry-Pi
# Copyright (c) 2013-2016 <NAME> <<EMAIL>>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
The block module defines the :class:`Block` class, which is used to represent
the type of a block and any associated data it may have, and the class which is
used to implement the :attr:`~picraft.world.World.blocks` attribute on the
:class:`~picraft.world.World` class.
.. note::
All items in this module, except the compatibility constants, are available
from the :mod:`picraft` namespace without having to import
:mod:`picraft.block` directly.
The following items are defined in the module:
Block
=====
.. autoclass:: Block(id, data)
Compatibility
=============
Finally, the module also contains compatibility values equivalent to those
in the mcpi.block module of the reference implementation. Each value represents
the type of a block with no associated data:
=================== ==================== =====================
AIR FURNACE_ACTIVE MUSHROOM_RED
BED FURNACE_INACTIVE NETHER_REACTOR_CORE
BEDROCK GLASS OBSIDIAN
BEDROCK_INVISIBLE GLASS_PANE REDSTONE_ORE
BOOKSHELF GLOWING_OBSIDIAN SAND
BRICK_BLOCK GLOWSTONE_BLOCK SANDSTONE
CACTUS GOLD_BLOCK SAPLING
CHEST GOLD_ORE SNOW
CLAY GRASS SNOW_BLOCK
COAL_ORE GRASS_TALL STAIRS_COBBLESTONE
COBBLESTONE GRAVEL STAIRS_WOOD
COBWEB ICE STONE
CRAFTING_TABLE IRON_BLOCK STONE_BRICK
DIAMOND_BLOCK IRON_ORE STONE_SLAB
DIAMOND_ORE LADDER STONE_SLAB_DOUBLE
DIRT LAPIS_LAZULI_BLOCK SUGAR_CANE
DOOR_IRON LAPIS_LAZULI_ORE TNT
DOOR_WOOD LAVA TORCH
FARMLAND LAVA_FLOWING WATER
FENCE LAVA_STATIONARY WATER_FLOWING
FENCE_GATE LEAVES WATER_STATIONARY
FIRE MELON WOOD
FLOWER_CYAN MOSS_STONE WOOD_PLANKS
FLOWER_YELLOW MUSHROOM_BROWN WOOL
=================== ==================== =====================
Use these compatibility constants by importing the block module explicitly.
For example::
>>> from picraft import block
>>> block.AIR
<Block "air" id=0 data=0>
>>> block.TNT
<Block "tnt" id=46 data=0>
"""
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
try:
from itertools import izip as zip
except ImportError:
pass
str = type('')
import io
import warnings
from math import sqrt
from collections import namedtuple
from itertools import cycle
from pkg_resources import resource_stream
from .exc import EmptySliceWarning
from .vector import Vector, vector_range
import re
from random import choice
# from .blockdata import *
from .blockinterface import BLOCK_INTERFACES, BlockInterface
from .entity import Entity
from .interface import MethodDetailsStructure, SignatureElement, BlockDataStructure
from .method import MethodManager
from .utils import wrap_in_quote, BlockDBStructure, _noner, Caster
# def _read_block_data(filename_or_object):
# if isinstance(filename_or_object, str):
# stream = io.open(filename_or_object, 'rb')
# else:
# stream = filename_or_object
# for line in stream:
# line = line.decode('utf-8').strip()
# if line and not line.startswith('#'):
# id, data, pi, pocket, name, description = line.split(None, 5)
# yield int(id), int(data), bool(int(pi)), bool(int(pocket)), name, description
def _read_block_data(filename_or_object):
    """Yield a BlockDBStructure per non-comment line of the block database.

    Accepts either a filename (opened here in binary mode) or an already-open
    binary stream. Lines starting with '#' and blank lines are skipped.
    """
    if isinstance(filename_or_object, str):
        stream = io.open(filename_or_object, 'rb')
        owns_stream = True  # we opened it, so we must close it
    else:
        stream = filename_or_object
        owns_stream = False
    # BUGFIX: a stream opened from a filename was previously never closed
    # (file-handle leak); close it when iteration finishes or is abandoned.
    try:
        for line in stream:
            line = line.decode('utf-8').strip()
            if line and not line.startswith('#'):
                name, colour, interface, old_block_id, old_block_data, material_properties, other, old_block_description = line.split(
                    None, 7)
                # name, block_data = line.split(None, 2)
                # yield and set to None any 'None' strings
                # NOTE(review): old_block_description is passed before
                # material_properties, the reverse of the split order above —
                # confirm against BlockDBStructure's field order.
                yield BlockDBStructure(
                    name,
                    _noner(colour),
                    _noner(interface),
                    _noner(old_block_id),
                    _noner(old_block_data),
                    _noner(old_block_description),
                    _noner(material_properties),
                    other
                )
    finally:
        if owns_stream:
            stream.close()
#
# def _read_block_color(filename_or_object):
# if isinstance(filename_or_object, str):
# stream = io.open(filename_or_object, 'rb')
# else:
# stream = filename_or_object
# int2color = lambda n: ((n & 0xff0000) >> 16, (n & 0xff00) >> 8, n & 0xff)
# for line in stream:
# line = line.decode('utf-8').strip()
# if line and not line.startswith('#'):
# id, data, color = line.split(None, 2)
# yield int(id), int(data), int2color(int(color, 16))
class BlockDescriptor(object):
    """Parses CraftBukkit block descriptor strings into namespace, name and data.

    Two shapes are accepted: with block data, e.g.
    ``CraftBlockData{minecraft:jungle_stairs[facing=west,half=bottom,shape=straight,waterlogged=false]}``,
    and without, e.g. ``CraftBlockData{minecraft:air}``.
    """

    signature = 'CraftBlockData'
    std_namespace = "minecraft"

    def __init__(self, block_descriptor):
        """
        Pattern needs to match two type of block descriptor. One where block data information is shared
        and one where no block data is present. Like
        CraftBlockData{minecraft:jungle_stairs[facing=west,half=bottom,shape=straight,waterlogged=false]}
        and
        CraftBlockData{minecraft:air}
        :param block_descriptor: raw descriptor string as returned by the server
        """
        # BUGFIX: raw strings — '\{' / '\[' / '\}' are invalid escape sequences
        # in a normal string literal (DeprecationWarning now, an error in
        # future CPython versions); the regex itself is unchanged.
        self.pattern = r'(' + self.signature + r'\{(.+):(.+?)(\[.+\])?\})'
        self._block_descriptor = block_descriptor
        self._namespace = None
        self._block_name = None
        self._block_data = {}
        self.parse_from()

    def __repr__(self):
        return "BlockDescription(%s,%s)" % (self._block_name, self._block_data)

    @classmethod
    def from_string(cls, block_descriptor):
        """Alternate constructor: parse *block_descriptor* into a new instance."""
        return cls(block_descriptor)

    @classmethod
    def block_data_to_string(cls, block_name, block_data, brackets=True, delimter=",", namespace=None):
        """Render ``namespace:name[k=v,...]`` from a data dict ('' when the dict is empty)."""
        namespace = cls.std_namespace if namespace is None else namespace
        r = []
        for k, v in block_data.items():
            r.append(f"{k}={v}")
        params = delimter.join(r)
        params = "[" + params + "]" if brackets else params
        namespace_component = f"{namespace}:" if namespace else namespace
        return f"{namespace_component}{block_name.lower()}{params}" if r else ""

    @classmethod
    def to_string(cls, block_name, block_data):
        """Render a full descriptor string for *block_name* / *block_data*.

        NOTE(review): block_data_to_string already emits "namespace:name[...]",
        so non-empty data repeats the namespace:name segment — confirm intended.
        """
        return f"{cls.signature}" + "{" + f"{cls.std_namespace}:{block_name}" + f"{cls.block_data_to_string(block_name, block_data)}" + "}"

    @property
    def block_descriptor(self):
        return self._block_descriptor

    @block_descriptor.setter
    def block_descriptor(self, value):
        self._block_descriptor = value

    @property
    def namespace(self):
        return self._namespace

    @property
    def block_name(self):
        return self._block_name

    @property
    def block_data(self):
        return self._block_data

    def parse_from(self, block_desriptor=None):
        """Parse a descriptor (defaults to the one given at construction).

        :return: (namespace, block_name, block_data dict)
        :raises ValueError: when the string does not match the expected format
        """
        block_desriptor = block_desriptor if block_desriptor else self._block_descriptor
        match = re.search(self.pattern, block_desriptor)
        if not (match):
            raise ValueError("This block details were not returned in the expected format: %s" % block_desriptor)
        _, self._namespace, self._block_name, block_data_text = match.groups()
        if block_data_text:
            block_data_text = block_data_text.replace("[", "").replace("]", "")
            block_data_list = block_data_text.split(',')
        else:
            block_data_list = []
        for o in block_data_list:
            k, v = o.split('=')
            self.block_data[k] = v
        return (self._namespace, self._block_name, self.block_data)
# class BlockBase(object):
# # def add_valid_block_data(self):
# # pass
# pass
# POLISHED_GRANITE #52392F None 1 2 Polished Granite isRegular,isCompleteBlock,isBlock,isSolid,isItem,isOccluding,hardness(1.5),blastResistance(6.0),maxDurability(0),maxStackSize(64) link:https://minecraft.gamepedia.com/Polished_Granite
class Block(object):
    """A Minecraft block type, optionally bound to a placed block.

    A Block starts life as a "skeleton" (name + block_data only).  Once
    make_instance() supplies a connection and a world position it becomes an
    "instance" and server-side methods can be invoked on it.
    """
    # Master table of known blocks keyed by upper-cased name, loaded from the
    # packaged block3.data file at class-definition time.
    _BLOCKS_DB = {
        (block_db.name.upper()): block_db
        # (block_data, name, colour, old_block_id, old_block_data, old_block_description, material_properties, other)
        for block_db in _read_block_data(resource_stream(__name__, 'block3.data'))
    }
    # Colour -> block lookup used by from_colour(); excludes unstable,
    # melting, transparent and shulker-box blocks.
    _BLOCKS_BY_COLOUR = {
        block_details.colour_tuple_of_int(): block_details for block_details in _BLOCKS_DB.values() if
        block_details.is_complete_block() and block_details.name.upper() not in [
            "SAND",  # UNSTABLE
            "ICE",  # MELTS
            "BLUE_ICE",  # MELTS
            "SHULKER_BOX",  # SHULKER BOXES DISAPPEAR AT A DISTANCE?
            "WHITE_SHULKER_BOX",
            "ORANGE_SHULKER_BOX",
            "MAGENTA_SHULKER_BOX",
            "LIGHT_BLUE_SHULKER_BOX",
            "YELLOW_SHULKER_BOX",
            "LIME_SHULKER_BOX",
            "PINK_SHULKER_BOX",
            "GRAY_SHULKER_BOX",
            "LIGHT_GRAY_SHULKER_BOX",
            "CYAN_SHULKER_BOX",
            "PURPLE_SHULKER_BOX",
            "BLUE_SHULKER_BOX",
            "BROWN_SHULKER_BOX",
            "GREEN_SHULKER_BOX",
            "RED_SHULKER_BOX",
            "BLACK_SHULKER_BOX"
        ] and not block_details.is_transparent_block()
    }
    # NOTE(review): executes on import - leftover diagnostic output.
    print(f"{len(_BLOCKS_BY_COLOUR)} colour blocks found.")
    # Legacy (pre-flattening) lookup by (old id, old data) pairs.
    _BLOCKS_BY_OLD_ID_AND_DATA = {
        (block_details.old_block_id, block_details.old_block_data): block_details for block_details in
        _BLOCKS_DB.values() if block_details.has_old_block_info()
    }
    # Blocks in this list cannot be made into instances - (Mainly AIR at the moment) but it will be how I will implement
    # that a block has been destroyed. i.e. if I re-read its block and block data and it is AIR then I know that it has
    # been destroyed.
    _NON_INSTANCE_BLOCKS = [
        'AIR'
    ]

    def __init__(self, block_name=None, block_data=None, strict=True, update_on_change=True, silent=True,
                 refresh_after_call=True, not_instance=False, force_instance=False, **kwargs):
        """Create a skeleton block (not yet placed in the world).

        :param block_name: block type name (normalised to upper case)
        :param block_data: initial block_data dict (NOTE(review): a caller's
            dict is stored and mutated in place - confirm that is intended)
        :param strict: raise ValueError for invalid block-data names/values
            instead of just warning
        :param update_on_change: push every block_data change straight to the
            world when this block is an instance
        :param silent: update_block() silently skips skeleton blocks
        :param refresh_after_call: re-read the block after each method call
        :param not_instance: never treat this block as an instance (skips the
            directional/multiplefacing probes)
        :param force_instance: allow instances even for _NON_INSTANCE_BLOCKS
        :param kwargs: extra block_data key/value pairs

        A Block only becomes an instance once make_instance() sets both
        _connection and _block_vector.
        """
        # _init_complete gates the __getattribute__/__setattr__ magic so it
        # stays inert while the attributes below are being created.
        self._init_complete = False
        self._valid_methods = {}
        self._interfaces = []
        self._connection = None
        self._block_vector = None  # type: Vector
        self._strict = strict
        self._block_name = block_name.upper() if block_name else block_name
        # Valid block data contains the list of block data properties and (MAYBE) their valid values
        self._valid_block_data = {}
        # Block data contains the current value of the various block data properties
        self._block_data = {} if block_data is None else block_data
        self._method_manager = None
        self._is_placeable_by_blocks = True
        self._update_on_change = update_on_change
        # Should we reload the block after a method call - i.e. block_data will be out of date if the method changes
        # stuff.
        self._refresh_after_call = refresh_after_call
        # If we never want this block to be an instance, then we set not_instance to True. This will stop the kludges
        # being run to find out directional and multiplefaces information
        self._not_instance = not_instance
        self._force_instance = force_instance
        self._silent = silent
        self._init_complete = True
        self.initialise_from_name()
        for k, v in kwargs.items():
            self._block_data[k] = v
        self.process_defaults()
        self.add_valid_methods()
        # This is used as a kludge so that getattribute can look for method calls and pass the original method call name
        # on to the _stub_method handler. The MAY BE problematic in a threaded environment but I don't know enough to be
        # sure.
        self._last_method = None
    @classmethod
    def from_colour(cls, colour, exact=False):
        """
        Construct a :class:`Block` instance from a *color* which can be
        represented as:
        * A tuple of ``(red, green, blue)`` integer byte values between 0 and
          255
        * A tuple of ``(red, green, blue)`` float values between 0.0 and 1.0
        * A string in the format '#rrggbb' where rr, gg, and bb are hexadecimal
          representations of byte values.
        If *exact* is ``False`` (the default), and an exact match for the
        requested color cannot be found, the nearest color (determined simply
        by Euclidian distance) is returned. If *exact* is ``True`` and an exact
        match cannot be found, a :exc:`ValueError` will be raised::
            >>> from picraft import *
            >>> Block.from_colour('#ffffff')
            <Block "wool" id=35 data=0>
            >>> Block.from_colour('#ffffff', exact=True)
            Traceback (most recent call last):
              File "<stdin>", line 1, in <module>
              File "picraft/block.py", line 351, in from_color
                if exact:
            ValueError: no blocks match color #ffffff
            >>> Block.from_colour((1, 0, 0))
            <Block "wool" id=35 data=14>
        Note that calling the default constructor with any of the formats
        accepted by this method is equivalent to calling this method::
            >>> Block('#ffffff')
            <Block "wool" id=35 data=0>
        """
        if isinstance(colour, bytes):
            colour = colour.decode('utf-8')
        if isinstance(colour, str):
            try:
                # '#rrggbb' -> (r, g, b) byte tuple; the bare raise funnels
                # malformed strings into the same error message as bad hex.
                if not (colour.startswith('#') and len(colour) == 7):
                    raise ValueError()
                colour = (
                    int(colour[1:3], 16),
                    int(colour[3:5], 16),
                    int(colour[5:7], 16))
            except ValueError:
                raise ValueError('unrecognized color format: %s' % colour)
        else:
            try:
                r, g, b = colour
            except (TypeError, ValueError):
                raise ValueError('expected three values in color')
            # All-components-in-[0,1] is interpreted as floats and scaled
            # (note this means (1, 0, 0) scales to (255, 0, 0) - documented).
            if 0.0 <= r <= 1.0 and 0.0 <= g <= 1.0 and 0.0 <= b <= 1.0:
                colour = tuple(int(n * 255) for n in colour)
        try:
            # Try for an exact match
            block_structure = cls._BLOCKS_BY_COLOUR[colour]
        except KeyError:
            r, g, b = colour
            if exact:
                raise ValueError(
                    'no blocks match color #%06x' % (r << 16 | g << 8 | b))
            # Nearest match by Euclidean distance in RGB space.
            diff = lambda block_color: sqrt(
                sum((c1 - c2) ** 2 for c1, c2 in zip(colour, block_color)))
            matched_colour = sorted(cls._BLOCKS_BY_COLOUR, key=diff)[0]
            block_structure = cls._BLOCKS_BY_COLOUR[matched_colour]
        return cls(block_structure.name)
    @property
    def refresh_after_call(self):
        # When True, the block re-reads itself after every method invocation.
        return self._refresh_after_call

    @refresh_after_call.setter
    def refresh_after_call(self, value):
        self._refresh_after_call = value

    @property
    def silent(self):
        # When True, update_block() silently skips non-instance blocks.
        return self._silent

    @silent.setter
    def silent(self, value):
        self._silent = value
    def initialise_from_name(self, override_name=None):
        """(Re)load the valid block-data and method interfaces for this block.

        :param override_name: use this name (upper-cased) instead of the
            current self._block_name, e.g. after refresh_block() discovered
            the block changed type in the world.
        """
        name = self._block_name if override_name is None else override_name.upper()
        # If the name has changed then reset the valid block_data and methods
        if name != self._block_name:
            # We must have been refreshed and the block changed so reset what we thought we knew about the block.
            # NOTE(review): _valid_methods is NOT reset here although the
            # comment above says it should be - confirm whether stale method
            # registrations can survive a block-type change.
            self._valid_block_data = {}
        block_structure: BlockDBStructure = Block._BLOCKS_DB.get(name, None)
        if self._block_name is None or block_structure is None or block_structure.interface is None:
            return
        # Now lookup the interfaces from the interface for this block
        self._interfaces: BlockInterface = BLOCK_INTERFACES.get(block_structure.interface.upper())
        if self._interfaces is None:
            print(f"INTERFACE NOT DEFINED FOR BLOCK: {self._block_name}")
            return
        # We now have a BlockInterface description of the method and data interfaces for this block. Let's add the data
        # and interfaces to this block.
        for data_interface in self._interfaces.data_interfaces():
            self.register_valid_block_data_raw(data_interface)
        for method_interface in self._interfaces.method_interfaces():
            self.register_valid_method_raw(method_interface)
        self.update_block(silent=True)
@property
def interfaces(self):
return self._interfaces
def interfaces_name(self):
return self._interfaces.interface_name if self._interfaces else ""
def data_interface_names(self):
return [x.name for x in self._interfaces.data_interfaces()]
def method_interface_names(self):
return [x.name for x in self._interfaces.method_interfaces()]
@property
def update_on_change(self):
return self._update_on_change
@update_on_change.setter
def update_on_change(self, value):
self._update_on_change = value
    def __ga__(self, item):
        # Raw attribute access that bypasses our __getattribute__ override.
        return object.__getattribute__(self, item)

    def __getattribute_or_null(self, item):
        # Like __ga__, but yields an empty dict instead of raising when the
        # attribute does not exist yet (early in __init__).
        try:
            r = object.__getattribute__(self, item)
        except AttributeError as e:
            r = {}
        return r

    def __getattribute__(self, item):
        # THIS NEEDS RE-ARCHITECTING ONCE I PROPERLY UNDERSTAND HOW TO WORK WITH __getattribute__
        # Dynamic dispatch: registered Minecraft method names are rerouted to
        # _stub_method, registered block-data keys are served straight from
        # the _block_data dict; everything else falls through to normal
        # attribute lookup.  Only public names after __init__ are affected.
        if not item.startswith("_") and object.__getattribute__(self, '_init_complete'):
            valid_method = item in self.__getattribute_or_null('_valid_methods')
            valid_block_data = item in self.__getattribute_or_null('_valid_block_data')
            if valid_method:
                # Remember the requested name so _stub_method can dispatch it.
                self._last_method = item
                item = '_stub_method'
            elif valid_block_data:
                return self.__getattribute_or_null('_block_data')[item]
        return object.__getattribute__(self, item)
    def __setattr__(self, key, value):
        # Public names matching a registered block-data key are routed into
        # set_block_data() (which may also push the change to the world); the
        # attribute is then ALSO set normally so ordinary lookup still works.
        if not key.startswith("_") and object.__getattribute__(self, '_init_complete'):
            if self.is_valid_block_data_name(key):
                self.set_block_data(key, value)
                if self._update_on_change:
                    # NOTE(review): set_block_data() already calls
                    # update_block() when _update_on_change is set, so this
                    # appears to update the world twice - confirm.
                    self.update_block(silent=True)
        super().__setattr__(key, value)
def _stub_method(self, *args, **kwargs):
# This is called by the dynamically bound methods
# Original method called was made to self.last_method, so we can use that to call the method on the minecraft
# side
# We've got here because an external call to a method has been bound to this stub. We now need to get the
# call signature for the intended method and invoke it.
intended_method = self._last_method
# Get the method details - named tuple with .name, .get and .signature
method_details = self.get_method_details(intended_method)
method_name = f"{intended_method}|{method_details.signature}"
r = None
if method_details.get:
r = self.invoke_method_return(method_details, *args)
else:
self.invoke_method(method_details, *args)
if self._refresh_after_call:
self.refresh_block()
return r
    def get_method_details(self, method_name):
        # The MethodDetailsStructure for a registered method, or None.
        return self._valid_methods.get(method_name)

    @property
    def is_placeable_by_blocks(self):
        # Marker attribute probed by Blocks.__setitem__ duck-typing checks.
        return self._is_placeable_by_blocks

    def normalise_block_name(self, name):
        # Block names are stored and compared in upper case.
        return name.upper()

    @property
    def block_vector(self):
        # Position of this block in the world (None for skeleton blocks).
        return self._block_vector

    @block_vector.setter
    def block_vector(self, value):
        self._block_vector = value

    @property
    def block_data(self):
        # NOTE(review): re-defined later in the class body with a setter; the
        # later definition wins, so this earlier property is inert.
        return self._block_data
@classmethod
def validate_block_name(cls, block_name):
"""
This method checks the block_name that was passed against the database of known blocks.
It returns True or False to signify a valid or invalid block.
:param block_name:
:return:
"""
return cls.normalise_block_name(block_name) in cls._BLOCKS_DB
@classmethod
def valid_block_names(cls):
return [x for x in cls._BLOCKS_DB.keys()]
    def set_block_data(self, name, value, override_strict=False):
        """Set one block_data key, validating the name and value.

        :param name: block-data key (normalised to lower case)
        :param value: new value (cast to string before storing)
        :param override_strict: bypass strict-mode errors for this call only
        :raises ValueError: in strict mode, for an unknown key or bad value
        :return: self (fluent)
        """
        strict_mode = self._strict and not override_strict
        name = self.normalise_block_data_name(name)
        # Cast the value
        value = Caster.to_string(value, "string")
        valid_key = True
        valid_value = True
        if name not in self.get_valid_block_data_names():
            print(f"block_data with an unknown key is being set: Block:{self._block_name}, key:{name}")
            valid_key = False
        if valid_key:
            # Only validate the value when the key itself is known.
            if not self.is_valid_block_data_value(name, value):
                valid_value = False
        if (not valid_key) and strict_mode:
            raise ValueError(
                '%s is not a valid type of block data in Block %s.' %
                (wrap_in_quote(name), wrap_in_quote(self._block_name))
            )
        if (not valid_value) and strict_mode:
            raise ValueError(
                '%s is not a valid value for type of block data %s in Block %s.' %
                (wrap_in_quote(value), wrap_in_quote(name), wrap_in_quote(self._block_name))
            )
        # In non-strict mode invalid data is stored anyway (warned above).
        self._block_data[name] = value
        if self._update_on_change:
            # Push the change straight to the placed block (no-op for skeletons).
            self.update_block(silent=True)
        return self
    def get_block_data_info(self, name, override_strict=False):
        """
        Return the meta information for block_data.

        :param name: the name of the block data item
        :param override_strict: whether we should raise errors for block data
            that is not found
        :raises ValueError: in strict mode, when the key is unknown/unset
        :return: tuple of (normalised name, current value) - the value is ""
            for an unknown key in non-strict mode
        """
        strict_mode = self._strict and not override_strict
        name = self.normalise_block_data_name(name)
        if (name not in self.get_valid_block_data_names() or name not in self._block_data):
            # We haven't found the key
            if strict_mode:
                raise ValueError(
                    'The block data item %s is unknown in block: %s' % (wrap_in_quote(name), self._block_name)
                )
            else:
                print(f"Warning block_data value for {name} was not found in block {self._block_name}.")
                current_value = ""
        else:
            current_value = self._block_data[name]
        return name, current_value
def get_next_valid_block_data_value(self, name, value):
name = self.normalise_block_data_name(name)
choices = self.get_valid_block_data_choices(name)
index = choices.index(value)
index += 1
if index > (len(choices) - 1):
index = 0
return choices[index]
def get_random_valid_block_data_value(self, name):
name = self.normalise_block_data_name(name)
choices = self.get_valid_block_data_choices(name)
return choice(choices)
    def cycle_block_data(self, name):
        """Advance a block_data key to its next valid value (wrapping)."""
        name = self.normalise_block_data_name(name)
        name, current_value = self.get_block_data_info(name)
        new_value = self.get_next_valid_block_data_value(name, current_value)
        self.set_block_data(name, new_value)
        return self

    def random_block_data(self, name):
        """Set a block_data key to a random valid value."""
        name = self.normalise_block_data_name(name)
        new_value = self.get_random_valid_block_data_value(name)
        self.set_block_data(name, new_value)
        return self

    def valid_values(self, name):
        # NOTE(review): stub - always returns None.  Apparently superseded by
        # get_valid_block_data_choices(); candidate for removal.
        pass
    def describe_block_data(self, block_data_details):
        # Prints (rather than returns) one line describing a block-data entry.
        # NOTE(review): asymmetric with describe_method(), which returns text.
        print(
            f" {block_data_details.name:30} has a default value of {'<' + str(block_data_details.default) + '>':20} and the choices are {block_data_details.choices}")

    def describe_method(self, method_details):
        """Return a one-line description of a method, from its
        "<return>:<arg>,<arg>..." signature string."""
        signature_bits = method_details.signature.split(":")
        # "void" means no return value to display.
        return_value = signature_bits[0] if signature_bits[0] != "void" else ""
        parameter_value = ",".join(signature_bits[1:])
        return_details = f"- Returns a value of <{return_value}>" if method_details.get else ""
        method_and_params = f"{method_details.name}({parameter_value})"
        return f"{method_and_params:40} {return_details}"
def document(self, print_it=False):
pass
print(f"Block of type: {self._block_name}")
print(f" BlockData : {self._block_data}")
block_type = "Skeleton block - A skeleton block has not yet been placed in the minecraft world" \
if not self.is_instance() \
else f"Instance block at {self._block_vector} - An instance block has been placed in the Minecraft world and can have methods called on it"
print(f" {block_type}")
if len(self._valid_methods) == 0:
print(f" No registered methods")
else:
print(f" The following methods are registered:")
for method in self.get_valid_method_names():
details = self.get_method_details(method)
print(f" {self.describe_method(details)}")
if len(self._valid_block_data) == 0:
print(f" No block data is available for this block.")
else:
print(f" The following block data properties are available:")
for block_data_name in self.get_valid_block_data_names():
details = self._valid_block_data[block_data_name]
self.describe_block_data(details)
# r = f"Valid block_data values for block: '{self._block_name}'...\n"
# if self._validators:
# for k, v in self._validators.items():
# default_value, validator = v
# r += f"{validator.document(k, default_value)}"
# if print_it:
# print(r)
    @property
    def strict(self):
        # In strict mode invalid block-data names/values raise ValueError.
        return self._strict

    @strict.setter
    def strict(self, value):
        self._strict = value

    @classmethod
    def from_string(cls, block_desriptor, connection=None, vector=None, block_data_cache=None):
        """Build a Block from descriptor text (parsed by BlockDescriptor).

        If both *connection* and *vector* are given, the new block is
        immediately made an instance (bound to a placed block in the world).
        """
        # if vector is passed then the block will become an instance
        bd = BlockDescriptor(block_desriptor)
        new = cls(bd.block_name, bd.block_data)
        if vector and connection:
            # If we've been passed a vector and a connection then we can become an instance (placed in the MC world)
            new.make_instance(connection, vector, block_data_cache=block_data_cache)
        return new

    def to_string(self):
        # Re-write the block_descriptor from the current name and data.
        return BlockDescriptor.to_string(self._block_name, self._block_data)
def __repr__(self):
return '<BlockBase %s>' % (self.block_name)
def can_be_instance(self):
# Return True if this can be an instance
return self._force_instance or (self._block_name.upper() not in self._NON_INSTANCE_BLOCKS)
    def refresh_block(self):
        """Re-read this block's name and data from the world and re-derive its
        interfaces (the type may have changed - e.g. to AIR when destroyed).

        :return: the parsed BlockDescriptor
        """
        # getBlockWithData returns in the following format
        # CraftBlockData{minecraft:grass_block[snowy=false]}
        self.check_is_instance()
        cmd = 'world.getBlockWithData(%d,%d,%d)' % (self._block_vector.x, self._block_vector.y, self._block_vector.z)
        block_desriptor = self._connection.transact(cmd)
        bd = BlockDescriptor(block_desriptor)
        self._block_name = bd.block_name
        self._block_data = bd.block_data
        # NOTE(review): leftover debug output.
        print(f"refresh_block got {bd}")
        self.initialise_from_name(override_name=bd.block_name)
        self.process_defaults()
        return bd
    def set_block(self, connection, vector, block_data_cache=None, make_instance=True):
        """Place this block at *vector* in the world via *connection*.

        :param block_data_cache: shared cache handed to make_instance()
        :param make_instance: bind this object to the placed block afterwards
            so server-side methods can be invoked on it
        """
        old_vector = self._block_vector
        if self._block_data:
            # NOTE(review): "setBlockWithBlocData" (sic) - presumably matches
            # the server plugin's command name exactly; confirm before
            # "fixing" the spelling.
            cmd = 'world.setBlockWithBlocData(%d,%d,%d,%s,%s)' % (
                vector.x,
                vector.y,
                vector.z,
                self._block_name.upper(),
                BlockDescriptor.block_data_to_string(
                    self._block_name,
                    self._block_data,
                    brackets=True,
                    delimter="/"
                )
            )
        else:
            cmd = 'world.setBlock(%d,%d,%d,%s)' % (
                vector.x, vector.y, vector.z, self._block_name.upper()
            )
        connection.send(cmd)
        # Now set this block as an instance so it's possible for methods to be invoked on it
        if (vector != old_vector or self._connection is None) and make_instance:
            self.make_instance(connection, vector, block_data_cache=block_data_cache)
    def set_blocks(self, connection, vector_from, vector_to):
        """Fill the cuboid [vector_from, vector_to) with this block type.

        EXPERIMENTAL.  The caller passes a contiguous vector_range, whose stop
        is exclusive, while the server command's end coordinates are
        inclusive - hence the range is knocked back by one below.

        NOTE(review): ``vector_to - 1`` assumes component-wise subtraction on
        the Vector type, and the "negative step" question raised in the
        original comments is still unresolved - confirm behaviour for
        reversed ranges.
        """
        vector_to = vector_to - 1
        if self._block_data:
            cmd = 'world.setBlocksWithData(%d,%d,%d,%d,%d,%d,%s,%s)' % (
                vector_from.x,
                vector_from.y,
                vector_from.z,
                vector_to.x,
                vector_to.y,
                vector_to.z,
                self._block_name.upper(),
                BlockDescriptor.block_data_to_string(
                    self._block_name,
                    self._block_data,
                    brackets=True,
                    delimter="/"
                )
            )
        else:
            cmd = 'world.setBlocks(%d,%d,%d,%d,%d,%d,%s)' % (
                vector_from.x, vector_from.y, vector_from.z,
                vector_to.x, vector_to.y, vector_to.z,
                self._block_name.upper()
            )
        connection.send(
            cmd
        )
    # NOTE(review): block_data and block_name below re-define properties that
    # appear earlier in the class body; in Python the later definition wins,
    # so these (with setters) are the effective ones.
    @property
    def block_data(self):
        return self._block_data

    @block_data.setter
    def block_data(self, value):
        self._block_data = value

    @property
    def block_name(self):
        return self._block_name

    @block_name.setter
    def block_name(self, value):
        self._block_name = value

    # name property was added so I could use the same method across entities and blocks to get the name. Need to
    # rationalise the two methods to get _block_name at some point
    @property
    def name(self):
        return self._block_name

    @name.setter
    def name(self, value):
        self._block_name = value
def process_defaults(self, name=None):
# If we have any block data defaults that are not currently set or overridden then we need to set them
# If there are multiple sets of valid values inside _valid_block_data then the last default will override
# previous defaults.
for block_data_name in self.get_valid_block_data_names():
# Check if we are only processing a named set of defaults
if name is not None and name != block_data_name:
continue
structure = self._valid_block_data[block_data_name]
default = structure.default
if block_data_name not in self._block_data and structure.create:
self.set_block_data(block_data_name, structure.default)
    def normalise_block_data_name(self, name):
        # block_data keys are stored and compared in lower case.
        return name.lower()

    def get_valid_block_data_names(self):
        # View of the known block-data keys for this block type.
        return self._valid_block_data.keys()

    def is_valid_block_data_name(self, name):
        return self.normalise_block_data_name(name) in self._valid_block_data

    def get_valid_block_data_choices(self, name):
        # The list of permitted values for a block-data key.
        name = self.normalise_block_data_name(name)
        structure = self._valid_block_data[name]
        return structure.choices

    def is_valid_block_data_value(self, name, value):
        name = self.normalise_block_data_name(name)
        return value in self.get_valid_block_data_choices(name)
    def register_valid_block_data_raw(self, block_data):
        """Register a BlockDataStructure as valid for this block and apply its
        default immediately.  None is ignored (some interfaces carry no data).

        :return: self (fluent)
        """
        if block_data is not None:
            name = self.normalise_block_data_name(block_data.name)
            self._valid_block_data[name] = block_data
            self.process_defaults(name)
        else:
            print("GOT A NONE WHILST REGISTERING VALID BLOCK DATA. ALL IS GOOD.")
        return self

    def get_valid_methods(self):
        # View of registered method names (same as get_valid_method_names).
        return self._valid_methods.keys()

    def normalise_method_name(self, method_name):
        # Method names are stored and compared in lower case.
        return method_name.lower()

    def register_valid_method_raw(self, method_details):
        """Register a MethodDetailsStructure; None is ignored.

        :return: self (fluent)
        """
        if method_details is not None:
            method_name = method_details.name  # type: MethodDetailsStructure
            self._valid_methods[method_name] = method_details
        else:
            print("GOT A NONE WHILST REGISTERING VALID METHODS. ALL IS GOOD.")
        return self
def register_valid_method(self, method_name, calling_signature, get=True):
method_name = self.normalise_method_name(method_name)
self.register_valid_method_raw(
MethodDetailsStructure(name=method_name, get=False, signature=calling_signature)
)
return self
    def get_valid_method_names(self):
        # View of the registered method names.
        return self._valid_methods.keys()

    def is_valid_method_name(self, method_name):
        method_name = self.normalise_method_name(method_name)
        return method_name in self._valid_methods

    def add_valid_methods(self):
        # Hook for subclasses to register additional methods; no-op here.
        pass
    def update_block(self, silent=None):
        """
        If the block is an instance (has a connection and a vector in the game) then we update the block with the
        current block data.

        :param silent: when True, silently skip skeleton blocks; when False,
            raise via check_is_instance().  Defaults to self._silent.
        :return: self - NOTE(review): the silent-skip path returns None, not
            self, so fluent chaining breaks there.
        """
        silent = self._silent if silent is None else silent
        if silent:
            if not self.is_instance():
                return
        else:
            self.check_is_instance()
        self.set_block(self._connection, self._block_vector)
        return self
    def directional_hack(self):
        # blocks with Directional interface do not support all of the possible directions. To establish which ones are
        # available you need to call the getFaces method on the block instance. To hack this, this method will be called
        # by the make_instance method so that it can manipulate the _block_data structure
        self.check_is_instance()
        faces = self.invoke_method_return(MethodDetailsStructure("getFaces", True, "BlockFaces:void"))
        face = self.invoke_method_return(MethodDetailsStructure("getFacing", True, "BlockFace:void"))
        # Build a block-data entry: key "facing", current face as default,
        # the probed faces as choices, create=True.
        bds = BlockDataStructure("facing", face.lower(), [x.lower() for x in faces], True)
        return bds

    def multiplefacing_hack(self):
        # ToDo - needs some thought... (no-op placeholder)
        pass

    def place_at(self, world, vector, make_instance=True):
        # Convenience wrapper: place this block via the world's connection.
        self.set_block(world._connection, vector, make_instance=make_instance)
        return self
    def make_instance(self, connection, block_vector, block_data_cache=None):
        """Bind this block object to a placed block in the world.

        :param block_data_cache: optional shared dict caching the expensive
            per-block-type directional probe
        :raises ValueError: for block types that can never be instances (AIR)
        """
        if self.can_be_instance():
            self._connection = connection
            self._block_vector = block_vector
        else:
            raise ValueError("Blocks of this type %s cannot be made into an instance." % (self._block_name))
        if 'multiplefacing' == self.interfaces_name():
            self.multiplefacing_hack()
        if 'directional' == self.interfaces_name():
            block_name = self.normalise_block_name(self._block_name)
            # Directional blocks need a per-type probe of the valid faces;
            # reuse the shared cache to avoid repeating the round-trip.
            if block_data_cache is not None:
                bds = block_data_cache.get(block_name, None)
            else:
                bds = None
                print(f"No bds was passed")
            if bds is None:
                bds = self.directional_hack()
            self.register_valid_block_data_raw(bds)
            if block_data_cache is not None:
                block_data_cache[block_name] = bds
        self.update_block(silent=True)
def is_instance(self):
return self._connection is not None and self._block_vector is not None
def check_is_instance(self):
if not self.is_instance():
raise ValueError(
'The Block is a shell and has not been placed in the minecraft world or it was not placed as a single block but part of a group.')
    def invoke_material_method_return(self, method_name):
        """Invoke a Material-level method on the placed block and return the
        server's (string) response, or None on CommandError."""
        cmd = 'block.material.invokeMethod(%d,%d,%d,%s)' % (
            self._block_vector.x,
            self._block_vector.y,
            self._block_vector.z,
            method_name
        )
        try:
            r = self._connection.transact(cmd)
        except CommandError:
            # Best-effort: callers treat None as "unknown".
            print('CommandError processing: %s' % (cmd))
            r = None
        return r
    # Thin wrappers over the server-side Material API.  The is* methods
    # compare the server's string response against "true" (so they are False
    # on error/None); the get* methods return the raw response string.
    # NOTE(review): camelCase names kept deliberately - they mirror the
    # Bukkit/Spigot Material method names.
    def isSolid(self):
        return self.invoke_material_method_return("isSolid") == "true"

    def isBlock(self):
        return self.invoke_material_method_return("isBlock") == "true"

    def isAir(self):
        return self.invoke_material_method_return("isAir") == "true"

    def isBurnable(self):
        return self.invoke_material_method_return("isBurnable") == "true"

    def isEdible(self):
        return self.invoke_material_method_return("isEdible") == "true"

    def isFlammable(self):
        return self.invoke_material_method_return("isFlammable") == "true"

    def isFuel(self):
        return self.invoke_material_method_return("isFuel") == "true"

    def isInteractable(self):
        return self.invoke_material_method_return("isInteractable") == "true"

    def isItem(self):
        return self.invoke_material_method_return("isItem") == "true"

    def isOccluding(self):
        return self.invoke_material_method_return("isOccluding") == "true"

    def isRecord(self):
        return self.invoke_material_method_return("isRecord") == "true"

    def getBlastResistance(self):
        return self.invoke_material_method_return("getBlastResistance")

    def getHardness(self):
        return self.invoke_material_method_return("getHardness")

    def getMaxDurability(self):
        return self.invoke_material_method_return("getMaxDurability")

    def getMaxStackSize(self):
        return self.invoke_material_method_return("getMaxStackSize")
# def _method_call(self, method_name, return_value, *args):
# self.check_is_instance()
# all_args = ",".join([str(x) for x in args])
# signature = method_name + "," + all_args if all_args else method_name
# cmd = 'block.invokeMethod(%d,%d,%d,%s)' % (
# self._block_vector.x,
# self._block_vector.y,
# self._block_vector.z,
# signature
# )
# print(f" About to call: {method_name} with a signature of: {signature}: cmd = {cmd}")
# try:
# if return_value:
# r = self._connection.transact(cmd)
# else:
# r = None
# self._connection.send(cmd)
# except CommandError:
# print('CommandError processing: %s' % (cmd))
# r = None
# return r
    def check_method_manager(self):
        # Lazily create the MethodManager bound to this block's connection.
        if self._method_manager is None:
            self._method_manager = MethodManager("block", self._connection)

    def invoke_method(self, method_details, *args):
        """Invoke a server-side method whose return value is not needed."""
        # NOTE(review): `raw` is unused, but reading method_details.raw would
        # raise AttributeError for malformed details - possibly intentional
        # validation, so it is left in place.  Confirm and simplify.
        raw = method_details.raw
        self.check_is_instance()
        self.check_method_manager()
        self._method_manager.method_call(self._block_vector, method_details, *args)

    def invoke_method_return(self, method_details, *args):
        """Invoke a server-side method and return its result."""
        raw = method_details.raw
        self.check_is_instance()
        self.check_method_manager()
        return self._method_manager.method_call(self._block_vector, method_details, *args)
class Blocks(object):
    """
    This class implements the :attr:`~picraft.world.World.blocks` attribute.
    """

    def __init__(self, connection):
        self._connection = connection
        # Block data cache allows for the blocks entity to hold on to expensive information that Block tries
        # to gather (e.g. the per-block-type directional-faces probe).
        self._block_data_cache = {}
def get_cached_block_data(self, block_name):
return self._block_data_cache.get(name, None)
def set_cached_block_data(self, block_name, block_data):
self._block_data_cache[block_name] = block_data
def __repr__(self):
return '<Blocks>'
    def _get_blocks(self, vrange):
        # Fast path (Raspberry Juice only): a single getBlocks round-trip
        # returns a comma-separated list of descriptors for the whole cuboid.
        # The stop coordinate is exclusive, hence the "- step" adjustment.
        return [
            Block.from_string('%s' % i)
            for i in self._connection.transact(
                'world.getBlocks(%d,%d,%d,%d,%d,%d)' % (
                    vrange.start.x, vrange.start.y, vrange.start.z,
                    vrange.stop.x - vrange.step.x,
                    vrange.stop.y - vrange.step.y,
                    vrange.stop.z - vrange.step.z)
            ).split(',')
        ]
    def _get_block_loop(self, vrange):
        # Slow path: one getBlockWithData round-trip per vector.
        # getBlockWithData returns in the following format
        # CraftBlockData{minecraft:grass_block[snowy=false]}
        return [
            Block.from_string(
                self._connection.transact(
                    'world.getBlockWithData(%d,%d,%d)' %
                    (v.x, v.y, v.z)))
            for v in vrange
        ]
    def __getitem__(self, index):
        """Read blocks from the world.

        Accepts a slice of vectors, a vector_range, a single vector-like
        object (has .x/.y/.z), or any iterable of vectors.  Only the
        single-vector form yields an *instance* block (bound to the world).
        """
        if isinstance(index, slice):
            index = vector_range(index.start, index.stop, index.step)
        if isinstance(index, vector_range):
            vrange = index
            if not vrange:
                warnings.warn(EmptySliceWarning(
                    "ignoring empty slice passed to blocks"))
            elif (
                    abs(vrange.step) == Vector(1, 1, 1) and
                    vrange.order == 'zxy' and
                    self._connection.server_version == 'raspberry-juice'):
                # Query for a simple unbroken range (getBlocks fast-path)
                # against a Raspberry Juice server
                return self._get_blocks(vrange)
            else:
                # Query for any other type of range (non-unit step, wrong
                # order, etc.)
                return self._get_block_loop(vrange)
        else:
            try:
                index.x, index.y, index.z
            except AttributeError:
                # Query for an arbitrary collection of vectors
                return self._get_block_loop(index)
            else:
                # Query for a single vector
                # As it's a single block we can pass connection and vector so that the block becomes an instance -
                # (placed in the MC world)
                return Block.from_string(
                    self._connection.transact(
                        'world.getBlockWithData(%d,%d,%d)' %
                        (index.x, index.y, index.z))
                    , self._connection, index,
                    self._block_data_cache
                )
    def _set_blocks(self, vrange, block_or_entity):
        # Fast path: one block type over a contiguous unit-step range.
        # (NOTE(review): assert disappears under `python -O`.)
        assert vrange.step == Vector(1, 1, 1)
        block_or_entity.set_blocks(self._connection, vrange.start, vrange.stop)

    def _set_block_loop(self, vrange, blocks_or_entities):
        # Slow path: place each block individually, pairing vectors with
        # blocks; zip() stops at the shorter of the two sequences.
        for v, b in zip(vrange, blocks_or_entities):
            b.set_block(self._connection, v, block_data_cache=self._block_data_cache, make_instance=False)

    def test_for_single_thing(self):
        # NOTE(review): broken leftover - `value` is undefined here, so any
        # call raises NameError.  Candidate for removal.
        value.block_name
    def __setitem__(self, index, value):
        """Write blocks into the world.

        *index* may be a slice of vectors, a vector_range, a single
        vector-like object, or an iterable of vectors; *value* may be a
        single Block/Entity or an iterable of them.  Single-block/value
        detection is done by duck-typing (attribute probes).
        """
        r = None
        if isinstance(index, slice):
            index = vector_range(index.start, index.stop, index.step)
        if isinstance(index, vector_range):
            vrange = index
            if not vrange:
                warnings.warn(EmptySliceWarning(
                    "ignoring empty slice passed to blocks"))
            else:
                try:
                    # Test for a single block
                    if isinstance(value, Block):
                        value.block_name
                    elif isinstance(value, Entity):
                        value.type_id
                except AttributeError:
                    # Assume multiple blocks have been specified for the range
                    self._set_block_loop(vrange, value)
                else:
                    # We're dealing with a single block for a simple unbroken
                    # range (setBlocks fast-path)
                    if abs(vrange.step) == Vector(1, 1, 1):
                        r = self._set_blocks(vrange, value)
                    else:
                        r = self._set_block_loop(vrange, (value,) * len(vrange))
        else:
            try:
                # Test for a single block / entity
                value.is_placeable_by_blocks
            except AttributeError:
                # Assume multiple blocks have been specified with a collection
                # of vectors
                self._set_block_loop(index, value)
            else:
                try:
                    index.x, index.y, index.z
                except AttributeError:
                    # Assume a single block has been specified for a collection
                    # of vectors
                    self._set_block_loop(index, cycle((value,)))
                else:
                    # A single block for a single vector
                    r = value.set_block(self._connection, index, block_data_cache=self._block_data_cache,
                                        make_instance=True)
        return r
|
<filename>Watcher/Watcher.py
# This is the live version
import time
import json
import threading
import sqlite3
from sopel import module
from sopel import tools
from sseclient import SSEClient as EventSource
class watcher():
    # Class-level (shared, mutable) accumulators of pending report lines.
    # NOTE(review): every user of this class mutates the same two lists.
    reports = []
    logReports = []
class wiki():
    # Module-wide SQLite handle, opened at import time against a hard-coded
    # path.  check_same_thread=False because the EventSource reader and bot
    # callbacks run on different threads.  NOTE(review): sqlite3 connections
    # are still not thread-safe without external locking - confirm access is
    # serialised by the caller.
    db = sqlite3.connect("/home/ubuntu/.sopel/modules/wiki.db", check_same_thread=False)
    c = db.cursor()
    # Single config row: (stream URL, bot account, bot password, CSRF token,
    # bot nick).  NOTE(review): credentials live in the database in the clear.
    data = c.execute('SELECT * from config;').fetchall()[0]
    stream, botAct, botPass, csrf, botNick = data
    # Wikis whose events are hushed (ignored) by default.
    hushList = ["simplewiki", "ptwiki", "enwiki", "wikidata", "metawiki"]
def checkTable(project):
# Checks for tables existence. Returns 1 for True and 0 for False and NoneType for error
try:
data = wiki.c.execute('SELECT name FROM sqlite_master WHERE type="table" AND name="%s";' % project).fetchone()
return data
except:
return None
def createTable(project):
# Creates a new table... Used after checking with checkTable(project)
try:
wiki.c.execute('CREATE TABLE ' + project + '(page TEXT, nick TEXT, channel TEXT, notify TEXT);')
wiki.db.commit()
return True
except:
return None
def checkPage(project, title):
# Check and see if EventStream item needs to be processed
try:
check = wiki.c.execute('SELECT * from %s where page="%s";' % (project, title)).fetchone()
return check
except:
return None
def getPage(project, title):
# If checkPage(project, title) returned an existing page, get the info to process
try:
data = wiki.c.execute('SELECT * from %s where page="%s";' % (project, title)).fetchall()
return data
except:
return None
def getPageNicks(project, page, chan):
# While processing getPage(project, title), get the specific nicks we need to notify per channel
try:
data = wiki.c.execute('SELECT nick from %s where page="%s" and channel="%s" and notify="yes";' % (project, page, chan)).fetchall()
return data
except:
return None
def checkNewPage(project, page, nick, channel):
try:
check = wiki.c.execute('SELECT * from %s where page="%s" and nick="%s" and channel="%s";' % (project, page, nick, channel)).fetchone()
return check
except:
return None
def createPage(project, page, nick, channel):
# Add a page to be watched by a nick. Should be used after checking for already watched page
try:
notify = "no"
schema = "INSERT INTO " + project + "(page, nick, channel, notify) VALUES(?,?,?,?);"
wiki.c.execute(schema, (page, nick, channel, notify))
wiki.db.commit()
return True
except:
return None
def setNotify(project, page, nick, channel, notify):
# Change the notify settings of an entry
try:
work = wiki.c.execute('UPDATE %s set notify="%s" where page="%s" and nick="%s" and channel="%s";' % (project, notify, page, nick, channel))
wiki.db.commit()
return True
except:
return None
def deletePage(project, page, nick, channel):
try:
wiki.c.execute('DELETE FROM ' + project + 'WHERE page="%s" AND channel="%s" AND nick="%s";' % (page, channel, nick))
wiki.db.commit()
return True
except:
return None
def checkSysop(actName):
# Check to see if a username is in the Global Sysops table. Returns 1 for yes, 0 for no, None for error
try:
response = wiki.c.execute('SELECT account from globalsysops where account="%s";' % actName).fetchall()
return response
except:
return None
def logSend(change):
    """Format a log-event change dict into an IRC report line and queue it
    on watcher.logReports; noisy log types (patrol/review/thanks/upload)
    are silently dropped."""
    action = str(change['log_type']).upper()
    pageLink = change['meta']['uri']
    editor = change['user']
    title = change['title']  # read to mirror the original's field access
    comment = str(change['comment']).replace('\n', '')
    if action == "NEWUSERS":
        report = "Account created: " + editor + " " + pageLink
    elif action == "BLOCK":
        report = "Log action: " + action + " || " + editor + " blocked " + pageLink + " " + comment[:200]
    elif action == "ABUSEFILTER":
        report = action + " activated by " + editor + " " + pageLink
    elif action == "MOVE":
        report = "Log action: " + action + " || " + editor + " moved " + pageLink + " " + comment[:200]
    elif action in ("PATROL", "REVIEW", "THANKS", "UPLOAD"):
        report = None
    else:
        report = "Log action: " + action + " || " + editor + " " + pageLink + " " + comment[:200]
    if report is not None:
        watcher.logReports.append(report)
def editSend(change):
    """Queue "<channel> <nicks>: ..." reports for any watched page touched
    by this edit event."""
    project = change['wiki']
    if wiki.checkTable(project) is not None:
        title = change['title']
        diff_url = change['server_url'] + "/w/index.php?diff=" + str(change['revision']['new'])
        summary = change['comment']
        if wiki.checkPage(project, title):
            records = wiki.getPage(project, title)
            # Channels with at least one notify="yes" watcher, first-seen order.
            channels = list(dict.fromkeys(r[2] for r in records if r[3] == "yes"))
            for chan in channels:
                nick_rows = wiki.getPageNicks(project, title, chan)
                # The original built the nick string by prepending, i.e. the
                # rows end up joined in reverse order — preserved here.
                nicks = " ".join(row[0] for row in reversed(nick_rows))
                watcher.reports.append(
                    chan + " " + nicks + ": " + title + " was edited. " + diff_url + " Summary: " + summary)
def dispatcher(change):
    """Route an EventStream change dict to the log or edit handler;
    anything else is ignored."""
    kind = change['type']
    if kind == "log":
        # Only report log actions by Global Sysops, outside the hushed wikis.
        if wiki.checkSysop(change['user']) and change['wiki'] not in wiki.hushList:
            logSend(change)
    elif kind == "edit":
        editSend(change)
def listener(url):
    """Blocking loop: consume the SSE stream at *url* and hand every JSON
    'message' event to dispatcher()."""
    for event in EventSource(url):
        if event.event != 'message':
            continue
        try:
            # ValueError covers both malformed JSON payloads and anything
            # dispatcher() raises as ValueError (matching the original).
            dispatcher(json.loads(event.data))
        except ValueError:
            pass
def watcherAdd(msg, nick, chan):
    """Handle "!watch add <project> <page>": create the project table if
    needed and register the (page, nick, channel) watch with pings off.
    Returns the reply string for the channel."""
    _, project, page = msg.split(' ', 2)
    if wiki.checkTable(project) is None:
        if wiki.createTable(project) is None:
            return "Error creating table! Help me Operator873..."
    if wiki.checkNewPage(project, page, nick, chan) is not None:
        return "%s: I'm already reporting changes to %s for you here." % (nick, page)
    if wiki.createPage(project, page, nick, chan) is not None:
        return "%s: I will report changes to %s on %s in this channel with no ping." % (nick, page, project)
    return "Ugh. Something blew up. Operator873 help me..."
def watcherDel(msg, nick, chan):
    """Handle "!watch del <project> <page>": remove the caller's watch.
    Returns the reply string for the channel."""
    _, project, page = msg.split(' ', 2)
    if wiki.checkNewPage(project, page, nick, chan) is None:
        return "%s: It doesn't look like you're watching %s on %s." % (nick, page, project)
    if wiki.deletePage(project, page, nick, chan) is True:
        return "%s: I won't report changes to %s on %s anymore." % (nick, page, project)
    return "Ugh. Something blew up. Operator873 help me..."
def watcherPing(msg, nick, chan):
    """Handle "!watch ping <on|off> <project> <page>": toggle the notify
    flag for the caller's watch. Returns the reply string."""
    _, switch, project, page = msg.split(' ', 3)
    if wiki.setNotify(project, page, nick, chan, switch) is None:
        return "Ugh. Something blew up. Operator873 help me..."
    return nick + ": pings are now " + switch + " for " + page + " on " + project + " in this channel."
# Single background thread consuming the EventStream; started on demand by
# the !watchstart command below.
listen = threading.Thread(target=listener, args=(wiki.stream,))
@module.require_owner(message="This function is only available to Operator873")
@module.commands('watchstart')
def watchstart(bot, trigger):
    # Owner-only: start the EventStream listener thread.
    # NOTE(review): a thread can only be started once — invoking this command
    # a second time raises RuntimeError; confirm that is acceptable.
    listen.start()
    bot.say("Listening to EventStream...", "##Operator873")
@module.interval(2)
def readlogReports(bot):
    """Flush queued log-action reports to ##873bots every 2 seconds.

    Bug fix: the original did ``remove()`` on the list it was iterating,
    which skips every other entry; draining with ``pop(0)`` delivers all
    queued reports.
    """
    while watcher.logReports:
        bot.say(watcher.logReports.pop(0), "##873bots")
@module.interval(3)
def readEditReports(bot):
    """Flush queued edit reports every 3 seconds; each entry is
    "<channel> <message>".

    Bug fix: the original mutated the list while iterating it
    (``remove()`` inside ``for``), skipping every other report.
    """
    while watcher.reports:
        report = watcher.reports.pop(0)
        channel, msg = report.split(' ', 1)
        bot.say(msg, channel)
@module.require_chanmsg(message="This message must be used in the channel")
@module.commands('watch')
def watch(bot, trigger):
    """Dispatch the !watch command: add/del a watched page, or toggle pings.

    Fixes: missing-argument check now also handles None groups (re.Match
    returns None, not "", for absent groups); the ping branch showed the
    "del" syntax in its error message; "recognzie" typo corrected.
    """
    watchAction = trigger.group(3)
    # Case-insensitive action match; backward compatible with "add"/"Add"/etc.
    action = watchAction.lower() if watchAction else ""
    if action in ("add", "+"):
        if not trigger.group(5):
            bot.say("Command seems misformed. Syntax: !watch add proj page")
        else:
            bot.say(watcherAdd(trigger.group(2), trigger.nick, trigger.sender))
    elif action in ("del", "-"):
        if not trigger.group(5):
            bot.say("Command seems misformed. Syntax: !watch del proj page")
        else:
            bot.say(watcherDel(trigger.group(2), trigger.nick, trigger.sender))
    elif action == "ping":
        if not trigger.group(6):
            bot.say("Command seems misformed. Syntax: !watch ping on/off proj page")
        else:
            bot.say(watcherPing(trigger.group(2), trigger.nick, trigger.sender))
    else:
        bot.say("I don't recognize that command. Options are: Add, Del & Ping")
<reponame>beliveau-lab/OligoMiner
#!/usr/bin/env python
# --------------------------------------------------------------------------
# OligoMiner
# bedToFastq.py
#
# (c) 2016 Molecular Systems Lab
# Wyss Institute for Biologically-Inspired Engineering
# Harvard University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# --------------------------------------------------------------------------
# Script name, shown in the argparse description below.
scriptName = 'bedToFastq'
# Script version, shown alongside the name in the argparse description.
Version = '1.7'
# Standard-library module for command-line argument parsing.
import argparse
def convertBedToFastq(inputFile, outNameVal):
    """Converts a .bed file to a .fastq file.

    Args:
        inputFile: path to a .bed file whose first four tab-separated
            columns are chromosome, start, stop, sequence.
        outNameVal: output name prefix, or None to derive it from the
            input filename with its extension stripped.

    Writes '<prefix>.fastq' with one record per .bed line.
    """
    import os

    # Stem of the input filename; os.path.splitext correctly handles paths
    # containing directories or extra dots, unlike split('.')[0].
    fileName = os.path.splitext(str(inputFile))[0]

    with open(inputFile, 'r') as f:
        file_read = [line.strip() for line in f]

    outList = []
    for row in file_read:
        if not row:
            # Skip blank lines instead of crashing on them.
            continue
        # Split each line once (the original re-split every line per column).
        chrom, start, stop, probeSeq = row.split('\t')[:4]
        # '~' is the highest Phred quality character; assign it to every base
        # as an arbitrary placeholder score.
        quals = '~' * len(probeSeq)
        outList.append('@%s:%s-%s\n%s\n+\n%s' % (chrom, start, stop, probeSeq, quals))

    outName = fileName if outNameVal is None else outNameVal

    # Context manager guarantees the output file is closed even on error.
    with open('%s.fastq' % outName, 'w') as output:
        output.write('\n'.join(outList))
def main():
    """Converts a .bed file to a .fastq file, taking the filenames as
    command line arguments."""
    parser = argparse.ArgumentParser(
        description='%s version %s. Requires a .bed file with first four '
                    'columns in the format chromosome <tab> start <tab> stop '
                    '<tab> sequence such as the .bed files produced by '
                    'outputClean. Returns a .fastq file.' % (scriptName, Version))
    required = parser.add_argument_group('required arguments')
    required.add_argument('-f', '--file', action='store', required=True,
                          help='The .bed file to convert to .fastq')
    parser.add_argument('-o', '--output', action='store', default=None,
                        type=str,
                        help='Specify the name prefix of the output file')
    args = parser.parse_args()
    convertBedToFastq(args.file, args.output)


if __name__ == '__main__':
    main()
|
import unittest
from mock import mock
from parameterized import parameterized
from conans.test.utils.conanfile import MockConanfile, MockSettings
from conans.client.tools import OSInfo
from conans.errors import ConanInvalidConfiguration, ConanException
from conans.tools import check_min_cppstd, valid_min_cppstd
class UserInputTests(unittest.TestCase):
    def test_check_cppstd_type(self):
        """check_min_cppstd must reject a non-numeric cppstd argument."""
        recipe = MockConanfile(MockSettings({}))
        with self.assertRaises(ConanException) as ctx:
            check_min_cppstd(recipe, "gnu17", False)
        self.assertEqual("cppstd parameter must be a number", str(ctx.exception))
class CheckMinCppStdTests(unittest.TestCase):
    """Tests for check_min_cppstd, which raises ConanInvalidConfiguration /
    ConanException when the recipe's cppstd cannot satisfy the minimum."""

    def _create_conanfile(self, compiler, version, os, cppstd, libcxx=None):
        # Build a minimal mocked recipe with the given compiler settings;
        # compiler.libcxx is only added when provided.
        settings = MockSettings({"arch": "x86_64",
                                 "build_type": "Debug",
                                 "os": os,
                                 "compiler": compiler,
                                 "compiler.version": version,
                                 "compiler.cppstd": cppstd})
        if libcxx:
            settings.values["compiler.libcxx"] = libcxx
        conanfile = MockConanfile(settings)
        return conanfile

    @parameterized.expand(["98", "11", "14", "17"])
    def test_check_min_cppstd_from_settings(self, cppstd):
        """ check_min_cppstd must accept cppstd less/equal than cppstd in settings
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", "17", "libstdc++")
        check_min_cppstd(conanfile, cppstd, False)

    @parameterized.expand(["98", "11", "14"])
    def test_check_min_cppstd_from_outdated_settings(self, cppstd):
        """ check_min_cppstd must raise when cppstd is greater when supported on settings
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", cppstd, "libstdc++")
        with self.assertRaises(ConanInvalidConfiguration) as raises:
            check_min_cppstd(conanfile, "17", False)
        self.assertEqual("Current cppstd ({}) is lower than the required C++ standard "
                         "(17).".format(cppstd), str(raises.exception))

    @parameterized.expand(["98", "11", "14", "17"])
    def test_check_min_cppstd_from_settings_with_extension(self, cppstd):
        """ current cppstd in settings must has GNU extension when extensions is enabled
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", "gnu17", "libstdc++")
        check_min_cppstd(conanfile, cppstd, True)
        # Dropping the "gnu" prefix must make the same call fail.
        conanfile.settings.values["compiler.cppstd"] = "17"
        with self.assertRaises(ConanException) as raises:
            check_min_cppstd(conanfile, cppstd, True)
        self.assertEqual("The cppstd GNU extension is required", str(raises.exception))

    def test_check_min_cppstd_unsupported_standard(self):
        """ check_min_cppstd must raise when the compiler does not support a standard
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", None, "libstdc++")
        with self.assertRaises(ConanInvalidConfiguration) as raises:
            check_min_cppstd(conanfile, "42", False)
        self.assertEqual("Current cppstd (gnu14) is lower than the required C++ standard (42).",
                         str(raises.exception))

    def test_check_min_cppstd_gnu_compiler_extension(self):
        """ Current compiler must support GNU extension on Linux when extensions is required
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", None, "libstdc++")
        # Force a Linux platform and a plain (non-gnu) default cppstd so the
        # GNU-extension requirement cannot be satisfied.
        with mock.patch("platform.system", mock.MagicMock(return_value="Linux")):
            with mock.patch.object(OSInfo, '_get_linux_distro_info'):
                with mock.patch("conans.client.tools.settings.cppstd_default", return_value="17"):
                    with self.assertRaises(ConanException) as raises:
                        check_min_cppstd(conanfile, "17", True)
                    self.assertEqual("The cppstd GNU extension is required", str(raises.exception))

    def test_no_compiler_declared(self):
        # Without a compiler setting, cppstd cannot be resolved at all.
        conanfile = self._create_conanfile(None, None, "Linux", None, "libstdc++")
        with self.assertRaises(ConanException) as raises:
            check_min_cppstd(conanfile, "14", False)
        self.assertEqual("Could not obtain cppstd because there is no declared compiler in the "
                         "'settings' field of the recipe.", str(raises.exception))
class ValidMinCppstdTests(unittest.TestCase):
    """Tests for valid_min_cppstd, the boolean (non-raising) counterpart of
    check_min_cppstd."""

    def _create_conanfile(self, compiler, version, os, cppstd, libcxx=None):
        # Build a minimal mocked recipe with the given compiler settings;
        # compiler.libcxx is only added when provided.
        settings = MockSettings({"arch": "x86_64",
                                 "build_type": "Debug",
                                 "os": os,
                                 "compiler": compiler,
                                 "compiler.version": version,
                                 "compiler.cppstd": cppstd})
        if libcxx:
            settings.values["compiler.libcxx"] = libcxx
        conanfile = MockConanfile(settings)
        return conanfile

    @parameterized.expand(["98", "11", "14", "17"])
    def test_valid_min_cppstd_from_settings(self, cppstd):
        """ valid_min_cppstd must accept cppstd less/equal than cppstd in settings
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", "17", "libstdc++")
        self.assertTrue(valid_min_cppstd(conanfile, cppstd, False))

    @parameterized.expand(["98", "11", "14"])
    def test_valid_min_cppstd_from_outdated_settings(self, cppstd):
        """ valid_min_cppstd returns False when cppstd is greater when supported on settings
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", cppstd, "libstdc++")
        self.assertFalse(valid_min_cppstd(conanfile, "17", False))

    @parameterized.expand(["98", "11", "14", "17"])
    def test_valid_min_cppstd_from_settings_with_extension(self, cppstd):
        """ valid_min_cppstd must returns True when current cppstd in settings has GNU extension and
        extensions is enabled
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", "gnu17", "libstdc++")
        self.assertTrue(valid_min_cppstd(conanfile, cppstd, True))
        # Dropping the "gnu" prefix must flip the result to False.
        conanfile.settings.values["compiler.cppstd"] = "17"
        self.assertFalse(valid_min_cppstd(conanfile, cppstd, True))

    def test_valid_min_cppstd_unsupported_standard(self):
        """ valid_min_cppstd must returns False when the compiler does not support a standard
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", None, "libstdc++")
        self.assertFalse(valid_min_cppstd(conanfile, "42", False))

    def test_valid_min_cppstd_gnu_compiler_extension(self):
        """ valid_min_cppstd must returns False when current compiler does not support GNU extension
        on Linux and extensions is required
        """
        conanfile = self._create_conanfile("gcc", "9", "Linux", None, "libstdc++")
        # Force a Linux platform whose default cppstd ("gnu1z") is below 20.
        with mock.patch("platform.system", mock.MagicMock(return_value="Linux")):
            with mock.patch.object(OSInfo, '_get_linux_distro_info'):
                with mock.patch("conans.client.tools.settings.cppstd_default", return_value="gnu1z"):
                    self.assertFalse(valid_min_cppstd(conanfile, "20", True))

    @parameterized.expand(["98", "11", "14", "17"])
    def test_min_cppstd_mingw_windows(self, cppstd):
        """ GNU extensions HAS effect on Windows when running a cross-building for Linux
        """
        with mock.patch("platform.system", mock.MagicMock(return_value="Windows")):
            conanfile = self._create_conanfile("gcc", "9", "Linux", "gnu17", "libstdc++")
            self.assertTrue(valid_min_cppstd(conanfile, cppstd, True))
            conanfile.settings.values["compiler.cppstd"] = "17"
            self.assertFalse(valid_min_cppstd(conanfile, cppstd, True))
|
#by tom, for tom, nothing to see here, walk away.
#version 2.1: modified logic, separated die() and log_parsing_error(), tests ok
#version 2.0: host status check added, now testing
#version 1.1: iface status removed, cpu and mem adjusted, now testing
#version 1.0: ok, it does work, but logfile format changed, partial in rewrite in 1.1
#version 0.1: functionality ok, proceed with production data testing
import re
import os
import sys
import fnmatch
import logging as l
#######GLOBAL VARS !!######
logger=None  # alert logger ("fwlog"), initialised by setup_logger()
errorLog=None  # parse-error logger ("errorlog"), initialised by setup_errorlog()
cputhreshold=60  # CPU usage percentage above which an alert is logged
memthreshold=60  # memory usage percentage above which an alert is logged
debug=True  # verbose mode: INFO-level error log plus stderr echo
#prefix="/fwcheck"
prefix="."  # directory holding the check_fw-* input logs and the output logs
###########################
def usage_and_exit():
    """Print the terse failure marker and terminate with status -1."""
    # Equivalent to the original `print "die\n"`: the text plus its own
    # trailing newline plus the print-added newline.
    sys.stdout.write("die\n\n")
    sys.exit(-1)
def setup_errorlog():
    """Initialise the global parse-error logger, writing to
    ITM-fwlog-check-status-errorlog.log under *prefix*; level is INFO in
    debug mode, ERROR otherwise."""
    global errorLog
    global debug
    global prefix
    level = l.INFO if debug else l.ERROR
    errorLog = l.getLogger("errorlog")
    errorLog.setLevel(level)
    handler = l.FileHandler("%s/ITM-fwlog-check-status-errorlog.log" % prefix)
    handler.setLevel(level)
    handler.setFormatter(l.Formatter('%(asctime)s : %(levelname)s : %(message)s'))
    errorLog.addHandler(handler)
    return
def log_parsing_error(msg):
    """Record a log-format/parse problem in the error log (does not exit).

    Removed the dead ``msg += '\\n'`` statement: it mutated a local that was
    never used afterwards.
    """
    global errorLog
    # NOTE(review): crashes if called before setup_errorlog(); the lazy-init
    # guard below was disabled upstream — confirm init order before relying on it.
    #if errorLog == None: setup_errorlog()
    errorLog.critical(msg)
    return
#like parsing_error, but also exit
def die(msg):
    """Log *msg* as critical and terminate, using the message (with a
    trailing newline) as the exit status text."""
    global errorLog
    #if errorLog == None: setup_errorlog()
    errorLog.critical(msg)
    sys.exit(msg + '\n')
def log_error(fwname, error, reason):
    """Append one alert line to the fwlog:
    <timestamp : fwname : error-what : reason>."""
    global logger
    # logger.error(...) is shorthand for logger.log(l.ERROR, ...).
    logger.error("%s : %s : %s", fwname, error, reason)
    return
def setup_logger():
    """Initialise the global alert logger, writing ERROR-level lines to
    fwlog-check.log under *prefix*."""
    global logger
    global prefix
    logger = l.getLogger("fwlog")
    logger.setLevel(l.ERROR)
    handler = l.FileHandler('%s/fwlog-check.log' % prefix)
    handler.setLevel(l.ERROR)
    handler.setFormatter(l.Formatter('%(asctime)s : %(message)s'))
    logger.addHandler(handler)
    return
def check_cpu_status(fwname, logcontent):
    """Extract the 1-minute CPU load from *logcontent* and raise an alert
    when it exceeds cputhreshold; exactly one matching line is expected."""
    global debug
    global cputhreshold
    # Example: CPU utilization for 5 seconds = 27%; 1 minute: 29%; 5 minutes: 29%
    matches = re.findall('CPU utilization for 5 seconds =\s+\d+%; 1 minute:\s+(\d+)%; 5 minutes:\s+\d+%.*', logcontent)
    if len(matches) > 1:
        log_parsing_error("%s : Parse Error, cpu line count >1, not correct" % fwname)
        return
    if not matches:
        log_parsing_error("%s : Parse Error, cpu load data not found" % fwname)
        return
    if int(matches[0]) > cputhreshold:
        log_error(fwname, "CPU ALERT", "%s%% in use" % matches[0])
    return
def check_mem_status(fwname, logcontent):
    """Extract the used-memory percentage from *logcontent* and raise an
    alert when it exceeds memthreshold; exactly one matching line expected."""
    global debug
    global memthreshold
    #'^Used memory:\s+\d+ bytes \( ?(\d+)%\)$'
    matches = re.findall('Used memory:\s+\d+ bytes \( ?(\d+)%\)', logcontent, flags=re.M)
    if len(matches) > 1:
        log_parsing_error("%s : Parse Error, memory line count >1, not correct" % fwname)
        return
    if not matches:
        log_parsing_error("%s : Parse Error, memory usage data not found" % fwname)
        return
    if int(matches[0]) > memthreshold:
        log_error(fwname, "MEMORY ALERT", "%s%% in use" % matches[0])
    return
def check_fw_status(fwname, logcontent):
    """Verify HA role/status consistency: a Primary host must be Active and
    a Secondary host must be Standby; log an alert otherwise."""
    global debug
    matches = re.findall('This host: (\w+) - (\w+)', logcontent)
    if len(matches) > 1:
        log_parsing_error("%s : Parse Error, host status line count >1, not correct" % fwname)
        return
    if len(matches) < 1 or len(matches[0]) < 2:
        log_parsing_error("%s : Parse Error, host status data not found or incorrect" % fwname)
        return
    host, status = matches[0]
    if host not in ("Primary", "Secondary"):
        log_parsing_error("%s : Parse Error, incorrect data" % fwname)
        return
    #3.1-check
    if host == "Primary" and status != "Active":
        log_error(fwname, "STATUS ALERT", "Primary host not in Active status")
    if host == "Secondary" and status != "Standby":
        log_error(fwname, "STATUS ALERT", "Secondary host not in Standby status")
    return
def main():
    """Scan *prefix* for check_fw-* log files and run the CPU, memory and
    HA-status checks on each one; parse problems go to the error log,
    threshold breaches to the alert log."""
    global debug
    global errorLog
    global prefix
    #0.1-setup logging facility
    setup_logger()
    #0.2 - setup error log
    setup_errorlog()
    #0.3-for all log files, only if named fwcheck_*
    logpath="%s/" % prefix #production
    #logpath="./fwcheck/" #testing
    logfiles=[]
    try:
        for file in os.listdir(logpath):
            if fnmatch.fnmatch(file, 'check_fw-*'):
                logfiles.append(file)
        if debug:
            msg = "log files found: %s" % ", ".join(logfiles)
            sys.stderr.write(msg+'\n')
            errorLog.info(msg)
        # An empty directory is treated the same as a missing one.
        if len(logfiles)== 0 : raise OSError
    except OSError:
        die("logpath %s not found or no logs inside" % logpath)
    for logfile in logfiles:
        # Firewall name is whatever follows the "check_fw-" filename prefix.
        fwname = re.findall('check_fw-(.*)',logfile)[0]
        if fwname == None: die("Init Error, log files names not correct")
        if debug:
            print fwname + '\n'
        #XXX ugly special case: old firewalls produce logs that are "non compliant"
        #with the normal formatting. For now we skip them; in production such logs
        #are expected to be gone, and on a non-compliant file the script should
        #rightly fail.
        # NOTE(review): despite the comment, this skip runs unconditionally,
        # not only in debug mode — confirm intent.
        if fwname == 'bruxelles' or fwname == 'contarini' or fwname == 'rovigo' or fwname == 'intranet':
            msg="skipping fw %s" % fwname
            errorLog.info(msg)
            sys.stderr.write(msg+'\n')
            continue
        f=open(logpath+logfile)
        logcontent=f.read()
        f.close()
        #1-check interfaces
        #not needed anymore, removed
        #2-check cpu and memory
        check_cpu_status(fwname, logcontent)
        check_mem_status(fwname, logcontent)
        #3-Host status check:
        check_fw_status(fwname, logcontent)
    return

if __name__ == "__main__":
    #Run as main program
    main()
|
<reponame>maxipi/python-monthly-calendar-plot
import matplotlib.pyplot as plt
import pandas as pd
import calendar
import copy
import seaborn as sns
from math import ceil
def monthly_calendar_figure(series, cols=3, cmap='RdYlGn_r', min_value=0.001, color_unter='lightgray', h=12, w=17):
    """
    create a calendar with each month separate. week numbers as rows and day of the week names as columns

    Args:
        series (pandas.Series): Timeseries with a daily DatetimeIndex
        cols (int): number of columns of months in the figure
        cmap (str or matplotlib.colors.Colormap): colormap for the heatmap values
        min_value (float): values below this are drawn in ``color_unter``
        color_unter (str): color used for cells whose value is under ``min_value``
        h (float): figure height in inches
        w (float): figure width in inches

    Returns:
        matplotlib.pyplot.Figure: figure with of the calendar
    """
    # One group (and one subplot) per calendar month.
    month_groups = series.groupby(series.index.to_period('M'))
    rows = ceil(month_groups.ngroups / cols)
    fig, axes = plt.subplots(rows, cols)
    fig.set_size_inches(h=h, w=w)
    # (left, bottom, right, top)
    fig.tight_layout(pad=1.0, #h_pad=h_pad, w_pad=w_pad,
                     rect=(0, 0, 1, 0.97)
                     )
    # from left column to right column
    axes = axes.T
    if isinstance(cmap, str):
        # Copy before mutating: get_cmap returns a shared/global instance.
        cmap = copy.copy(plt.cm.get_cmap(cmap))
    cmap.set_under(color_unter)
    weekday_names = list(calendar.day_abbr)
    for (month, month_ts), ax in zip(month_groups, axes.reshape(rows * cols, )):  # month, month_ts
        print(month)
        month_df = month_ts.index.isocalendar()
        month_df['ts'] = month_ts
        month_df['day_of_month'] = month_ts.index.day
        # Encode year+ISO-week as year.week (e.g. 2021.05) to get unique row keys.
        month_df['year_week'] = month_df.year + month_df.week / 100
        df = pd.pivot(month_df, index='year_week', columns='day', values='ts')
        # from "1" to "Mon", ...
        df.columns = weekday_names
        ax = sns.heatmap(df, annot=True, square=False, linewidths=1, cmap=cmap, vmin=min_value, vmax=series.max() + 1,
                         cbar=False, ax=ax)
        ax.set_ylabel('')
        ax.set_title(month.strftime('%Y - %B'))
        ax.xaxis.tick_top()
        # [str(x) for x in ax.get_yticklabels()]
        # Recover the ISO week number from the fractional part of year_week.
        ax.set_yticklabels(((df.index - df.index.astype(int)).values*100).round(0).astype(int))
        ax.set_ylim(6, 0)
        # ax.yaxis.set_ticks_position('none')
        ax.tick_params(axis=u'both', which=u'both', width=0, length=0.01)
        ax.tick_params(axis='y', which=u'both', rotation=0)
        # ---------------------------------
        # day annotation
        for week, dayofweek, day in month_df[['year_week', 'day', 'day_of_month']].values:
            row = df.index.tolist().index(week)
            # row = (week - month_df.year_week.min())*100
            col = dayofweek
            # Small day-of-month label in the top-left corner of each cell.
            ax.text(col-1+0.1, row+0.2, f'{day:2d}', ha='left', va='top', size=7, family='monospace',
                    bbox=dict(boxstyle='round', facecolor='white', alpha=0.5))
        # day = pd.pivot(month_df, index='week', columns='day', values='day_of_month')
        # cmap2 = plt.cm.get_cmap('RdYlGn')
        # cmap2.set_gamma(100)
        # ax = sns.heatmap(df, annot=day, square=False, linewidths=1, cmap=cmap2, vmin=None, vmax=None,
        #                  cbar=False, ax=ax, fmt='0.0f',
        #                  annot_kws=dict(horizontalalignment='legt',
        #                                 fontsize=8,
        #                                 verticalalignment='top'
        #                                 ))
    # fig.tight_layout()
    return fig
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# auto_py2to3 is licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
# http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# Create: 2021-2-1
import os
import re
import sys
import time
import json
import deco
import requests
import prettytable as pt
from collections import Counter
from collections import defaultdict
from datetime import datetime
from .utils import (format_date_en2standard,
is_number,
find_files,
print)
__all__ = ["libraries_detect_and_recommend"]
def _version_str2tuple(vs):
    """
    Tool function, which provides the data structure of the Python version
    of the string form converted into a tuple
    :param vs: string code
    :return: 3-tuple (major, minor, micro) of strings; missing parts become
             "0" / "*"
    """
    parts = vs.split(".")
    numeric = [is_number(p) for p in parts]
    if len(parts) == 1 and numeric[0]:
        # "3" -> ("3", "0", "*")
        return parts[0], "0", "*"
    if len(parts) == 2 and all(numeric):
        # "3.6" -> ("3", "6", "*")
        return parts[0], parts[1], "*"
    if len(parts) == 3 and all(numeric):
        # "3.6.1" -> ("3", "6", "1")
        return tuple(parts)
    if len(parts) == 3 and numeric[0] and numeric[1] and parts[2] == "*":
        # "3.6.*" -> ("3", "6", "*")
        return parts[0], parts[1], "*"
    raise ValueError(
        "Unable to determine the string form of the Python version, "
        "please troubleshoot the reason or submit a PR to the developer!"
    )
def _update_python_versions():
    """
    Python release date configuration
    :return: dict with a "timestamp" and a "versions" mapping of
             version string -> release date
    """
    # Scrape version names and release dates from the official docs index.
    html = requests.get(url="https://www.python.org/doc/versions/").text
    matched = re.findall("Python (.*?)</a>, documentation released on (.*?)</li>", html)
    version_dates = {}
    for ver, en_date in matched:
        # Some entries carry a trailing period; strip it before parsing.
        if not en_date[-1].isdigit():
            en_date = en_date[:-1]
        version_dates[ver] = format_date_en2standard(en_date, "%d %B %Y")
    versions = {
        "timestamp": time.time(),
        "versions": version_dates
    }
    # Cache next to this module so later runs can skip the network call.
    with open(os.path.dirname(os.path.abspath(__file__)) + "/python_versions.json", "w", encoding="utf-8") as f:
        json.dump(versions, f, ensure_ascii=False)
    return versions
def get_requirements_library(path):
    """
    Get all the dependent libraries in the specified requirements.txt
    :param path: str
    :return: dict mapping library name -> pinned version ("" when unpinned)
    """
    requirements = {}
    for req_file in find_files(path=path, pattern="*requirements*.txt"):
        with open(req_file, "r", encoding="utf-8") as f:
            for raw in f:
                entry = raw.strip()
                if not entry:
                    continue
                if "==" in entry:
                    name, _, version = entry.partition("==")
                    requirements[name] = version
                else:
                    # Unpinned requirement: keep the whole line as the name.
                    requirements[entry] = ""
    return requirements
@deco.concurrent.threaded(processes=8)
def find_python_version_by_library_version(ln, lv, update_step=30):
    """
    Get the Python version applicable to the specified dependent library
    :param update_step: days after which the cached Python release table is refreshed
    :param ln: library name on PyPI
    :param lv: pinned library version string ("" when unpinned)
    :return: dict with keys "version", "python_version" (list of floats),
             "newest_version"
    """
    results = {
        "version": lv,
        "python_version": [],
        "newest_version": ""
    }
    # Request detailed library version release information data
    response = requests.get(url="https://pypi.org/pypi/{0}/json".format(ln)).json()
    newest_library_version = response.get("info", {}).get("version", "")
    results["newest_version"] = newest_library_version
    if not lv:
        # Unpinned requirement: nothing to analyse beyond the newest version.
        print("Python dependency library ({0}) does not have version.".format(ln))
        return results
    # Get the timeline of Python release version
    if not os.path.exists(os.path.dirname(os.path.abspath(__file__)) + "/python_versions.json"):
        # Determine whether there is a timetable cache for the Python release version
        versions = _update_python_versions()["versions"]
    else:
        with open(os.path.dirname(os.path.abspath(__file__)) + "/python_versions.json", "r", encoding="utf-8") as f:
            versions_dict = json.load(f)
        # Refresh the cache when it is older than update_step days.
        if (datetime.utcfromtimestamp(
                time.time()
        ) - datetime.utcfromtimestamp(
            versions_dict["timestamp"])
        ).days > update_step:
            versions = _update_python_versions()["versions"]
        else:
            versions = versions_dict["versions"]
    # Calculate the exact release time of the dependent library version
    library_version_times = [
        format_date_en2standard(
            item["upload_time_iso_8601"], "%Y-%m-%dT%H:%M:%S.%fZ"
        ) for item in response.get("releases", {}).get(lv, [])
    ]
    if not library_version_times:
        print("Python dependency library ({0}) does not have version({1}).".format(ln, lv))
        return results
    # Use the most common upload date among the release's files.
    extract_library_version_time = format_date_en2standard(
        Counter(library_version_times).most_common(1)[0][0],
        "%Y-%m-%d",
        return_type="timeObject"
    )
    # Screening strategy:
    # 1. Time filtering according to the time of the library release version and
    # the time of the Python release version
    support_versions = [version for version, _ in filter(
        lambda x: x[1] > 0,
        [(_, (extract_library_version_time - format_date_en2standard(
            date, "%Y-%m-%d", return_type="timeObject"
        )).days) for _, date in versions.items()]
    )]
    # 2. Filter according to the requires_python of the library release version
    requires_python = set()
    for item in response.get("releases", {}).get(lv, []):
        if item["requires_python"]:
            for _ in item["requires_python"].split(","):
                requires_python.add(_)
    # NOTE(review): version comparisons below use float(version[:3]), so
    # "3.10" collapses to 3.1 and collides with Python 3.1 — confirm.
    for option in requires_python:
        option = option.replace(" ", "")
        if option[:2] == "!=":
            major, minor, micro = _version_str2tuple(vs=option[2:])
            if micro == "*":
                support_versions = [
                    version for version in support_versions if
                    version[:3] != "{0}.{1}".format(major, minor)
                ]
            else:
                support_versions = [
                    version for version in support_versions if
                    version != "{0}.{1}.{2}".format(major, minor, micro[0])
                ]
        elif option[:2] == ">=":
            major, minor, micro = _version_str2tuple(vs=option[2:])
            if micro == "*":
                support_versions = [
                    version for version in support_versions if
                    float(version[:3]) >= float("{0}.{1}".format(major, minor))
                ]
            else:
                support_versions = [
                    version for version in support_versions if
                    int(version.replace(".", "")) >= int("{0}.{1}.{2}".format(major, minor, micro[0]))
                ]
        elif option[:2] == "<=":
            major, minor, micro = _version_str2tuple(vs=option[2:])
            if micro == "*":
                support_versions = [
                    version for version in support_versions if
                    float(version[:3]) <= float("{0}.{1}".format(major, minor))
                ]
            else:
                support_versions = [
                    version for version in support_versions if
                    int(version.replace(".", "")) <= int("{0}.{1}.{2}".format(major, minor, micro[0]))
                ]
        elif option[:2] == "==":
            major, minor, micro = _version_str2tuple(vs=option[2:])
            if micro == "*":
                support_versions = [
                    version for version in support_versions if
                    version[:3] == "{0}.{1}".format(major, minor)
                ]
            else:
                support_versions = [
                    version for version in support_versions if
                    version == "{0}.{1}.{2}".format(major, minor, micro[0])
                ]
        elif option[0] == ">" and option[1].isdigit():
            major, minor, micro = _version_str2tuple(vs=option[1:])
            # NOTE(review): this branch's wildcard test (not micro or
            # micro[0] == "*") differs from the others (micro == "*") — verify.
            if not micro or micro[0] == "*":
                support_versions = [
                    version for version in support_versions if
                    float(version[:3]) > float("{0}.{1}".format(major, minor))
                ]
            else:
                support_versions = [
                    version for version in support_versions if
                    int(version.replace(".", "")) > int("{0}.{1}.{2}".format(major, minor, micro[0]))
                ]
        elif option[0] == "<" and option[1].isdigit():
            major, minor, micro = _version_str2tuple(vs=option[1:])
            if micro == "*":
                support_versions = [
                    version for version in support_versions if
                    float(version[:3]) < float("{0}.{1}".format(major, minor))
                ]
            else:
                support_versions = [
                    version for version in support_versions if
                    int(version.replace(".", "")) < int("{0}.{1}.{2}".format(major, minor, micro[0]))
                ]
        else:
            print(
                "The form of the string in the Python dependency library cannot be judged. "
                "Please troubleshoot the reason or submit a PR to the developer."
            )
    # 3. Filtering according to the requirements_python of the release
    # version of the library is unsuccessful,
    # select the general version of classifiers in info to determine
    if not requires_python:
        # print("not requires_python", ln, lv)
        classifiers_versions = set()
        for item in requests.get(
                url="https://pypi.org/pypi/{0}/{1}/json".format(ln, lv)
        ).json().get("info", {}).get("classifiers", []):
            if "Python :: " in item:
                ver = item.split(" :: ")[-1]
                if is_number(ver):
                    classifiers_versions.add(float(ver))
        support_versions = [
            version for version in support_versions if
            float(version[:3]) in classifiers_versions
        ]
    results["python_version"] = list({float(version[:3]) for version in support_versions})
    return results
@deco.synchronized
def find_python_versions_by_library_versions(name2version, update_step=30):
    """
    Resolve the supported Python versions for every (library, version) pair.

    Serialized (via ``deco.synchronized``) aggregation wrapper around the
    per-library lookup ``find_python_version_by_library_version``.
    :param name2version: dict mapping library name -> pinned version string
    :param update_step: progress/update interval forwarded to the per-library lookup
    :return: dict mapping library name -> per-library result dict
    """
    # The original defaultdict(dict) was unnecessary: every key is assigned
    # exactly once, so a plain dict comprehension expresses this directly.
    return {
        ln: find_python_version_by_library_version(ln, lv, update_step)
        for ln, lv in name2version.items()
    }
def libraries_detect_and_recommend(target_path):
    """
    Check whether the versions of the project's dependent libraries are suitable
    for the current Python environment and write a recommended requirements file.
    Main function.
    :param target_path: str, root path of the project to analyse
    :return: bool, ignore
    """
    print(">>> Current Python Version: {0}.".format(sys.version))
    detect_recommend_results = pt.PrettyTable(["No", "Library", "Version", "Support Status", "Recommend Version"])
    python_version = float("{0}.{1}".format(sys.version_info.major, sys.version_info.minor))
    print(">>> Statistical analysis of dependent libraries to adapt to the current Python version: Loading...")
    # NOTE: the original body was wrapped in `try: ... except Exception as e:
    # raise e`, a no-op that only obscured the failure point; exceptions now
    # propagate directly, with identical observable behavior.
    requirements_libraries = get_requirements_library(
        path=target_path
    )
    python_versions_libraries = find_python_versions_by_library_versions(
        name2version=requirements_libraries
    )
    for i, (ln, versions) in enumerate(python_versions_libraries.items()):
        # A library is "supported" when the running interpreter's major.minor
        # appears in the versions it was resolved against.
        supported = python_version in versions["python_version"]
        detect_recommend_results.add_row([
            str(i + 1),
            ln,
            versions["version"],
            "√" if supported else "×",
            versions["version"] if supported else versions["newest_version"],
        ])
    # Emit the recommended pin set next to the analysed project.
    with open(target_path + "/requirements_REC.txt", "w", encoding="utf-8") as f:
        for ln, versions in python_versions_libraries.items():
            if python_version in versions["python_version"]:
                f.write("{0}=={1}\n".format(ln, versions["version"]))
            else:
                f.write("{0}=={1}\n".format(ln, versions["newest_version"]))
    print(detect_recommend_results)
    return True
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 <NAME> <<EMAIL>>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""Helper functions for documentation, etc."""
import inspect
import logging
import urllib.parse
logger = logging.getLogger(__name__)
def inspect_kwargs(kallable):
    """Return a mapping of keyword argument names to default values for *kallable*.

    Only parameters that declare a default are included.  Falls back to the
    legacy ``inspect.getargspec`` API on interpreters without
    ``inspect.signature`` (Python < 3.3).

    Parameters
    ----------
    kallable: callable
        The function or method to introspect.

    Returns
    -------
    dict
        Parameter name -> default value for every parameter with a default;
        empty dict when introspection is impossible (e.g. mocked callables).
    """
    #
    # inspect.getargspec got deprecated in Py3.4, and calling it spews
    # deprecation warnings that we'd prefer to avoid. Unfortunately, older
    # versions of Python (<3.3) did not have inspect.signature, so we need to
    # handle them the old-fashioned getargspec way.
    #
    try:
        signature = inspect.signature(kallable)
    except AttributeError:
        try:
            args, varargs, keywords, defaults = inspect.getargspec(kallable)
        except TypeError:
            #
            # Happens under Py2.7 with mocking.
            #
            return {}
        if not defaults:
            return {}
        # Defaults align with the trailing parameters.
        supported_keywords = args[-len(defaults):]
        return dict(zip(supported_keywords, defaults))
    else:
        return {
            name: param.default
            for name, param in signature.parameters.items()
            # BUG FIX: compare against the sentinel with `is not`, not `!=`.
            # `!=` invokes the default's __eq__, which raises for values with
            # elementwise comparison semantics (e.g. numpy arrays).
            if param.default is not inspect.Parameter.empty
        }
def check_kwargs(kallable, kwargs):
    """Filter *kwargs* down to the keyword arguments *kallable* supports.

    Parameters
    ----------
    kallable: callable
        A function or method to test
    kwargs: dict
        The keyword arguments to check.  Keys the callable does not accept
        are dropped and reported via a warning.

    Returns
    -------
    dict
        A dictionary of argument names and values supported by the callable.
    """
    supported = set(inspect_kwargs(kallable))
    rejected = [key for key in sorted(kwargs) if key not in supported]
    if rejected:
        logger.warning('ignoring unsupported keyword arguments: %r', rejected)
    return {key: value for key, value in kwargs.items() if key in supported}
def clamp(value, minval, maxval):
    """Clamp a numeric value to a specific range.

    Parameters
    ----------
    value: numeric
        The value to clamp.
    minval: numeric
        The lower bound.
    maxval: numeric
        The upper bound.

    Returns
    -------
    numeric
        The clamped value, guaranteed to lie in ``[minval, maxval]``.
    """
    # Cap from above first, then enforce the floor.
    capped = min(value, maxval)
    return max(capped, minval)
def make_range_string(start, stop=None):
    """Create a byte range specifier in accordance with RFC-2616.

    Parameters
    ----------
    start: int
        The start of the byte range
    stop: int, optional
        The end of the byte range. If unspecified, indicates EOF.

    Returns
    -------
    str
        A byte range specifier.
    """
    #
    # https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35
    #
    open_ended = stop is None
    template = 'bytes=%d-' if open_ended else 'bytes=%d-%d'
    values = (start,) if open_ended else (start, stop)
    return template % values
def safe_urlsplit(url):
    """Split *url* without treating embedded question marks as query delimiters.

    A question mark (?) in a URL normally starts the querystring, but some
    schemes (S3, GS) allow literal question marks inside object keys.  The
    question marks are temporarily swapped for newlines before splitting,
    which is safe because urlsplit ignores newlines and raw newlines never
    appear unencoded in well-formed URLs.

    See Also
    --------
    https://github.com/python/cpython/blob/3.7/Lib/urllib/parse.py
    https://github.com/RaRe-Technologies/smart_open/issues/285
    https://github.com/RaRe-Technologies/smart_open/issues/458
    """
    masked = url.replace('?', '\n')
    parts = urllib.parse.urlsplit(masked, allow_fragments=False)
    restored_path = parts.path.replace('\n', '?')
    # Query and fragment are deliberately emptied: everything stays in path.
    return urllib.parse.SplitResult(parts.scheme, parts.netloc, restored_path, '', '')
|
<filename>ocean_provider/routes/compute.py
#
# Copyright 2021 Ocean Protocol Foundation
# SPDX-License-Identifier: Apache-2.0
#
import json
import logging
from flask import Response, jsonify, request
from flask_sieve import validate
from requests.models import PreparedRequest
from ocean_lib.common.http_requests.requests_session import get_requests_session
from ocean_provider.exceptions import InvalidSignatureError
from ocean_provider.log import setup_logging
from ocean_provider.user_nonce import get_nonce, increment_nonce
from ocean_provider.utils.accounts import sign_message, verify_signature
from ocean_provider.utils.basics import LocalFileAdapter, get_provider_wallet, get_web3
from ocean_provider.utils.util import (
build_download_response,
get_compute_endpoint,
get_compute_result_endpoint,
get_request_data,
process_compute_request,
service_unavailable,
)
from ocean_provider.validation.algo import WorkflowValidator
from ocean_provider.validation.provider_requests import (
ComputeRequest,
ComputeStartRequest,
UnsignedComputeRequest,
ComputeGetResult,
)
from . import services
# Configure application-wide logging before anything else runs.
setup_logging()
# Wallet this provider uses to sign messages sent to the operator-service.
provider_wallet = get_provider_wallet()
# Shared HTTP session; the file:// adapter allows fetching local files
# (useful for tests and local development).
requests_session = get_requests_session()
requests_session.mount("file://", LocalFileAdapter())
logger = logging.getLogger(__name__)
# Headers attached to every request/response proxied to the operator-service.
standard_headers = {"Content-type": "application/json", "Connection": "close"}
@services.route("/compute", methods=["DELETE"])
@validate(ComputeRequest)
def computeDelete():
    """Deletes a workflow.
    ---
    tags:
      - services
    consumes:
      - application/json
    parameters:
      - name: signature
        in: query
        description: Signature of the documentId to verify that the consumer has rights to download the asset.
        type: string
      - name: documentId
        in: query
        description: The ID of the asset
        required: true
        type: string
      - name: consumerAddress
        in: query
        description: The consumer address.
        required: true
        type: string
      - name: jobId
        in: query
        description: JobId.
        type: string
    responses:
      200:
        description: Call to the operator-service was successful.
      400:
        description: One of the required attributes is missing.
      401:
        description: Invalid asset data.
      503:
        description: Service Unavailable
    """
    data = get_request_data(request)
    logger.info(f"computeDelete called. {data}")
    try:
        # Validates/normalizes the request and verifies the consumer signature.
        body = process_compute_request(data)
        # Proxy the delete to the operator-service and relay its response.
        response = requests_session.delete(
            get_compute_endpoint(),
            params=body,
            headers=standard_headers,
        )
        increment_nonce(body["owner"])
        return Response(
            response.content,
            response.status_code,
            headers=standard_headers,
        )
    # BUG FIX: the original caught `(ValueError, Exception)`; ValueError is a
    # subclass of Exception, so the tuple was redundant (flake8-bugbear B014).
    except Exception as e:
        return service_unavailable(e, data, logger)
@services.route("/compute", methods=["PUT"])
@validate(ComputeRequest)
def computeStop():
    """Stop the execution of a workflow.
    ---
    tags:
      - services
    consumes:
      - application/json
    parameters:
      - name: signature
        in: query
        description: Signature of (consumerAddress+jobId+documentId) to verify the consumer of
          this compute job/asset. The signature uses ethereum based signing method
          (see https://github.com/ethereum/EIPs/pull/683)
        type: string
      - name: documentId
        in: query
        description: The ID of the asset. If not provided, all currently running compute
          jobs will be stopped for the specified consumerAddress
        required: true
        type: string
      - name: consumerAddress
        in: query
        description: The consumer ethereum address.
        required: true
        type: string
      - name: jobId
        in: query
        description: The ID of the compute job. If not provided, all running compute jobs of
          the specified consumerAddress/documentId are suspended
        type: string
    responses:
      200:
        description: Call to the operator-service was successful.
      400:
        description: One of the required attributes is missing.
      401:
        description: Consumer signature is invalid or failed verification.
      503:
        description: Service unavailable
    """
    data = get_request_data(request)
    logger.info(f"computeStop called. {data}")
    try:
        # Validates/normalizes the request and verifies the consumer signature.
        body = process_compute_request(data)
        # Proxy the stop (PUT) to the operator-service and relay its response.
        response = requests_session.put(
            get_compute_endpoint(),
            params=body,
            headers=standard_headers,
        )
        increment_nonce(body["owner"])
        return Response(
            response.content,
            response.status_code,
            headers=standard_headers,
        )
    # BUG FIX: `(ValueError, Exception)` was redundant — Exception already
    # covers ValueError (flake8-bugbear B014).
    except Exception as e:
        return service_unavailable(e, data, logger)
@services.route("/compute", methods=["GET"])
@validate(UnsignedComputeRequest)
def computeStatus():
    """Get status for a specific jobId/documentId/owner
    ---
    tags:
      - services
    consumes:
      - application/json
    parameters:
      - name: signature
        in: query
        description: Signature of (consumerAddress+jobId+documentId) to verify the consumer of
          this asset/compute job. The signature uses ethereum based signing method
          (see https://github.com/ethereum/EIPs/pull/683)
        type: string
      - name: documentId
        in: query
        description: The ID of the asset. If not provided, the status of all
          currently running and old compute jobs for the specified consumerAddress will be returned.
        required: true
        type: string
      - name: consumerAddress
        in: query
        description: The consumer ethereum address.
        required: true
        type: string
      - name: jobId
        in: query
        description: The ID of the compute job. If not provided, all running compute jobs of
          the specified consumerAddress/documentId are suspended
        type: string
    responses:
      200:
        description: Call to the operator-service was successful.
      400:
        description: One of the required attributes is missing.
      401:
        description: Consumer signature is invalid or failed verification.
      503:
        description: Service Unavailable
    """
    data = get_request_data(request)
    logger.info(f"computeStatus called. {data}")
    try:
        body = process_compute_request(data)
        response = requests_session.get(
            get_compute_endpoint(),
            params=body,
            headers=standard_headers,
        )
        _response = response.content
        # The following section is needed only to ensure backward compatibility.
        # It will be removed soon; make sure you are going to update your C2D backends.
        signed_request = bool(data.get("signature"))
        if signed_request:
            owner = data.get("consumerAddress")
            did = data.get("documentId")
            jobId = data.get("jobId")
            original_msg = f"{owner}{jobId}{did}"
            try:
                verify_signature(
                    owner, data.get("signature"), original_msg, get_nonce(owner)
                )
            except InvalidSignatureError:
                # Treat a bad signature as an unsigned request: URLs get filtered.
                signed_request = False
            increment_nonce(owner)
        # Filter status info if signature is not given or failed validation
        if not signed_request:
            resp_content = json.loads(response.content.decode("utf-8"))
            if not isinstance(resp_content, list):
                resp_content = [resp_content]
            _response = []
            # Result/log URLs are only disclosed to verified consumers.
            keys_to_filter = [
                "resultsUrl",
                "algorithmLogUrl",
                "resultsDid",
            ]
            for job_info in resp_content:
                for k in keys_to_filter:
                    job_info.pop(k, None)
                _response.append(job_info)
            _response = json.dumps(_response)
        return Response(
            _response,
            response.status_code,
            headers=standard_headers,
        )
    # BUG FIX: `(ValueError, Exception)` was redundant — Exception already
    # covers ValueError (flake8-bugbear B014).
    except Exception as e:
        return service_unavailable(e, data, logger)
@services.route("/compute", methods=["POST"])
@validate(ComputeStartRequest)
def computeStart():
    """Call the execution of a workflow.
    ---
    tags:
      - services
    consumes:
      - application/json
    parameters:
      - name: signature
        in: query
        description: Signature of (consumerAddress+jobId+documentId) to verify the consumer of
          this asset/compute job. The signature uses ethereum based signing method
          (see https://github.com/ethereum/EIPs/pull/683)
        type: string
      - name: consumerAddress
        in: query
        description: The consumer ethereum address.
        required: true
        type: string
      - name: algorithmDid
        in: query
        description: The DID of the algorithm Asset to be executed
        required: false
        type: string
      - name: algorithmMeta
        in: query
        description: json object that define the algorithm attributes and url or raw code
        required: false
        type: json string
      - name: output
        in: query
        description: json object that define the output section
        required: true
        type: json string
    responses:
      200:
        description: Call to the operator-service was successful.
      400:
        description: One of the required attributes is missing.
      401:
        description: Consumer signature is invalid or failed verification
      503:
        description: Service unavailable
    """
    data = get_request_data(request)
    logger.info(f"computeStart called. {data}")
    try:
        consumer_address = data.get("consumerAddress")
        # Validate the whole compute request (assets, algorithm, permissions).
        validator = WorkflowValidator(
            get_web3(), consumer_address, provider_wallet, data
        )
        status = validator.validate()
        if not status:
            return jsonify(error=validator.error), 400
        workflow = validator.workflow
        # workflow is ready, push it to operator
        logger.info("Sending: %s", workflow)
        tx_id = data.get("transferTxId")
        did = data.get("documentId")
        # The provider countersigns the document id so the operator can verify
        # the job was authorized by this provider.
        msg_to_sign = f"{provider_wallet.address}{did}"
        payload = {
            "workflow": workflow,
            "providerSignature": sign_message(msg_to_sign, provider_wallet),
            "documentId": did,
            "agreementId": tx_id,
            "owner": consumer_address,
            "providerAddress": provider_wallet.address,
        }
        response = requests_session.post(
            get_compute_endpoint(),
            data=json.dumps(payload),
            headers=standard_headers,
        )
        increment_nonce(consumer_address)
        return Response(
            response.content,
            response.status_code,
            headers=standard_headers,
        )
    # BUG FIX: `(ValueError, KeyError, Exception)` was redundant — Exception
    # already covers both subclasses (flake8-bugbear B014).
    except Exception as e:
        return service_unavailable(e, data, logger)
@services.route("/computeResult", methods=["GET"])
@validate(ComputeGetResult)
def computeResult():
    """Allows download of asset data file.
    ---
    tags:
      - services
    consumes:
      - application/json
    parameters:
      - name: consumerAddress
        in: query
        description: The consumer address.
        required: true
        type: string
      - name: jobId
        in: query
        description: JobId
        required: true
        type: string
      - name: index
        in: query
        description: Result index
        required: true
      - name: signature
        in: query
        description: Signature of (consumerAddress+jobId+index+nonce) to verify that the consumer has rights to download the result
    responses:
      200:
        description: Content of the result
      400:
        description: One of the required attributes is missing.
      404:
        description: Result not found
      503:
        description: Service Unavailable
    """
    data = get_request_data(request)
    logger.info(f"computeResult endpoint called. {data}")
    # we sign the same message as consumer does, but using our key
    msg_to_sign = f"{data.get('jobId')}{data.get('index')}{data.get('consumerAddress')}"
    query_params = {
        "index": data.get("index"),
        "consumerAddress": data.get("consumerAddress"),
        "jobId": data.get("jobId"),
        "consumerSignature": data.get("signature"),
        "providerSignature": sign_message(msg_to_sign, provider_wallet),
    }
    # Build the final operator-service URL with the query string attached.
    prepared = PreparedRequest()
    prepared.prepare_url(get_compute_result_endpoint(), query_params)
    result_url = prepared.url
    logger.debug(f"Done processing computeResult, url: {result_url}")
    increment_nonce(data.get("consumerAddress"))
    try:
        return build_download_response(
            request, requests_session, result_url, result_url, None
        )
    except Exception as e:
        return service_unavailable(
            e,
            {
                "jobId": data.get("jobId"),
                "index": data.get("index"),
                "consumerAddress": data.get("consumerAddress"),
            },
            logger,
        )
|
<gh_stars>10-100
import pandas as pd
import numpy as np
import os
from scipy.stats import rankdata
import math
import argparse
def parse_args():
    """Parse the command-line options for the ensembling script."""
    option_specs = [
        ("--enspath", "./data", "Path to folder with all csvs"),
        ("--enstype", "loop", "Type of ensembling to be performed - Current options: loop / sa"),
        ("--exp", "experiment", "Name of experiment for csv's"),
    ]
    parser = argparse.ArgumentParser()
    for flag, default, help_text in option_specs:
        parser.add_argument(flag, type=str, default=default, help=help_text)
    return parser.parse_args()
### FUNCTIONS IMPLEMENTING ENSEMBLE METHODS ###
### HELPERS ###
### AVERAGES ###
def simple_average(targets, example, weights=None, power=1, normalize=False):
    """
    Average prediction columns into a single submission frame.

    targets: df with target values as columns
    example: output df example (e.g. including ID - make sure to adjust iloc below if target is not at 1)
    weights: per-submission weights (any numeric sequence); default is equal weighting
    power: optional for power averaging
    normalize: whether to min-max normalize targets between 0 & 1 first
    """
    n_models = len(targets.columns)
    if weights is None:
        weights = n_models * [1.0 / n_models]
    else:
        # BUG FIX: np.asarray accepts plain Python lists/tuples; the original
        # `weights / np.sum(weights)` raised TypeError for non-array weights.
        weights = np.asarray(weights, dtype=float)
        weights = weights / np.sum(weights)
    preds = example.copy()
    # Column 1 is assumed to hold the prediction (see docstring).
    preds.iloc[:, 1] = np.zeros(len(preds))
    if normalize:
        targets = (targets - targets.min()) / (targets.max() - targets.min())
    for i in range(n_models):
        preds.iloc[:, 1] = np.add(
            preds.iloc[:, 1], weights[i] * (targets.iloc[:, i].astype(float) ** power)
        )
    return preds
### APPLYING THE HELPER FUNCTIONS ###
def sa_wrapper(data_path="./data", exp=None):
    """
    Applies simple average over all prediction csv files found in data_path.

    data_path: path to folder with X * (dev_seen, test_seen & test_unseen) .csv files
    exp: experiment name used for the output folder and file prefixes; defaults
        to the module-level ``args.exp`` for backward compatibility with CLI use.
    """
    if exp is None:
        exp = args.exp  # NOTE: relies on the __main__ guard having parsed args
    # Make sure the lists will be ordered, i.e. test[0] is the same model as devs[0]
    dev, test, test_unseen = [], [], []
    dev_probas, test_probas, test_unseen_probas = {}, {}, {}  # Never dynamically add to a pd Dataframe
    for csv in sorted(os.listdir(data_path)):
        if ".csv" in csv:
            print("Included in Simple Average: ", csv)
            # BUG FIX: the original used `data_path + csv` without a path
            # separator, breaking for the default "./data" (no trailing slash);
            # also read each csv once instead of twice.
            frame = pd.read_csv(os.path.join(data_path, csv))
            if ("dev" in csv) or ("val" in csv):
                dev.append(frame)
                dev_probas[csv[:-8]] = frame.proba.values
            elif "test_unseen" in csv:
                test_unseen.append(frame)
                test_unseen_probas[csv[:-14]] = frame.proba.values
            elif "test" in csv:
                test.append(frame)
                test_probas[csv[:-7]] = frame.proba.values
    dev_probas = pd.DataFrame(dev_probas)
    test_probas = pd.DataFrame(test_probas)
    test_unseen_probas = pd.DataFrame(test_unseen_probas)
    dev_SA = simple_average(dev_probas, dev[0])
    test_SA = simple_average(test_probas, test[0])
    test_unseen_SA = simple_average(test_unseen_probas, test_unseen[0])
    # Create output dir
    os.makedirs(os.path.join(data_path, exp), exist_ok=True)
    # Replace the per-model csvs with the averaged submissions.
    for csv in sorted(os.listdir(data_path)):
        if ".csv" in csv:
            if ("dev" in csv) or ("val" in csv):
                os.remove(os.path.join(data_path, csv))
                dev_SA.to_csv(os.path.join(data_path, exp, exp + "_dev_seen_SA.csv"), index=False)
            elif "test_unseen" in csv:
                os.remove(os.path.join(data_path, csv))
                test_unseen_SA.to_csv(os.path.join(data_path, exp, exp + "_test_unseen_SA.csv"), index=False)
            elif "test" in csv:
                os.remove(os.path.join(data_path, csv))
                test_SA.to_csv(os.path.join(data_path, exp, exp + "_test_seen_SA.csv"), index=False)
if __name__ == "__main__":
    # Parse CLI options into the module-level `args` used by sa_wrapper.
    args = parse_args()
    dispatch = {"sa": lambda: sa_wrapper(args.enspath)}
    handler = dispatch.get(args.enstype)
    if handler is not None:
        handler()
    else:
        print(args.enstype, " is not yet enabled. Feel free to add the code :)")
|
<gh_stars>0
# -*- coding: utf-8 -*-
from PyQt4 import QtGui, QtCore, Qt
class QRoundProgressBar(QtGui.QWidget):
    """Circular progress bar widget with donut, pie and line rendering styles.

    The widget renders into an off-screen ARGB image on every paintEvent and
    supports an optional conical gradient brush for the value arc plus a
    printf-like text template using %v (value), %p (percent) and %m (max).
    """
    # Bar body rendering styles.
    StyleDonut = 1
    StylePie = 2
    StyleLine = 3
    # Null-position angles in degrees (where the value arc starts).
    PositionLeft = 180
    PositionTop = 90
    PositionRight = 0
    PositionBottom = -90
    # Bit flags recording which placeholders occur in self.format.
    UF_VALUE = 1
    UF_PERCENT = 2
    UF_MAX = 4
    def __init__(self):
        super(QRoundProgressBar, self).__init__()
        self.min = 0  # lower bound of the value range
        self.max = 100  # upper bound of the value range
        self.value = 25  # current value
        self.nullPosition = self.PositionTop  # angle where the arc begins
        self.barStyle = self.StyleDonut  # active rendering style
        self.outlinePenWidth =1  # pen width of the outer circle
        self.dataPenWidth = 1  # pen width of the value arc
        self.rebuildBrush = False  # gradient brush is rebuilt lazily in paintEvent
        self.format = "%p%"  # text template shown in the center
        self.decimals = 1  # decimal places in the rendered text
        self.updateFlags = self.UF_PERCENT  # derived from self.format
        self.gradientData = []  # list of (position, QColor) gradient stops
        self.donutThicknessRatio = 0.75  # inner/outer radius ratio (donut hole size)
    def setRange(self, min, max):
        """Set the value range; swaps inverted bounds and clamps the current value."""
        self.min = min
        self.max = max
        if self.max < self.min:
            self.max, self.min = self.min, self.max
        if self.value < self.min:
            self.value = self.min
        elif self.value > self.max:
            self.value = self.max
        if not self.gradientData:
            self.rebuildBrush = True
        self.update()
    def setMinimun(self, min):
        # NOTE(review): method name is misspelled ("Minimun") but kept — it is
        # public API used by callers.
        self.setRange(min, self.max)
    def setMaximun(self, max):
        # NOTE(review): misspelled ("Maximun") but kept as public API.
        self.setRange(self.min, max)
    def setValue(self, val):
        """Set the current value, clamped to [min, max], and repaint."""
        if self.value != val:
            if val < self.min:
                self.value = self.min
            elif val > self.max:
                self.value = self.max
            else:
                self.value = val
            self.update()
    def setNullPosition(self, position):
        """Set the start angle (degrees) of the value arc."""
        if position != self.nullPosition:
            self.nullPosition = position
            if not self.gradientData:
                self.rebuildBrush = True
            self.update()
    def setBarStyle(self, style):
        """Select one of StyleDonut / StylePie / StyleLine."""
        if style != self.barStyle:
            self.barStyle = style
            self.update()
    def setOutlinePenWidth(self, penWidth):
        """Set the pen width used for the outer circle."""
        if penWidth != self.outlinePenWidth:
            self.outlinePenWidth = penWidth
            self.update()
    def setDataPenWidth(self, penWidth):
        """Set the pen width used for the value arc outline."""
        if penWidth != self.dataPenWidth:
            self.dataPenWidth = penWidth
            self.update()
    def setDataColors(self, stopPoints):
        """Set gradient stops [(pos, QColor), ...] for the value brush."""
        if stopPoints != self.gradientData:
            self.gradientData = stopPoints
            self.rebuildBrush = True
            self.update()
    def setFormat(self, format):
        """Set the center-text template (%v value, %p percent, %m max)."""
        if format != self.format:
            self.format = format
            self.valueFormatChanged()
    def resetFormat(self):
        """Clear the text template so no center text is drawn."""
        self.format = ''
        self.valueFormatChanged()
    def setDecimals(self, count):
        """Set how many decimal places the center text shows (count >= 0)."""
        if count >= 0 and count != self.decimals:
            self.decimals = count
            self.valueFormatChanged()
    def setDonutThicknessRatio(self, val):
        """Set the donut hole ratio, clamped to [0, 1]."""
        self.donutThicknessRatio = max(0., min(val, 1.))
        self.update()
    def paintEvent(self, event):
        """Render the whole bar into an off-screen image, then blit it."""
        outerRadius = min(self.width(), self.height())
        baseRect = QtCore.QRectF(1, 1, outerRadius-2, outerRadius-2)
        buffer = QtGui.QImage(outerRadius, outerRadius, QtGui.QImage.Format_ARGB32)
        buffer.fill(0)
        p = QtGui.QPainter(buffer)
        p.setRenderHint(QtGui.QPainter.Antialiasing)
        # data brush
        self.rebuildDataBrushIfNeeded()
        # background
        self.drawBackground(p, buffer.rect())
        # base circle
        self.drawBase(p, baseRect)
        # data circle
        # NOTE(review): arc length scales with self.value rather than
        # (self.value - self.min), which looks wrong for ranges with min != 0,
        # and divides by zero when min == max — confirm intended usage.
        arcStep = 360.0 / (self.max - self.min) * self.value
        self.drawValue(p, baseRect, self.value, arcStep)
        # center circle
        innerRect, innerRadius = self.calculateInnerRect(baseRect, outerRadius)
        self.drawInnerBackground(p, innerRect)
        # text
        self.drawText(p, innerRect, innerRadius, self.value)
        # finally draw the bar
        p.end()
        painter = QtGui.QPainter(self)
        painter.drawImage(0, 0, buffer)
    def drawBackground(self, p, baseRect):
        """Fill the whole widget rect with the palette background."""
        p.fillRect(baseRect, self.palette().background())
    def drawBase(self, p, baseRect):
        """Draw the static base circle according to the active bar style."""
        bs = self.barStyle
        if bs == self.StyleDonut:
            p.setPen(QtGui.QPen(self.palette().shadow().color(), self.outlinePenWidth))
            p.setBrush(self.palette().base())
            p.drawEllipse(baseRect)
        elif bs == self.StylePie:
            p.setPen(QtGui.QPen(self.palette().base().color(), self.outlinePenWidth))
            p.setBrush(self.palette().base())
            p.drawEllipse(baseRect)
        elif bs == self.StyleLine:
            p.setPen(QtGui.QPen(self.palette().base().color(), self.outlinePenWidth))
            p.setBrush(Qt.Qt.NoBrush)
            p.drawEllipse(baseRect.adjusted(self.outlinePenWidth/2, self.outlinePenWidth/2, -self.outlinePenWidth/2, -self.outlinePenWidth/2))
    def drawValue(self, p, baseRect, value, arcLength):
        """Draw the value arc (Line style) or filled pie segment (Pie/Donut)."""
        # nothing to draw
        if value == self.min:
            return
        # for Line style
        if self.barStyle == self.StyleLine:
            p.setPen(QtGui.QPen(self.palette().highlight().color(), self.dataPenWidth))
            p.setBrush(Qt.Qt.NoBrush)
            # Qt angles are in 1/16th of a degree; negative sweep = clockwise.
            p.drawArc(baseRect.adjusted(self.outlinePenWidth/2, self.outlinePenWidth/2, -self.outlinePenWidth/2, -self.outlinePenWidth/2),
                      self.nullPosition * 16,
                      -arcLength * 16)
            return
        # for Pie and Donut styles
        dataPath = QtGui.QPainterPath()
        dataPath.setFillRule(Qt.Qt.WindingFill)
        # pie segment outer
        dataPath.moveTo(baseRect.center())
        dataPath.arcTo(baseRect, self.nullPosition, -arcLength)
        dataPath.lineTo(baseRect.center())
        p.setBrush(self.palette().highlight())
        p.setPen(QtGui.QPen(self.palette().shadow().color(), self.dataPenWidth))
        p.drawPath(dataPath)
    def calculateInnerRect(self, baseRect, outerRadius):
        """Return (rect, radius) of the inner circle that will hold the text."""
        # for Line style
        if self.barStyle == self.StyleLine:
            innerRadius = outerRadius - self.outlinePenWidth
        else:  # for Pie and Donut styles
            innerRadius = outerRadius * self.donutThicknessRatio
        delta = (outerRadius - innerRadius) / 2.
        innerRect = QtCore.QRectF(delta, delta, innerRadius, innerRadius)
        return innerRect, innerRadius
    def drawInnerBackground(self, p, innerRect):
        """Punch out the donut hole using Source composition (overwrites pixels)."""
        if self.barStyle == self.StyleDonut:
            p.setBrush(self.palette().alternateBase())
            cmod = p.compositionMode()
            p.setCompositionMode(QtGui.QPainter.CompositionMode_Source)
            p.drawEllipse(innerRect)
            p.setCompositionMode(cmod)
    def drawText(self, p, innerRect, innerRadius, value):
        """Draw the formatted value text centered in the inner circle."""
        if not self.format:
            return
        text = self.valueToText(value)
        # !!! to revise
        f = self.font()
        # f.setPixelSize(innerRadius * max(0.05, (0.35 - self.decimals * 0.08)))
        # Scale the font so the text roughly fills the inner circle.
        f.setPixelSize(innerRadius * 1.8 / len(text))
        p.setFont(f)
        textRect = innerRect
        p.setPen(self.palette().text().color())
        p.drawText(textRect, Qt.Qt.AlignCenter, text)
    def valueToText(self, value):
        """Expand %v/%p/%m placeholders in the format template for *value*."""
        textToDraw = self.format
        format_string = '{' + ':.{}f'.format(self.decimals) + '}'
        if self.updateFlags & self.UF_VALUE:
            textToDraw = textToDraw.replace("%v", format_string.format(value))
        if self.updateFlags & self.UF_PERCENT:
            percent = (value - self.min) / (self.max - self.min) * 100.0
            textToDraw = textToDraw.replace("%p", format_string.format(percent))
        if self.updateFlags & self.UF_MAX:
            # NOTE(review): "%m" renders max - min + 1, i.e. the number of
            # steps rather than the maximum itself — confirm this is intended.
            m = self.max - self.min + 1
            textToDraw = textToDraw.replace("%m", format_string.format(m))
        return textToDraw
    def valueFormatChanged(self):
        """Re-scan self.format for placeholders and cache the result as flags."""
        self.updateFlags = 0;
        if "%v" in self.format:
            self.updateFlags |= self.UF_VALUE
        if "%p" in self.format:
            self.updateFlags |= self.UF_PERCENT
        if "%m" in self.format:
            self.updateFlags |= self.UF_MAX
        self.update()
    def rebuildDataBrushIfNeeded(self):
        """Rebuild the conical gradient highlight brush if flagged dirty."""
        if self.rebuildBrush:
            self.rebuildBrush = False
            dataBrush = QtGui.QConicalGradient()
            dataBrush.setCenter(0.5,0.5)
            dataBrush.setCoordinateMode(QtGui.QGradient.StretchToDeviceMode)
            # Stops are mirrored (1 - pos) because the arc sweeps clockwise.
            for pos, color in self.gradientData:
                dataBrush.setColorAt(1.0 - pos, color)
            # angle
            dataBrush.setAngle(self.nullPosition)
            p = self.palette()
            p.setBrush(QtGui.QPalette.Highlight, dataBrush)
            self.setPalette(p)
version = 1.0  # NOTE(review): module-level version marker; not referenced in the visible code
class BattaryWidget(QtCore.QObject):
    """Battery status panel: round charge gauge plus current/voltage/power labels.

    NOTE: the class name typo ("Battary") is kept — it is the public API name.
    """

    def __init__(self):
        # BUG FIX: a QObject subclass must initialize its Qt base class,
        # otherwise signal/slot machinery (self.connect below) is unusable.
        super(BattaryWidget, self).__init__()
        self.Name = 'Move Simple'  # display name reported by getName()
        self.minVolt = 10.60  # assumed battery voltage floor — TODO confirm units/use
        self.maxVolt = 18  # assumed battery voltage ceiling — TODO confirm units/use

    def getWidget(self):
        """Build and return the QWidget containing the gauge, labels and layout.

        Creates all child widgets as attributes so other code can update the
        readouts after construction.
        """
        # Live electrical readouts.
        self.ampere = QtGui.QLabel('0.0A')
        self.volt = QtGui.QLabel('0.0V')
        self.power = QtGui.QLabel('0.0W')
        # Elapsed / estimated-remaining runtime readouts.
        self.workTimeText = QtGui.QLabel('Work:')
        self.workTime = QtGui.QLabel('0h:0m:0s')
        self.forecastTimeText = QtGui.QLabel('Forecast:')
        self.forecastTime = QtGui.QLabel('0h:0m:0s')
        # Round charge-level gauge (red -> yellow -> green).
        self.bar = QRoundProgressBar()
        self.bar.setFixedSize(50, 50)
        self.bar.setDataPenWidth(1)
        self.bar.setOutlinePenWidth(1)
        self.bar.setDonutThicknessRatio(0.5)
        self.bar.setDecimals(1)
        self.bar.setFormat('%v')
        self.bar.setNullPosition(90)
        self.bar.setBarStyle(QRoundProgressBar.StyleDonut)
        self.bar.setDataColors([(0., QtGui.QColor.fromRgb(255,0,0)), (0.5, QtGui.QColor.fromRgb(255,255,0)), (1., QtGui.QColor.fromRgb(0,255,0))])
        self.bar.setRange(0, 100)
        self.bar.setValue(0)
        # Slider drives the gauge through the setV slot (old-style PyQt4 connect).
        self.speed = QtGui.QSlider(QtCore.Qt.Horizontal)
        self.speed.setMaximum(100)
        self.connect(self.speed, QtCore.SIGNAL('valueChanged(int)'),
                     self.setV)
        # Grid: gauge on top, readouts beneath, timing rows at the bottom.
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setSpacing(1)
        self.gridLayout.addWidget(self.bar, 1, 1)
        self.gridLayout.addWidget(self.ampere, 2, 0, QtCore.Qt.AlignCenter)
        self.gridLayout.addWidget(self.volt, 2, 1, QtCore.Qt.AlignCenter)
        self.gridLayout.addWidget(self.power, 2, 2, QtCore.Qt.AlignCenter)
        self.gridLayout.addWidget(self.workTimeText, 3, 0)
        self.gridLayout.addWidget(self.workTime, 3, 2)
        self.gridLayout.addWidget(self.forecastTimeText, 4, 0, 4, 1)
        self.gridLayout.addWidget(self.forecastTime, 4, 2)
        widget = QtGui.QWidget()
        widget.setLayout(self.gridLayout)
        return widget

    def setV(self, value):
        """Slot: forward the slider value to the round gauge."""
        self.bar.setValue(value)

    def getName(self):
        """Return the widget's display name."""
        return self.Name
|
#!/usr/bin/env -S python3 -tt
# -*- coding: utf-8 -*-
#
# SPDX-License-Identifier: Apache-2.0
# Copyright 2020 - 2022 Pionix GmbH and Contributors to EVerest
#
"""
author: <EMAIL>
FIXME (aw): Module documentation.
"""
from . import __version__
from . import helpers
from datetime import datetime
from pathlib import Path
import jinja2 as j2
import argparse
# Global variables
# Root of the everest checkout; NOTE(review): appears to be assigned elsewhere
# (likely the CLI entry point) before any template rendering — confirm.
everest_dir = None
# jinja template environment and global variable
env = j2.Environment(loader=j2.FileSystemLoader(Path(__file__).parent / 'templates'),
                     lstrip_blocks=True, trim_blocks=True, undefined=j2.StrictUndefined,
                     keep_trailing_newline=True)
# Pre-loaded jinja templates, keyed by the logical artifact they generate.
templates = {
    'interface_base': env.get_template('interface-Base.hpp.j2'),
    'interface_exports': env.get_template('interface-Exports.hpp.j2'),
    'interface_impl.hpp': env.get_template('interface-Impl.hpp.j2'),
    'interface_impl.cpp': env.get_template('interface-Impl.cpp.j2'),
    'module.hpp': env.get_template('module.hpp.j2'),
    'module.cpp': env.get_template('module.cpp.j2'),
    'ld-ev.hpp': env.get_template('ld-ev.hpp.j2'),
    'ld-ev.cpp': env.get_template('ld-ev.cpp.j2'),
    'cmakelists': env.get_template('CMakeLists.txt.j2')
}
# Schema validators; empty here — presumably populated elsewhere in the module.
validators = {}
# Function declarations
def setup_jinja_env():
env.globals['timestamp'] = datetime.utcnow()
# FIXME (aw): which repo to use? everest or everest-framework?
env.globals['git'] = helpers.gather_git_info(everest_dir)
env.filters['snake_case'] = helpers.snake_case
env.filters['create_dummy_result'] = helpers.create_dummy_result
def generate_tmpl_data_for_if(interface, if_def):
    """Assemble the template rendering context for one interface definition."""
    # Published variables.
    var_entries = [
        helpers.build_type_info(var_name, var_info['type'])
        for var_name, var_info in if_def.get('vars', {}).items()
    ]
    # Callable commands, each with its argument list and optional result type.
    cmd_entries = []
    for cmd_name, cmd_info in if_def.get('cmds', {}).items():
        arg_entries = [
            helpers.build_type_info(arg_name, arg_info['type'])
            for arg_name, arg_info in cmd_info.get('arguments', {}).items()
        ]
        result_entry = None
        if 'result' in cmd_info:
            result_entry = helpers.build_type_info(None, cmd_info['result']['type'])
        cmd_entries.append({'name': cmd_name, 'args': arg_entries, 'result': result_entry})
    return {
        'info': {
            'base_class_header': f'generated/{interface}/Implementation.hpp',
            'interface': interface,
            'desc': if_def['description'],
        },
        'vars': var_entries,
        'cmds': cmd_entries,
    }
def generate_tmpl_data_for_module(module, module_def):
    """Build the template context for one module manifest.

    module: module name (also used as the C++ class name).
    module_def: validated manifest dict.
    Returns a dict with 'info', 'provides' and 'requires' entries as consumed
    by the module templates.
    """
    provides = []
    for impl_id, impl_info in module_def.get('provides', {}).items():
        impl_if = impl_info['interface']
        provides.append({
            'id': impl_id,
            'type': impl_if,
            'desc': impl_info['description'],
            'config': [helpers.build_type_info(conf_id, conf_info['type'])
                       for conf_id, conf_info in impl_info.get('config', {}).items()],
            'class_name': f'{impl_if}Impl',
            'base_class': f'{impl_if}ImplBase',
            'base_class_header': f'generated/{impl_if}/Implementation.hpp'
        })
    requires = []
    for req_id, req_info in module_def.get('requires', {}).items():
        req_if = req_info['interface']
        # min_connections=1 and max_connections=1 is the default if not
        # provided otherwise (see manifest meta schema); any other bound means
        # the requirement is fulfilled by a vector of connections.
        is_vector = (req_info.get('min_connections', 1) != 1
                     or req_info.get('max_connections', 1) != 1)
        requires.append({
            'id': req_id,
            'is_vector': is_vector,
            'type': req_if,
            'class_name': f'{req_if}Intf',
            'exports_header': f'generated/{req_if}/Interface.hpp'
        })
    module_config = [helpers.build_type_info(conf_id, conf_info['type'])
                     for conf_id, conf_info in module_def.get('config', {}).items()]
    return {
        'info': {
            'name': module,
            'class_name': module,  # FIXME (aw): enforce capital case?
            'desc': module_def['description'],
            'module_header': f'{module}.hpp',
            'module_config': module_config,
            'ld_ev_header': 'ld-ev.hpp',
            'enable_external_mqtt': module_def.get('enable_external_mqtt', False)
        },
        'provides': provides,
        'requires': requires,
    }
def construct_impl_file_paths(impl):
    """Return the (hpp, cpp) relative paths for one provided implementation.

    Paths have the form '<impl id>/<interface>Impl.{hpp,cpp}'.
    """
    stem = f'{impl["id"]}/{impl["type"]}Impl'
    return (f'{stem}.hpp', f'{stem}.cpp')
def set_impl_specific_path_vars(tmpl_data, output_path):
    """Set cpp_file_rel_path and class_header vars to implementation template data."""
    # NOTE(review): `output_path` is currently unused -- kept for interface
    # stability; confirm whether callers still need to pass it.
    for impl in tmpl_data['provides']:
        # construct_impl_file_paths returns (hpp, cpp)
        (impl['class_header'], impl['cpp_file_rel_path']) = construct_impl_file_paths(impl)
def generate_module_loader_files(mod, output_dir):
    """Render ld-ev.hpp and ld-ev.cpp for module `mod`.

    Returns a list of file-info dicts (filename/path/printable_name/content/
    last_mtime) ready for write_content_to_file.
    """
    manifest_path = everest_dir / f'modules/{mod}/manifest.json'
    module_def = helpers.load_validated_module_def(manifest_path, validators['module'])
    tmpl_data = generate_tmpl_data_for_module(mod, module_def)
    set_impl_specific_path_vars(tmpl_data, manifest_path.parent)
    # the guard is only used by the hpp template; setting it once up front is
    # harmless for the cpp render
    tmpl_data['info']['hpp_guard'] = 'LD_EV_HPP'
    manifest_mtime = manifest_path.stat().st_mtime
    return [
        {
            'filename': name,
            'path': output_dir / mod / name,
            'printable_name': f'{mod}/{name}',
            'content': templates[name].render(tmpl_data),
            'last_mtime': manifest_mtime
        }
        for name in ('ld-ev.hpp', 'ld-ev.cpp')
    ]
def generate_module_files(mod, update_flag):
    """Render every generated file for module `mod`.

    mod: module name (directory under modules/).
    update_flag: True when updating existing files -- user-editable "ev@"
        marker blocks are then harvested from the files on disk and
        re-inserted into the freshly rendered output.
    Returns {'core': [CMakeLists/module.hpp/module.cpp file-infos],
             'interfaces': [Impl.hpp/Impl.cpp file-infos per provided impl]}.
    """
    mod_files = {'core': [], 'interfaces': []}
    mod_path = everest_dir / f'modules/{mod}/manifest.json'
    mod_def = helpers.load_validated_module_def(mod_path, validators['module'])
    tmpl_data = generate_tmpl_data_for_module(mod, mod_def)
    output_path = mod_path.parent
    # FIXME (aw): we might move the following function into generate_tmp_data_for_module
    set_impl_specific_path_vars(tmpl_data, output_path)
    # The *_blocks dicts below describe the "ev@<uuid>:<version>" marker
    # format plus default content for user-editable regions in each file type.
    # NOTE(review): the regex_str values contain '\s' inside non-raw strings;
    # this works, but raw strings (r'...') would be safer.
    cmakelists_blocks = {
        'version': 'v1',
        'format_str': '# ev@{uuid}:{version}',
        'regex_str': '^(?P<indent>\s*)# ev@(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}):(?P<version>.*)$',
        'definitions': {
            'add_general': {
                'id': 'bcc62523-e22b-41d7-ba2f-825b493a3c97',
                'content': '# insert your custom targets and additional config variables here'
            },
            'add_other': {
                'id': 'c55432ab-152c-45a9-9d2e-7281d50c69c3',
                'content': '# insert other things like install cmds etc here'
            }
        }
    }
    impl_hpp_blocks = {
        'version': 'v1',
        'format_str': '// ev@{uuid}:{version}',
        'regex_str': '^(?P<indent>\s*)// ev@(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}):(?P<version>.*)$',
        'definitions': {
            'add_headers': {
                'id': '75ac1216-19eb-4182-a85c-820f1fc2c091',
                'content': '// insert your custom include headers here'
            },
            'public_defs': {
                'id': '8ea32d28-373f-4c90-ae5e-b4fcc74e2a61',
                'content': '// insert your public definitions here'
            },
            'protected_defs': {
                'id': 'd2d1847a-7b88-41dd-ad07-92785f06f5c4',
                'content': '// insert your protected definitions here'
            },
            'private_defs': {
                'id': '3370e4dd-95f4-47a9-aaec-ea76f34a66c9',
                'content': '// insert your private definitions here'
            },
            'after_class': {
                'id': '3d7da0ad-02c2-493d-9920-0bbbd56b9876',
                'content': '// insert other definitions here'
            }
        }
    }
    mod_hpp_blocks = {
        'version': 'v1',
        'format_str': '// ev@{uuid}:{version}',
        'regex_str': '^(?P<indent>\s*)// ev@(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}):(?P<version>.*)$',
        'definitions': {
            'add_headers': {
                'id': '4bf81b14-a215-475c-a1d3-0a484ae48918',
                'content': '// insert your custom include headers here'
            },
            'public_defs': {
                'id': '1fce4c5e-0ab8-41bb-90f7-14277703d2ac',
                'content': '// insert your public definitions here'
            },
            'protected_defs': {
                'id': '4714b2ab-a24f-4b95-ab81-36439e1478de',
                'content': '// insert your protected definitions here'
            },
            'private_defs': {
                'id': '211cfdbe-f69a-4cd6-a4ec-f8aaa3d1b6c8',
                'content': '// insert your private definitions here'
            },
            'after_class': {
                'id': '087e516b-124c-48df-94fb-109508c7cda9',
                'content': '// insert other definitions here'
            }
        }
    }
    # provided interface implementations (impl cpp & hpp)
    for impl in tmpl_data['provides']:
        interface = impl['type']
        (impl_hpp_file, impl_cpp_file) = construct_impl_file_paths(impl)
        # load template data for interface
        if_def, last_mtime = load_interface_defintion(interface)
        if_tmpl_data = generate_tmpl_data_for_if(interface, if_def)
        if_tmpl_data['info'].update({
            'hpp_guard': helpers.snake_case(f'{impl["id"]}_{interface}').upper() + '_IMPL_HPP',
            'config': impl['config'],
            'class_name': interface + 'Impl',
            'class_parent': interface + 'ImplBase',
            'module_header': f'../{mod}.hpp',
            'module_class': mod,
            'interface_implementation_id': impl['id']
        })
        # re-insert any user-edited marker blocks from the existing file
        if_tmpl_data['info']['blocks'] = helpers.load_tmpl_blocks(
            impl_hpp_blocks, output_path / impl_hpp_file, update_flag)
        # FIXME (aw): time stamp should include parent interfaces modification dates
        mod_files['interfaces'].append({
            'abbr': f'{impl["id"]}.hpp',
            'path': output_path / impl_hpp_file,
            'printable_name': impl_hpp_file,
            'content': templates['interface_impl.hpp'].render(if_tmpl_data),
            'last_mtime': last_mtime
        })
        mod_files['interfaces'].append({
            'abbr': f'{impl["id"]}.cpp',
            'path': output_path / impl_cpp_file,
            'printable_name': impl_cpp_file,
            'content': templates['interface_impl.cpp'].render(if_tmpl_data),
            'last_mtime': last_mtime
        })
    # CMakeLists.txt
    cmakelists_file = output_path / 'CMakeLists.txt'
    tmpl_data['info']['blocks'] = helpers.load_tmpl_blocks(cmakelists_blocks, cmakelists_file, update_flag)
    mod_files['core'].append({
        'abbr': 'cmakelists',
        'path': cmakelists_file,
        'content': templates['cmakelists'].render(tmpl_data),
        'last_mtime': mod_path.stat().st_mtime
    })
    # module.hpp
    tmpl_data['info']['hpp_guard'] = helpers.snake_case(mod).upper() + '_HPP'
    mod_hpp_file = output_path / f'{mod}.hpp'
    tmpl_data['info']['blocks'] = helpers.load_tmpl_blocks(mod_hpp_blocks, mod_hpp_file, update_flag)
    mod_files['core'].append({
        'abbr': 'module.hpp',
        'path': mod_hpp_file,
        'content': templates['module.hpp'].render(tmpl_data),
        'last_mtime': mod_path.stat().st_mtime
    })
    # module.cpp
    mod_cpp_file = output_path / f'{mod}.cpp'
    mod_files['core'].append({
        'abbr': 'module.cpp',
        'path': mod_cpp_file,
        'content': templates['module.cpp'].render(tmpl_data),
        'last_mtime': mod_path.stat().st_mtime
    })
    # printable names are shown relative to the module directory
    for file_info in [*mod_files['core'], *mod_files['interfaces']]:
        file_info['printable_name'] = file_info['path'].relative_to(output_path)
    return mod_files
def load_interface_defintion(interface):
    """Load and validate one interface definition, defaulting vars/cmds to {}.

    Returns (definition_dict, definition_file_mtime).
    NOTE: the misspelled name ("defintion") is kept for compatibility with
    existing callers.
    """
    if_path = everest_dir / f'interfaces/{interface}.json'
    if_def = helpers.load_validated_interface_def(if_path, validators['interface'])
    if_def.setdefault('vars', {})
    if_def.setdefault('cmds', {})
    return if_def, if_path.stat().st_mtime
def generate_interface_headers(interface, all_interfaces_flag, output_dir):
    """Render Implementation.hpp (providers view) and Interface.hpp (users
    view) for one interface.

    Returns {'base': file_info, 'exports': file_info}, or None when the
    definition could not be loaded and all_interfaces_flag is set.
    """
    if_parts = {'base': None, 'exports': None}
    try:
        if_def, last_mtime = load_interface_defintion(interface)
    except Exception as e:
        if not all_interfaces_flag:
            raise
        print(f'Ignoring interface {interface} with reason: {e}')
        return
    tmpl_data = generate_tmpl_data_for_if(interface, if_def)
    if_output_dir = output_dir / interface
    if_output_dir.mkdir(parents=True, exist_ok=True)
    # (part key, guard suffix, rendered class name, file name, template key)
    variants = (
        ('base', '_IMPLEMENTATION_HPP', f'{interface}ImplBase', 'Implementation.hpp', 'interface_base'),
        ('exports', '_INTERFACE_HPP', f'{interface}Intf', 'Interface.hpp', 'interface_exports'),
    )
    for part, guard_suffix, class_name, file_name, template_key in variants:
        tmpl_data['info']['hpp_guard'] = helpers.snake_case(interface).upper() + guard_suffix
        tmpl_data['info']['class_name'] = class_name
        part_file = if_output_dir / file_name
        if_parts[part] = {
            'path': part_file,
            'content': templates[template_key].render(tmpl_data),
            'last_mtime': last_mtime,
            'printable_name': part_file.relative_to(if_output_dir.parent)
        }
    return if_parts
def module_create(args):
    """CLI handler: render and write all files for a new module."""
    strategy = 'force-create' if args.force else 'create'
    mod_files = generate_module_files(args.module, False)
    # "--only which" just lists the available files and exits
    if args.only == 'which':
        helpers.print_available_mod_files(mod_files)
        return
    try:
        helpers.filter_mod_files(args.only, mod_files)
    except Exception as err:
        print(err)
        return
    for file_info in [*mod_files['core'], *mod_files['interfaces']]:
        if not args.disable_clang_format:
            helpers.clang_format(args.clang_format_file, file_info)
        helpers.write_content_to_file(file_info, strategy, args.diff)
def module_update(args):
    """CLI handler: regenerate an existing module, preserving user blocks."""
    primary_strategy = 'force-update' if args.force else 'update'
    # module.cpp is never overwritten once it exists; the other core files
    # follow the primary strategy
    strategy_by_abbr = {
        'cmakelists': primary_strategy,
        'module.hpp': primary_strategy,
        'module.cpp': 'update-if-non-existent',
    }
    # FIXME (aw): refactor out this only handling and rename it properly
    mod_files = generate_module_files(args.module, True)
    if args.only == 'which':
        helpers.print_available_mod_files(mod_files)
        return
    try:
        helpers.filter_mod_files(args.only, mod_files)
    except Exception as err:
        print(err)
        return
    if not args.disable_clang_format:
        for file_info in [*mod_files['core'], *mod_files['interfaces']]:
            helpers.clang_format(args.clang_format_file, file_info)
    for file_info in mod_files['core']:
        helpers.write_content_to_file(file_info, strategy_by_abbr[file_info['abbr']], args.diff)
    for file_info in mod_files['interfaces']:
        # impl headers may be regenerated; impl cpp files hold user code
        strategy = primary_strategy if file_info['abbr'].endswith('.hpp') else 'update-if-non-existent'
        helpers.write_content_to_file(file_info, strategy, args.diff)
def module_genld(args):
    """CLI handler: generate and force-write the ld-ev loader files."""
    if args.output_dir:
        output_dir = Path(args.output_dir).resolve()
    else:
        output_dir = everest_dir / 'build/generated/modules'
    loader_files = generate_module_loader_files(args.module, output_dir)
    if not args.disable_clang_format:
        for file_info in loader_files:
            helpers.clang_format(args.clang_format_file, file_info)
    for file_info in loader_files:
        helpers.write_content_to_file(file_info, 'force-update')
def interface_genhdr(args):
    """CLI handler: generate Implementation.hpp/Interface.hpp headers.

    With no explicit interface list, every *.json definition found in
    {everest-dir}/interfaces is processed and broken definitions are skipped.
    """
    output_dir = Path(args.output_dir).resolve() if args.output_dir else \
        everest_dir / 'build/generated/interfaces/generated'
    primary_update_strategy = 'force-update' if args.force else 'update'
    interfaces = args.interfaces
    all_interfaces = False
    if not interfaces:
        all_interfaces = True
        if_dir = everest_dir / 'interfaces'
        interfaces = [if_path.stem for if_path in if_dir.iterdir()
                      if (if_path.is_file() and if_path.suffix == '.json')]
    for interface in interfaces:
        if_parts = generate_interface_headers(interface, all_interfaces, output_dir)
        if if_parts is None:
            # interface was skipped (invalid definition in all-interfaces
            # mode); previously this fell through and crashed with a
            # TypeError on if_parts['base']
            continue
        if not args.disable_clang_format:
            helpers.clang_format(args.clang_format_file, if_parts['base'])
            helpers.clang_format(args.clang_format_file, if_parts['exports'])
        helpers.write_content_to_file(if_parts['base'], primary_update_strategy, args.diff)
        helpers.write_content_to_file(if_parts['exports'], primary_update_strategy, args.diff)
def helpers_genuuids(args):
    """CLI handler: print `args.count` freshly generated uuids.

    Raises ValueError for a non-positive count (previously a bare Exception;
    ValueError is more precise and still caught by any `except Exception`).
    """
    if args.count <= 0:
        raise ValueError(f'Invalid number ("{args.count}") of uuids to generate')
    helpers.generate_some_uuids(args.count)
def main():
    """Entry point: build the CLI, validate the directories, dispatch.

    Sets the module-level `validators` and `everest_dir` globals before
    calling the selected action handler.
    """
    global validators, everest_dir
    parser = argparse.ArgumentParser(description='Everest command line tool')
    # NOTE(review): `__version__` is not defined in this chunk -- confirm it
    # is imported/defined earlier in the file.
    parser.add_argument('--version', action='version', version=f'%(prog)s {__version__}')
    # common_parser holds the options shared by every sub-command
    common_parser = argparse.ArgumentParser(add_help=False)
    # parser.add_argument("--framework-dir", "-fd", help='directory of everest framework')
    common_parser.add_argument("--everest-dir", "-ed", type=str,
                               help='everest directory containing the interface definitions (default: .)', default=str(Path.cwd()))
    common_parser.add_argument("--framework-dir", "-fd", type=str,
                               help='everest framework directory containing the schema definitions (default: ../everest-framework)', default=str(Path.cwd() / '../everest-framework'))
    common_parser.add_argument("--clang-format-file", type=str, default=str(Path.cwd()),
                               help='Path to the directory, containing the .clang-format file (default: .)')
    common_parser.add_argument("--disable-clang-format", action='store_true', default=False,
                               help="Set this flag to disable clang-format")
    subparsers = parser.add_subparsers(metavar='<command>', help='available commands', required=True)
    parser_mod = subparsers.add_parser('module', aliases=['mod'], help='module related actions')
    parser_if = subparsers.add_parser('interface', aliases=['if'], help='interface related actions')
    parser_hlp = subparsers.add_parser('helpers', aliases=['hlp'], help='helper actions')
    # module sub-commands: create / update / generate-loader
    mod_actions = parser_mod.add_subparsers(metavar='<action>', help='available actions', required=True)
    mod_create_parser = mod_actions.add_parser('create', aliases=['c'], parents=[
        common_parser], help='create module(s)')
    mod_create_parser.add_argument('module', type=str, help='name of the module, that should be created')
    mod_create_parser.add_argument('-f', '--force', action='store_true', help='force overwriting - use with care!')
    mod_create_parser.add_argument('-d', '--diff', '--dry-run', action='store_true',
                                   help='show resulting diff on create or overwrite')
    mod_create_parser.add_argument('--only', type=str,
                                   help='Comma separated filter list of module files, that should be created. '
                                   'For a list of available files use "--only which".')
    mod_create_parser.set_defaults(action_handler=module_create)
    mod_update_parser = mod_actions.add_parser('update', aliases=['u'], parents=[
        common_parser], help='update module(s)')
    mod_update_parser.add_argument('module', type=str, help='name of the module, that should be updated')
    mod_update_parser.add_argument('-f', '--force', action='store_true', help='force overwriting')
    mod_update_parser.add_argument('-d', '--diff', '--dry-run', action='store_true', help='show resulting diff')
    mod_update_parser.add_argument('--only', type=str,
                                   help='Comma separated filter list of module files, that should be updated. '
                                   'For a list of available files use "--only which".')
    mod_update_parser.set_defaults(action_handler=module_update)
    mod_genld_parser = mod_actions.add_parser(
        'generate-loader', aliases=['gl'], parents=[common_parser], help='generate everest loader')
    mod_genld_parser.add_argument(
        'module', type=str, help='name of the module, for which the loader should be generated')
    mod_genld_parser.add_argument('-o', '--output-dir', type=str, help='Output directory for generated loader '
                                  'files (default: {everest-dir}/build/generated/module/)')
    mod_genld_parser.set_defaults(action_handler=module_genld)
    # interface sub-commands: generate-headers
    if_actions = parser_if.add_subparsers(metavar='<action>', help='available actions', required=True)
    if_genhdr_parser = if_actions.add_parser(
        'generate-headers', aliases=['gh'], parents=[common_parser], help='generate headers')
    if_genhdr_parser.add_argument('-f', '--force', action='store_true', help='force overwriting')
    if_genhdr_parser.add_argument('-o', '--output-dir', type=str, help='Output directory for generated interface '
                                  'headers (default: {everest-dir}/build/generated/include/generated)')
    if_genhdr_parser.add_argument('-d', '--diff', '--dry-run', action='store_true', help='show resulting diff')
    if_genhdr_parser.add_argument('interfaces', nargs='*', help='a list of interfaces, for which header files should '
                                  'be generated - if no interface is given, all will be processed and non-processable '
                                  'will be skipped')
    if_genhdr_parser.set_defaults(action_handler=interface_genhdr)
    # helper sub-commands: generate-uuids
    hlp_actions = parser_hlp.add_subparsers(metavar='<action>', help='available actions', required=True)
    # NOTE(review): typo in user-facing help text: "generete" -> "generate"
    hlp_genuuid_parser = hlp_actions.add_parser('generate-uuids', help='generete uuids')
    hlp_genuuid_parser.add_argument('count', type=int, default=3)
    hlp_genuuid_parser.set_defaults(action_handler=helpers_genuuids)
    args = parser.parse_args()
    # sanity-check the two directories before doing any work
    everest_dir = Path(args.everest_dir).resolve()
    if not (everest_dir / 'interfaces').exists():
        print('The default (".") xor supplied (via --everest-dir) everest directory\n'
              'doesn\'t contain an "interface" directory and therefore does not seem to be valid.\n'
              f'dir: {everest_dir}')
        exit(1)
    setup_jinja_env()
    framework_dir = Path(args.framework_dir).resolve()
    if not (framework_dir / 'schemas').exists():
        print('The default ("../everest-framework") xor supplied (via --framework-dir) everest framework directory\n'
              'doesn\'t contain an "schemas" directory and therefore does not seem to be valid.\n'
              f'dir: {framework_dir}')
        exit(1)
    validators = helpers.load_validators(framework_dir / 'schemas')
    args.action_handler(args)
if __name__ == '__main__':
    main()
|
<gh_stars>1-10
import base64
from couchdbkit import Database, ResourceNotFound, resource
from couchdbkit.ext.django.schema import Document
from django.db import models, transaction
import json
import uuid
from couchdbkit.exceptions import ResourceConflict
class SQLDocDB(Database):
    """couchdbkit Database that reads from a SQL shadow table first.

    Documents saved via SQLDoc live in SQLDocModel until a sync step pushes
    them to CouchDB; reads check SQL before falling back to couch. The
    generic write APIs (save_doc/save_docs/delete_docs) are disabled --
    writes must go through the SQLDoc schema class.
    NOTE(review): this module is Python 2 era code.
    """
    def delete_doc(self, doc, **params):
        """Delete `doc` (a doc id) from both the SQL shadow and couch.

        If the doc only existed in SQL, a synthetic ok-response with
        rev=None is returned instead of raising ResourceNotFound.
        """
        found = False
        try:
            sqldoc = SQLDocModel.objects.get(doc_id=doc)
        except SQLDocModel.DoesNotExist:
            pass
        else:
            found = True
            sqldoc.delete()
        try:
            return super(SQLDocDB, self).delete_doc(doc)
        except ResourceNotFound:
            if found:
                return {'id': doc, 'ok': True, 'rev': None}
            else:
                raise
    def copy_doc(self, doc, dest=None, headers=None):
        # push pending SQL docs to couch first so the copy sees current data
        from . import sync
        sync.sync_all()
        return super(SQLDocDB, self).copy_doc(doc, dest, headers)
    def save_doc(self, doc, encode_attachments=True, force_update=False,
                 **params):
        # disabled: saves must go through SQLDoc.save()
        raise NotImplementedError()
    def save_docs(self, docs, use_uuids=True, all_or_nothing=False,
                  **params):
        # disabled: bulk saves are unsupported for SQL-backed docs
        raise NotImplementedError()
    def delete_docs(self, docs, all_or_nothing=False, empty_on_delete=False,
                    **params):
        # disabled: bulk deletes are unsupported for SQL-backed docs
        raise NotImplementedError()
    def _get(self, docid, **params):
        """Fetch a raw doc dict from SQL, falling back to couch."""
        try:
            doc_model = SQLDocModel.objects.get(doc_id=docid)
        except SQLDocModel.DoesNotExist:
            docid = resource.escape_docid(docid)
            return self.res.get(docid, **params).json_body
        else:
            doc = doc_model.doc
            assert doc['doc_type'] == doc_model.doc_type
            assert doc['_id'] == doc_model.doc_id
            # present the SQL revision and attachment stubs as couch would
            doc['_rev'] = doc_model.sql_rev
            doc['_attachments'] = dict(
                att.format_stub() for att in
                doc_model.sqldocattachment_set.defer('payload')
            )
            return doc
    def open_doc(self, docid, **params):
        # This whole function is copied from Database.open_doc...
        wrapper = None
        if "wrapper" in params:
            wrapper = params.pop("wrapper")
        elif "schema" in params:
            schema = params.pop("schema")
            if not hasattr(schema, "wrap"):
                raise TypeError("invalid schema")
            wrapper = schema.wrap
        # ...except for this line, which is changed
        doc = self._get(docid)
        if wrapper is not None:
            if not callable(wrapper):
                raise TypeError("wrapper isn't a callable")
            return wrapper(doc)
        return doc
    def view(self, *args, **kwargs):
        # views run in couch, so sync pending SQL docs first
        from . import sync
        sync.sync_all()
        return super(SQLDocDB, self).view(*args, **kwargs)
    # couchdbkit API aliases pointing at the overridden methods above
    bulk_save = save_docs
    bulk_delete = delete_docs
    get = open_doc
class SQLDoc(Document):
    """couchdbkit Document whose saves and attachments go to SQL first.

    Documents are written to SQLDocModel/SQLDocAttachment and synced to
    CouchDB later; reads go through SQLDocDB.
    NOTE(review): Python 2 code (`unicode`, `transaction.commit_on_success`
    -- the latter is Django < 1.6 API).
    """
    @classmethod
    def get_db(cls):
        # swap in the SQL-aware database class so reads hit SQL first
        db = super(SQLDoc, cls).get_db()
        db.__class__ = SQLDocDB
        return db
    def save(self):
        """Persist the document to SQL only, bumping its sql_rev."""
        with transaction.commit_on_success():
            doc_model = self._get_and_lock_sqldoc()
            doc_model.doc_type = self.doc_type
            doc_model.sql_rev = self._new_sql_rev(doc_model.rev)
            doc_model.doc = self.to_json()
            doc_model.save()
        self._rev = doc_model.sql_rev
    def fetch_attachment(self, name):
        """Return attachment content from SQL, falling back to couch.

        Bytes that decode as UTF-8 are returned as text; otherwise the raw
        bytes are returned.
        """
        try:
            attachment = SQLDocAttachment.objects.get(doc=self._id, name=name)
        except SQLDocAttachment.DoesNotExist:
            return super(SQLDoc, self).fetch_attachment(name)
        else:
            content = attachment.content
            try:
                return content.decode('utf-8')
            except UnicodeDecodeError:
                return content
    def put_attachment(self, content, name=None, content_type=None,
                       content_length=None):
        """Store an attachment in SQL and bump the document's sql_rev."""
        with transaction.commit_on_success():
            doc_model = self._get_and_lock_sqldoc()
            try:
                attachment = (SQLDocAttachment.objects.select_for_update()
                              .only('doc', 'name')
                              .get(doc=self._id, name=name))
            except SQLDocAttachment.DoesNotExist:
                attachment = SQLDocAttachment(
                    doc=doc_model,
                    name=name
                )
            # accept file-like objects and unicode transparently
            if hasattr(content, 'read'):
                content = content.read()
            if isinstance(content, unicode):
                content = content.encode('utf-8')
            attachment.content = content
            attachment.content_type = content_type
            attachment.length = content_length or len(content)
            doc_model.sql_rev = self._new_sql_rev(doc_model.rev)
            attachment.save()
            doc_model.save()
        self._rev = doc_model.sql_rev
        # keep the in-memory attachment stubs in sync with SQL
        if self._attachments is None:
            self._attachments = {}
        self._attachments.__setitem__(*attachment.format_stub())
    def delete_attachment(self, name):
        # not supported for SQL-backed documents
        raise NotImplementedError()
    def _get_and_lock_sqldoc(self):
        """This should be done inside a transaction"""
        # brand-new docs get an id from couch's uuid pool
        if not self._id:
            self._id = self.get_db().server.next_uuid()
        try:
            doc_model = (SQLDocModel.objects.select_for_update()
                         .only('rev', 'sql_rev', 'doc_id').get(pk=self._id))
        except SQLDocModel.DoesNotExist:
            doc_model = SQLDocModel(doc_id=self._id, rev=self._rev)
        else:
            # optimistic concurrency check against the SQL revision
            if doc_model.sql_rev != self._rev:
                raise ResourceConflict(
                    '[sqlcouch] (sql)_rev {0} of doc {1} '
                    'does not match the one stored in sql: {2}'
                    .format(self._rev, self._id, doc_model.sql_rev)
                )
        return doc_model
    def _new_sql_rev(self, rev):
        # append a fresh uuid suffix to the (possibly empty) couch rev
        return (rev or '') + '-' + uuid.uuid4().hex
class SQLDocModel(models.Model):
    """SQL shadow row for a couch document pending sync."""
    # couch document id, used as the primary key
    doc_id = models.CharField(max_length=256, primary_key=True)
    # last couch revision known for this doc (NULL until first sync)
    rev = models.CharField(max_length=256, null=True)
    # docs stored in postgres will need their own rev scheme
    # that mimics couchdb's, because docs may be saved a number of times
    # in postgres before it's synced to couchdb
    # if couchdb is up to date, sql_rev and rev will be equal
    sql_rev = models.CharField(max_length=256)
    doc_type = models.CharField(max_length=20)
    # full document body, JSON-serialized
    doc_json = models.TextField()
    # True once this revision has been pushed to couch
    synced = models.BooleanField(default=False, db_index=True)
    def get_doc(self):
        return json.loads(self.doc_json)
    def set_doc(self, doc):
        self.doc_json = json.dumps(doc)
    # `doc` exposes the JSON body as a plain dict
    doc = property(get_doc, set_doc)
    def __unicode__(self):
        return ('doc_id={0} rev={1} sql_rev={2} synced={3}'
                .format(self.doc_id, self.rev, self.sql_rev, self.synced))
class SQLDocAttachment(models.Model):
    """SQL-stored attachment for a SQLDocModel document.

    The binary payload is base64-encoded into a text column; `content`
    exposes the decoded bytes.
    """
    doc = models.ForeignKey(SQLDocModel)
    name = models.CharField(max_length=256, db_index=True)
    content_type = models.CharField(max_length=256)
    # decoded payload length in bytes
    length = models.IntegerField()
    # base64-encoded attachment body
    payload = models.TextField()
    # True once this attachment has been pushed to couch
    synced = models.BooleanField(default=False, db_index=True)
    class Meta:
        unique_together = ('doc', 'name')
    def get_content(self):
        return base64.b64decode(self.payload)
    def set_content(self, content):
        self.payload = base64.b64encode(content)
    content = property(get_content, set_content)
    def format_stub(self):
        """Return (name, stub-dict) shaped like a couch attachment stub."""
        return (self.name, {
            'content_type': self.content_type,
            'length': self.length,
            'stub': True,
        })
|
<reponame>KatthakS/project_freshwater
#import get to call a get request on the site
from random import random
from bs4 import BeautifulSoup
from requests import get
from pprint import pprint
from warnings import warn
import requests
class Scraper(object):
    """Scrape craigslist apartment listings into a list of dicts.

    Each result dict has 'price' and 'title', plus 'sqft' and/or 'bed_count'
    when the posting's housing details provide them.
    """

    def __init__(self):
        # accumulates results across run() calls
        self.results_list_of_dicts = []

    def run(self, url="https://sfbay.craigslist.org/search/sfc/apa?hasPic=1&availabilityMode=0"):
        """Fetch `url` and scrape every result row on the page."""
        posts = self.set_url(url)
        return self.scrape(posts)

    def set_url(self, url):
        """GET the search page and return the list of result-row tags."""
        # get the first page of the craigslist listing
        # (the query string filters out postings without a picture)
        response = get(url)
        # throw warning for status codes that are not 200
        if response.status_code != 200:
            # BUG FIX: the original interpolated the `requests` module object
            # into the message instead of the requested URL
            warn('Request: {}; Status code: {}'.format(url, response.status_code))
        html_soup = BeautifulSoup(response.text, 'html.parser')
        # the macro-container for the housing posts
        return html_soup.find_all('li', class_='result-row')

    def scrape(self, posts):
        """Extract price/title/housing details from each result row.

        posts: iterable of bs4 tags ('result-row' <li> elements).
        Returns the accumulated results list.
        """
        for post in posts:
            d = {}
            # PRICE: strip the leading newline and '$'
            price = post.find('span', class_='result-price').text
            d['price'] = int(price.strip("\n$"))
            # TITLE
            title = post.find('a', class_='result-title hdrlnk').text
            d['title'] = title.strip("\n")
            # HOUSING DETAILS (optional) -- lookup hoisted instead of
            # re-querying the same span for every branch
            housing = post.find('span', class_='housing')
            if housing is not None:
                details = housing.text.split()
                # if the first element is square footage (e.g. '450ft2')
                if 'ft2' in details[0]:
                    d['sqft'] = int(details[0][:-3])
                # bedroom count followed by square footage
                elif len(details) > 2:
                    d['bed_count'] = housing.text.replace("br", "").split()[0]
                    d['sqft'] = int(details[2][:-3])
                # bedroom count but no square footage
                elif len(details) == 2:
                    d['bed_count'] = housing.text.replace("br", "").split()[0]
                # NOTE: result dicts may lack bed_count and/or sqft
            self.results_list_of_dicts.append(d)
        return self.results_list_of_dicts
if __name__ == "__main__":
    # demo run against the San Francisco apartment listings
    url_1 = "https://sfbay.craigslist.org/search/sfc/apa?hasPic=1&availabilityMode=0"
    s1 = Scraper()
    s1.run(url_1)
|
<gh_stars>0
from benchmark.utils import resource, benchmark
import hail as hl
@benchmark
def matrix_table_decode_and_count():
    # baseline: decode and count all rows of the profile matrix table
    mt = hl.read_matrix_table(resource('profile.mt'))
    mt._force_count_rows()
@benchmark
def matrix_table_array_arithmetic():
    # per-entry array arithmetic (PL -> dosage) on biallelic rows only
    mt = hl.read_matrix_table(resource('profile.mt'))
    mt = mt.filter_rows(mt.alleles.length() == 2)
    mt.select_entries(dosage = hl.pl_dosage(mt.PL)).select_rows()._force_count_rows()
@benchmark
def matrix_table_entries_table():
    # cost of flattening the entries into a coordinate table
    mt = hl.read_matrix_table(resource('profile.mt'))
    mt.entries()._force_count()
@benchmark
def matrix_table_rows_force_count():
    # unkeyed row-table iteration
    ht = hl.read_matrix_table(resource('profile.mt')).rows().key_by()
    ht._force_count()
@benchmark
def matrix_table_rows_is_transition():
    # simple per-row expression evaluation over the unkeyed row table
    ht = hl.read_matrix_table(resource('profile.mt')).rows().key_by()
    ht.select(is_snp = hl.is_snp(ht.alleles[0], ht.alleles[1]))._force_count()
def many_aggs(mt):
    """Return a dict of 33 entry-aggregation expressions over `mt`,
    keyed 'x0'..'x32', used to stress many simultaneous aggregations."""
    aggs = [
        hl.agg.count_where(mt.GT.is_hom_ref()),
        hl.agg.count_where(mt.GT.is_het()),
        hl.agg.count_where(mt.GT.is_hom_var()),
        hl.agg.count_where(mt.GT.is_non_ref()),
        hl.agg.count_where(mt.GT.n_alt_alleles() == 2),
        hl.agg.count_where(mt.GT.phased),
        hl.agg.count_where(mt.GT.is_haploid()),
        hl.agg.count_where(mt.GT.is_diploid()),
        hl.agg.count_where(mt.GT.ploidy == 2),
        hl.agg.fraction(mt.AD[0] > 0),
        hl.agg.fraction(mt.AD[0] < 0),
        hl.agg.fraction(mt.AD.length() < 0),
        hl.agg.fraction(mt.AD.length() > 0),
        hl.agg.fraction(mt.PL[0] > 0),
        hl.agg.fraction(mt.PL[0] < 0),
        hl.agg.fraction(mt.PL.length() < 0),
        hl.agg.fraction(mt.PL.length() > 0),
        hl.agg.fraction(mt.GQ < 0),
        hl.agg.fraction(mt.GQ > 0),
        hl.agg.fraction(mt.GQ % 2 == 0),
        hl.agg.fraction(mt.GQ % 2 != 0),
        hl.agg.fraction(mt.GQ / 5 < 10),
        hl.agg.fraction(mt.GQ / 5 <= 10),
        hl.agg.fraction(mt.GQ / 5 > 10),
        hl.agg.fraction(mt.GQ / 5 >= 10),
        hl.agg.fraction(mt.DP < 0),
        hl.agg.fraction(mt.DP > 0),
        hl.agg.fraction(mt.DP % 2 == 0),
        hl.agg.fraction(mt.DP % 2 != 0),
        hl.agg.fraction(mt.DP / 5 < 10),
        hl.agg.fraction(mt.DP / 5 <= 10),
        hl.agg.fraction(mt.DP / 5 > 10),
        hl.agg.fraction(mt.DP / 5 >= 10),
    ]
    return {f'x{i}': expr for i, expr in enumerate(aggs)}
@benchmark
def matrix_table_many_aggs_row_wise():
    # 33 simultaneous entry aggregations grouped per row
    mt = hl.read_matrix_table(resource('profile.mt'))
    mt = mt.annotate_rows(**many_aggs(mt))
    mt.rows()._force_count()
@benchmark
def matrix_table_many_aggs_col_wise():
    # same 33 aggregations grouped per column
    mt = hl.read_matrix_table(resource('profile.mt'))
    mt = mt.annotate_cols(**many_aggs(mt))
    mt.cols()._force_count()
@benchmark
def matrix_table_aggregate_entries():
    # single whole-matrix entry aggregation
    mt = hl.read_matrix_table(resource('profile.mt'))
    mt.aggregate_entries(hl.agg.stats(mt.GQ))
<reponame>MAKENTNU/web
from http import HTTPStatus
from typing import Set
from urllib.parse import urlparse
from django.test import Client
from django_hosts import reverse
from users.models import User
from util.test_utils import Get, PermissionsTestCase, assert_requesting_paths_succeeds
from .forms import MemberStatusForm
from .models import Member, Secret, SystemAccess
# Makes sure that the subdomain of all requests is `internal`
INTERNAL_CLIENT_DEFAULTS = {'SERVER_NAME': 'internal.testserver'}
def reverse_internal(viewname: str, **kwargs):
    """Reverse `viewname` on the `internal` subdomain host."""
    return reverse(viewname, kwargs=kwargs, host='internal', host_args=['internal'])
class UrlTests(PermissionsTestCase):
    def setUp(self):
        """Create a non-member, a member and a member-editor user, plus a
        logged-in test client for each (and one anonymous client)."""
        # NOTE(review): "<PASSWORD>" looks like a scrubbed placeholder; any
        # non-empty string works since the same value is used for login below.
        password = "<PASSWORD>"
        non_member_user = User.objects.create_user(username="NON_MEMBER", password=password)
        member_user = User.objects.create_user(username="MEMBER", password=password)
        member_editor_user = User.objects.create_user(username="MEMBER_EDITOR", password=password)
        self.add_permissions(member_user, 'is_internal')
        self.add_permissions(member_editor_user, 'is_internal',
                             'can_register_new_member', 'can_edit_group_membership', 'change_systemaccess')
        self.member = Member.objects.create(user=member_user)
        self.member_editor = Member.objects.create(user=member_editor_user)
        # every client targets the `internal` subdomain
        self.anon_client = Client(**INTERNAL_CLIENT_DEFAULTS)
        self.non_member_client = Client(**INTERNAL_CLIENT_DEFAULTS)
        self.member_client = Client(**INTERNAL_CLIENT_DEFAULTS)
        self.member_editor_client = Client(**INTERNAL_CLIENT_DEFAULTS)
        self.all_clients = {self.anon_client, self.non_member_client, self.member_client, self.member_editor_client}
        # NOTE(review): User objects (not username strings) are passed as
        # `username=` here -- confirm this authenticates as intended.
        self.non_member_client.login(username=non_member_user, password=password)
        self.member_client.login(username=member_user, password=password)
        self.member_editor_client.login(username=member_editor_user, password=password)
    @staticmethod
    def generic_request(client: Client, method: str, path: str, data: dict = None):
        """Dispatch a GET or POST through `client`.

        `data` is only used for POST; any other method raises ValueError.
        """
        if method == 'GET':
            return client.get(path)
        elif method == 'POST':
            return client.post(path, data)
        else:
            raise ValueError(f'Method "{method}" not supported')
    def _test_url_permissions(self, method: str, path: str, data: dict = None, *, allowed_clients: Set[Client], expected_redirect_url: str = None):
        """Assert that only `allowed_clients` may perform `method` on `path`.

        Disallowed anonymous/non-member clients must be redirected to the
        login page; disallowed members must receive a 4xx/5xx response.
        Allowed clients must get 200 OK, or the given redirect when
        `expected_redirect_url` is set.
        """
        disallowed_clients = self.all_clients - allowed_clients
        for client in disallowed_clients:
            # note: `data` is deliberately not sent for disallowed clients
            response = self.generic_request(client, method, path)
            # Non-member users should be redirected to login:
            if client in {self.anon_client, self.non_member_client}:
                self.assertEqual(response.status_code, HTTPStatus.FOUND)
                self.assertTrue(urlparse(response.url).path.startswith("/login/"))
            # Disallowed members should be rejected:
            else:
                self.assertGreaterEqual(response.status_code, 400)
        for client in allowed_clients:
            response = self.generic_request(client, method, path, data)
            if expected_redirect_url:
                self.assertRedirects(response, expected_redirect_url)
            else:
                self.assertEqual(response.status_code, HTTPStatus.OK)
def _test_internal_url(self, method: str, path: str, data: dict = None, *, expected_redirect_url: str = None):
self._test_url_permissions(method, path, data, allowed_clients={self.member_client, self.member_editor_client},
expected_redirect_url=expected_redirect_url)
def _test_editor_url(self, method: str, path: str, data: dict = None, *, expected_redirect_url: str = None):
self._test_url_permissions(method, path, data, allowed_clients={self.member_editor_client},
expected_redirect_url=expected_redirect_url)
    def test_permissions(self):
        """Exhaustively check which member URLs require editor permissions."""
        # Listing and viewing members is open to all members.
        self._test_internal_url('GET', reverse_internal('member_list'))
        self._test_internal_url('GET', reverse_internal('member_list', pk=self.member.pk))
        # Registering new members requires editor permissions.
        self._test_editor_url('GET', reverse_internal('create_member'))
        # All members can edit themselves, but only editors can edit other members
        self._test_internal_url('GET', reverse_internal('edit_member', pk=self.member.pk))
        self._test_editor_url('GET', reverse_internal('edit_member', pk=self.member_editor.pk))
        self._test_editor_url('GET', reverse_internal('member_quit', pk=self.member.pk))
        # (path name, POST data, predicate the member must satisfy afterwards).
        # The sequence is order-dependent: quit -> undo quit -> retire -> undo retire.
        path_data_assertion_tuples = (
            ('member_quit', {'date_quit': "2000-01-01", 'reason_quit': "Whatever."}, lambda member: member.quit),
            ('edit_member_status', {'status_action': MemberStatusForm.StatusAction.UNDO_QUIT}, lambda member: not member.quit),
            ('edit_member_status', {'status_action': MemberStatusForm.StatusAction.RETIRE}, lambda member: member.retired),
            ('edit_member_status', {'status_action': MemberStatusForm.StatusAction.UNDO_RETIRE}, lambda member: not member.retired),
        )
        for path, data, assertion in path_data_assertion_tuples:
            with self.subTest(path=path, data=data):
                self._test_editor_url('POST', reverse_internal(path, pk=self.member.pk), data,
                                      expected_redirect_url=f"/members/{self.member.pk}/")
                # Re-read the member so the assertion sees the POSTed change.
                self.member.refresh_from_db()
                self.assertTrue(assertion(self.member))
        for system_access in self.member.system_accesses.all():
            with self.subTest(system_access=system_access):
                # No one is allowed to change their `WEBSITE` access. Other than that,
                # all members can edit their own accesses, but only editors can edit other members'.
                allowed_clients = {self.member_client, self.member_editor_client} if system_access.name != SystemAccess.WEBSITE else set()
                self._test_url_permissions('POST', reverse_internal('edit_system_access', member_pk=self.member.pk, pk=system_access.pk),
                                           {'value': True}, allowed_clients=allowed_clients,
                                           expected_redirect_url=f"/members/{self.member.pk}/")
        for system_access in self.member_editor.system_accesses.all():
            with self.subTest(system_access=system_access):
                # No one is allowed to change their `WEBSITE` access
                allowed_clients = {self.member_editor_client} if system_access.name != SystemAccess.WEBSITE else set()
                self._test_url_permissions('POST', reverse_internal('edit_system_access', member_pk=self.member_editor.pk, pk=system_access.pk),
                                           {'value': True}, allowed_clients=allowed_clients,
                                           expected_redirect_url=f"/members/{self.member_editor.pk}/")
        self._test_internal_url('GET', reverse_internal('home'))
        # Switching language redirects to the translated home page
        # ('en' is prefixed, 'nb' presumably the default language — see URL conf).
        self._test_internal_url('POST', reverse_internal('set_language'), {'language': 'en'}, expected_redirect_url="/en/")
        self._test_internal_url('POST', reverse_internal('set_language'), {'language': 'nb'}, expected_redirect_url="/")
    def test_all_non_member_get_request_paths_succeed(self):
        """Smoke-test that every internal GET path returns successfully.

        Creates two secrets so the secret list/edit pages have content,
        then delegates the actual requests/assertions to
        ``assert_requesting_paths_succeeds``.
        """
        secret1 = Secret.objects.create(title="Key storage box", content="Code: 1234")
        secret2 = Secret.objects.create(title="YouTube account", content="<p>Email: <EMAIL></p><p>Password: password</p>")
        path_predicates = [
            # public=False: these require an internal (member) session.
            Get(reverse_internal('home'), public=False),
            Get(reverse_internal('secret_list'), public=False),
            Get(reverse_internal('create_secret'), public=False),
            Get(reverse_internal('edit_secret', pk=secret1.pk), public=False),
            Get(reverse_internal('edit_secret', pk=secret2.pk), public=False),
            # robots.txt is served untranslated and publicly.
            Get('/robots.txt', public=True, translated=False),
        ]
        assert_requesting_paths_succeeds(self, path_predicates, 'internal')
|
<reponame>SenorPez/project-cars-replay-enhancer
"""
Provides classes for default static cards.
"""
from PIL import Image, ImageDraw, ImageFont
from replayenhancer.StaticBase import StaticBase
class RaceResults(StaticBase):
    """
    Defines a class for a default Race Results title card.
    This card, by default, has the following columns:
    - Pos.: Finish position.
    - Driver: Driver name.
    - Team: Driver team (if applicable, blank else).
    - Car: Driver car.
    - Laps: Driver laps completed.
    - Stops: Pit stops (if greater than 0)
    - Time: Driver total race time.
    - Best Lap: Driver best lap.
    - Best S1: Driver best sector 1.
    - Best S2: Driver best sector 2.
    - Best S3: Driver best sector 3.
    - Points: Driver points earned.
    """
    def __init__(self, data, size=None, **kwargs):
        """Build the card's column set from the optional configuration.

        Each lookup below is derived from keys in ``kwargs`` (e.g.
        ``participant_config``, ``car_classes``, ``point_structure``); a
        missing or empty configuration key sets the lookup to None, which
        simply disables the corresponding column.
        """
        super().__init__(data, size=size, **kwargs)
        # Classify by laps completed (descending), ties broken by race time.
        self._sort_data(lambda x: (-x.driver.laps_complete, x.driver.race_time))
        try:
            name_lookup = {
                k: v['display']
                for k, v in kwargs['participant_config'].items()}
        except KeyError:
            name_lookup = None
        try:
            car_class_lookup = None
            # Only participants with a non-blank 'car' entry are listed.
            car_lookup = {
                k: v['car']
                for k, v in kwargs['participant_config'].items()
                if v['car'] != ""}
            if len(car_lookup):
                try:
                    if len(kwargs['car_classes']):
                        # driver -> (class color, class name), matched by
                        # which class's 'cars' list contains the driver's car.
                        car_class_lookup = {
                            driver: (car_class_data['color'], car_class)
                            for driver, car in car_lookup.items()
                            for car_class, car_class_data
                            in kwargs['car_classes'].items()
                            if car in car_class_data['cars']
                        }
                except KeyError:
                    car_class_lookup = None
            else:
                car_lookup = None
                car_class_lookup = None
        except KeyError:
            car_lookup = None
            car_class_lookup = None
        try:
            team_lookup = {
                k: v['team']
                for k, v in kwargs['participant_config'].items()
                if v['team'] != ""}
            if len(team_lookup) == 0:
                team_lookup = None
        except KeyError:
            team_lookup = None
        try:
            # finishing-position index -> points awarded (index 0 is the
            # bonus for fastest race lap; see _calc_points_float).
            point_structure = {
                k: v
                for k, v in enumerate(kwargs['point_structure'])}
            if len(point_structure) == 0 or not any(point_structure.values()):
                point_structure = None
        except KeyError:
            point_structure = None
        try:
            points_adjust = {
                k: v['points_adjust']
                for k, v in kwargs['participant_config'].items()
                if v['points_adjust'] != ""}
            if len(points_adjust) == 0:
                points_adjust = None
        except KeyError:
            points_adjust = None
        try:
            # Fall back to PIL's built-in font if the configured font is
            # missing or unreadable; outer except handles absent config keys.
            try:
                font = ImageFont.truetype(
                    kwargs['font'],
                    kwargs['font_size'])
            except (AttributeError, OSError):
                font = ImageFont.load_default()
            font_color = tuple(kwargs['font_color'])
        except KeyError:
            font = ImageFont.load_default()
            font_color = (0, 0, 0)
        # Assemble the columns; lookup columns key off 'driver_name'.
        self._add_column('position', 'Pos.')
        if name_lookup is None:
            self._add_column('driver_name', 'Driver')
        else:
            self._add_lookup(
                'driver_name',
                name_lookup,
                'ERROR',
                'Driver')
        if team_lookup is not None:
            self._add_lookup('driver_name', team_lookup, '', 'Team')
        if car_lookup is not None:
            self._add_lookup('driver_name', car_lookup, '', 'Car')
        if car_class_lookup is not None:
            self._add_lookup(
                'driver_name',
                {k: v for k, v in car_class_lookup.items()},
                '',
                'Car Class',
                formatter=self._car_class_formatter,
                formatter_args={
                    'text_height': font.getsize("A")[1],
                    'font': font,
                    'font_color': font_color})
        self._add_column('laps_complete', 'Laps', align='center')
        try:
            # Only show the Stops column if anyone visible on the card
            # (limited by 'result_lines', if set) actually pitted.
            result_lines = self._options['result_lines']
            stops = [x.driver.stops for x in self._data[:result_lines]]
        except KeyError:
            stops = [x.driver.stops for x in self._data]
        if any(stops):
            self._add_column('stops', 'Stops', align='center')
        self._add_column(
            'race_time',
            'Time',
            formatter=self.format_time,
            align='center')
        self._add_column(
            'best_lap',
            'Best Lap',
            formatter=self.format_time,
            align='center')
        self._add_column(
            'best_sector_1',
            'Best S1',
            formatter=self.format_time,
            align='center')
        self._add_column(
            'best_sector_2',
            'Best S2',
            formatter=self.format_time,
            align='center')
        self._add_column(
            'best_sector_3',
            'Best S3',
            formatter=self.format_time,
            align='center')
        if point_structure is not None:
            formatter_args = {
                'point_structure': point_structure,
                'points_adjust': points_adjust}
            self._add_column(
                'calc_points_data',
                'Points',
                formatter=self.calc_points,
                formatter_args=formatter_args,
                align='center')
    def calc_points(self, value, **kwargs):
        """Return the driver's race points, floored, as a display string."""
        points = self._calc_points_float(value, **kwargs)
        return str(int(points // 1))
    @staticmethod
    def format_time(seconds):
        """
        Converts seconds into seconds, minutes:seconds, or
        hours:minutes:seconds as appropriate.
        Returns "" for values that can't be parsed as a float.
        """
        try:
            minutes, seconds = divmod(float(seconds), 60)
            hours, minutes = divmod(minutes, 60)
            return_value = (int(hours), int(minutes), float(seconds))
            if hours:
                return "{0:d}:{1:0>2d}:{2:0>6.3f}".format(*return_value)
            elif minutes:
                return "{1:d}:{2:0>6.3f}".format(*return_value)
            else:
                return "{2:.3f}".format(*return_value)
        except (TypeError, ValueError):
            return ""
    def _calc_points_float(self, value, **kwargs):
        """Compute raw race points for ``value`` = (name, position, best lap).

        point_structure[0] is the fastest-lap bonus; position points come
        from point_structure[position]. 'points_adjust' entries prefixed
        with '+'/'-' are relative; bare numbers replace the total.
        """
        driver_name, position, best_lap = value
        points = 0
        try:
            # Fastest-lap bonus: awarded if this driver set the overall best lap.
            if best_lap == min(
                    [
                        entry.best_lap for entry in self._data
                        if entry.best_lap is not None]):
                points += kwargs['point_structure'][0]
            points += kwargs['point_structure'][position]
        except (KeyError, IndexError, TypeError, ValueError):
            # Missing structure/position data simply awards no points.
            points += 0
        if 'points_adjust' in kwargs \
                and kwargs['points_adjust'] is not None \
                and driver_name in kwargs['points_adjust']:
            adjust = kwargs['points_adjust'][driver_name]
            if adjust[0] == '+':
                points += float(adjust[1:])
            elif adjust[0] == '-':
                points -= float(adjust[1:])
            else:
                try:
                    # An unprefixed value overrides the computed total.
                    points = float(adjust)
                except ValueError:
                    pass
        return points
class StartingGrid(StaticBase):
    """
    Defines a class for a default Starting Grid title card.
    This card, by default, has the following columns:
    - Pos.: Starting position.
    - Driver: Driver name.
    - Team: Driver team (if applicable, blank else).
    - Car: Driver car.
    - Points: Driver series points (if applicable, blank else).
    """
    def __init__(self, data, size=None, **kwargs):
        """Build the card's columns from the optional configuration.

        Mirrors RaceResults.__init__: every lookup is derived from keys in
        ``kwargs`` and falls back to None (column disabled) when the
        configuration is missing or empty. Data is shown in the order
        given (no re-sorting, unlike RaceResults).
        """
        super().__init__(data, size=size, **kwargs)
        try:
            name_lookup = {
                k: v['display']
                for k, v in kwargs['participant_config'].items()}
        except KeyError:
            name_lookup = None
        try:
            car_class_lookup = None
            # Only participants with a non-blank 'car' entry are listed.
            car_lookup = {
                k: v['car']
                for k, v in kwargs['participant_config'].items()
                if v['car'] != ""}
            if len(car_lookup):
                try:
                    if len(kwargs['car_classes']):
                        # driver -> (class color, class name), matched by
                        # which class's 'cars' list contains the driver's car.
                        car_class_lookup = {
                            driver: (car_class_data['color'], car_class)
                            for driver, car in car_lookup.items()
                            for car_class, car_class_data
                            in kwargs['car_classes'].items()
                            if car in car_class_data['cars']
                        }
                except KeyError:
                    car_class_lookup = None
            else:
                car_lookup = None
                car_class_lookup = None
        except KeyError:
            car_lookup = None
            car_class_lookup = None
        try:
            team_lookup = {
                k: v['team']
                for k, v in kwargs['participant_config'].items()
                if v['team'] != ""}
            if len(team_lookup) == 0:
                team_lookup = None
        except KeyError:
            team_lookup = None
        try:
            point_structure = {
                k: v
                for k, v in enumerate(kwargs['point_structure'])}
            if len(point_structure) == 0 or not any(point_structure.values()):
                point_structure = None
        except KeyError:
            point_structure = None
        if point_structure is not None:
            # Pre-race series points per driver; only meaningful when a
            # point structure exists at all.
            try:
                points_lookup = {
                    k: v['points']
                    for k, v in kwargs['participant_config'].items()}
                if not any(points_lookup.values()):
                    points_lookup = None
            except KeyError:
                points_lookup = None
        else:
            points_lookup = None
        try:
            # Fall back to PIL's built-in font if the configured font is
            # missing or unreadable; outer except handles absent config keys.
            try:
                font = ImageFont.truetype(
                    kwargs['font'],
                    kwargs['font_size'])
            except (AttributeError, OSError):
                font = ImageFont.load_default()
            font_color = tuple(kwargs['font_color'])
        except KeyError:
            font = ImageFont.load_default()
            font_color = (0, 0, 0)
        # Assemble the columns; lookup columns key off 'driver_name'.
        self._add_column('position', 'Pos.')
        if name_lookup is None:
            self._add_column('driver_name', 'Driver')
        else:
            self._add_lookup(
                'driver_name',
                name_lookup,
                'ERROR',
                'Driver')
        if team_lookup is not None:
            self._add_lookup('driver_name', team_lookup, '', 'Team')
        if car_lookup is not None:
            self._add_lookup('driver_name', car_lookup, '', 'Car')
        if car_class_lookup is not None:
            self._add_lookup(
                'driver_name',
                {k: v for k, v in car_class_lookup.items()},
                '',
                'Car Class',
                formatter=self._car_class_formatter,
                formatter_args={
                    'text_height': font.getsize("A")[1],
                    'font': font,
                    'font_color': font_color})
        if points_lookup is not None or point_structure is not None:
            # NOTE(review): if points_lookup is None this passes None as the
            # lookup dict; presumably _add_lookup's default (0) covers every
            # driver then — confirm against StaticBase.
            self._add_lookup(
                'driver_name',
                points_lookup,
                0,
                'Points',
                align='center')
class SeriesStandings(RaceResults):
    """
    Defines a class for a default Series Standings title card.
    This card, by default, has the following columns:
    - Rank: Series rank.
    - Driver: Driver name.
    - Team: Driver team (if applicable, blank else).
    - Car: Driver car.
    - Points: Driver series points.
    """
    def __init__(self, data, size=None, **kwargs):
        """Build the standings card.

        Note the deliberate ``super(RaceResults, self)`` call: it skips
        RaceResults.__init__ (which adds race-result columns and its own
        sort) and invokes StaticBase.__init__ directly, while still
        inheriting RaceResults' point-calculation helpers.
        Drivers from 'additional_participant_config' (entrants not in this
        race) are merged into every lookup and appended to the data as
        AdditionalClassificationEntry placeholders.
        """
        super(RaceResults, self).__init__(data, size=size, **kwargs)
        try:
            name_lookup = {
                k: v['display']
                for k, v in kwargs['participant_config'].items()}
            if 'additional_participant_config' in kwargs:
                # Additional participants have no display name; use the key.
                for name in kwargs['additional_participant_config'].keys():
                    name_lookup[name] = name
        except KeyError:
            name_lookup = None
        try:
            car_class_lookup = None
            car_lookup = {
                k: v['car']
                for k, v in kwargs['participant_config'].items()
                if v['car'] != ""}
            if 'additional_participant_config' in kwargs:
                for name, values \
                        in kwargs['additional_participant_config'].items():
                    if values['car'] != "":
                        car_lookup[name] = values['car']
            if len(car_lookup):
                try:
                    if len(kwargs['car_classes']):
                        # driver -> (class color, class name), matched by
                        # which class's 'cars' list contains the driver's car.
                        car_class_lookup = {
                            driver: (car_class_data['color'], car_class)
                            for driver, car in car_lookup.items()
                            for car_class, car_class_data
                            in kwargs['car_classes'].items()
                            if car in car_class_data['cars']
                        }
                except KeyError:
                    car_class_lookup = None
            else:
                car_lookup = None
                car_class_lookup = None
        except KeyError:
            car_lookup = None
            car_class_lookup = None
        try:
            team_lookup = {
                k: v['team']
                for k, v in kwargs['participant_config'].items()
                if v['team'] != ""}
            if 'additional_participant_config' in kwargs:
                for name, values \
                        in kwargs['additional_participant_config'].items():
                    if values['team'] != "":
                        team_lookup[name] = values['team']
            if len(team_lookup) == 0:
                team_lookup = None
        except KeyError:
            team_lookup = None
        try:
            point_structure = {
                k: v
                for k, v in enumerate(kwargs['point_structure'])}
        except KeyError:
            point_structure = None
        try:
            points_lookup = {
                k: v['points']
                for k, v in kwargs['participant_config'].items()}
            if 'additional_participant_config' in kwargs:
                for name, values \
                        in kwargs['additional_participant_config'].items():
                    points_lookup[name] = values['points']
        except KeyError:
            points_lookup = None
        try:
            points_adjust = {
                k: v['points_adjust']
                for k, v in kwargs['participant_config'].items()
                if v['points_adjust'] != ""}
            if len(points_adjust) == 0:
                points_adjust = None
        except KeyError:
            points_adjust = None
        if 'additional_participant_config' in kwargs:
            # Placeholder rows so additional entrants appear in the standings.
            for name in kwargs['additional_participant_config'].keys():
                self._data.append(AdditionalClassificationEntry(name))
        formatter_args = {'point_structure': point_structure,
                          'points_lookup': points_lookup,
                          'points_adjust': points_adjust}
        # Sort by total series points descending, name as tiebreaker.
        self._sort_data(
            lambda x: (
                -int(self.calc_series_points(
                    x.calc_points_data, **formatter_args)),
                x.driver_name))
        if ('hide_series_zeros' in kwargs) and (
                kwargs['hide_series_zeros'] is True):
            # Optionally drop drivers with zero series points.
            self._data = [x for x in self._data if int(self.calc_series_points(
                x.calc_points_data, **formatter_args)) != 0]
        try:
            # Fall back to PIL's built-in font if the configured font is
            # missing or unreadable; outer except handles absent config keys.
            try:
                font = ImageFont.truetype(
                    kwargs['font'],
                    kwargs['font_size'])
            except (AttributeError, OSError):
                font = ImageFont.load_default()
            font_color = tuple(kwargs['font_color'])
        except KeyError:
            font = ImageFont.load_default()
            font_color = (0, 0, 0)
        self._add_column(
            'calc_points_data',
            'Rank',
            formatter=self.calc_series_rank,
            formatter_args=formatter_args)
        if name_lookup is None:
            self._add_column('driver_name', 'Driver')
        else:
            self._add_lookup(
                'driver_name',
                name_lookup,
                'ERROR',
                'Driver')
        if team_lookup is not None:
            self._add_lookup('driver_name', team_lookup, '', 'Team')
        if car_lookup is not None:
            self._add_lookup('driver_name', car_lookup, '', 'Car')
        if car_class_lookup is not None:
            self._add_lookup(
                'driver_name',
                {k: v for k, v in car_class_lookup.items()},
                '',
                'Car Class',
                formatter=self._car_class_formatter,
                formatter_args={
                    'text_height': font.getsize("A")[1],
                    'font': font,
                    'font_color': font_color})
        self._add_column(
            'calc_points_data',
            'Points',
            formatter=self.calc_series_points,
            formatter_args=formatter_args,
            align='center')
    def calc_series_points(self, value, **kwargs):
        """Total series points: prior points plus this race's floored points.

        Returns the total as a display string.
        """
        driver_name, position, best_lap = value
        try:
            points = kwargs['points_lookup'][driver_name]
        except (KeyError, TypeError):
            points = 0
        points += int(self._calc_points_float(value, **kwargs))
        return str(points)
    def calc_series_rank(self, value, **kwargs):
        """Return the driver's series rank (ties share the same rank).

        Walks self._data (already sorted by points) and assigns a new rank
        only when the points total changes.
        """
        driver_name, position, best_lap = value
        ranks = dict()
        last_points = None
        last_rank = 0
        for entry in self._data:
            if last_points != int(
                    self.calc_series_points(
                        entry.calc_points_data,
                        **kwargs)):
                last_points = int(
                    self.calc_series_points(
                        entry.calc_points_data,
                        **kwargs))
                last_rank = len(ranks) + 1
            ranks[entry.driver_name] = last_rank
        return str(ranks[driver_name])
class SeriesChampion(SeriesStandings):
    """
    Defines a class for a default Series Champion title card.

    Renders the champion plus up to two runners-up (series ranks 1-3)
    next to an optional series logo, instead of the tabular layout used
    by the other cards; hence the custom _make_material below.
    """
    def __init__(self, data, size=None, **kwargs):
        """Build the champion card's lookups and sort by series points.

        As in SeriesStandings, ``super(RaceResults, self)`` deliberately
        skips RaceResults.__init__ and calls StaticBase.__init__ directly.
        Lookups are stored on self (not locals) because _make_material
        needs them at render time.
        """
        super(RaceResults, self).__init__(data, size=size, **kwargs)
        try:
            self._name_lookup = {
                k: v['display']
                for k, v in kwargs['participant_config'].items()}
            if 'additional_participant_config' in kwargs:
                for name in kwargs['additional_participant_config'].keys():
                    self._name_lookup[name] = name
        except KeyError:
            # Without config, display raw driver names.
            self._name_lookup = {
                entry.driver_name: entry.driver_name
                for entry in self._data}
        try:
            self._car_class_lookup = None
            self._car_lookup = {
                k: v['car']
                for k, v in kwargs['participant_config'].items()
                if v['car'] != ""}
            if 'additional_participant_config' in kwargs:
                for name, values \
                        in kwargs['additional_participant_config'].items():
                    if values['car'] != "":
                        self._car_lookup[name] = values['car']
            if len(self._car_lookup):
                try:
                    if len(kwargs['car_classes']):
                        # driver -> (class color, class name), matched by
                        # which class's 'cars' list contains the driver's car.
                        self._car_class_lookup = {
                            driver: (car_class_data['color'], car_class)
                            for driver, car in self._car_lookup.items()
                            for car_class, car_class_data
                            in kwargs['car_classes'].items()
                            if car in car_class_data['cars']
                        }
                except KeyError:
                    self._car_class_lookup = None
            else:
                self._car_lookup = None
                self._car_class_lookup = None
        except KeyError:
            self._car_lookup = None
            self._car_class_lookup = None
        try:
            self._team_lookup = {
                k: v['team']
                for k, v in kwargs['participant_config'].items()
                if v['team'] != ""}
            if 'additional_participant_config' in kwargs:
                for name, values \
                        in kwargs['additional_participant_config'].items():
                    if values['team'] != "":
                        self._team_lookup[name] = values['team']
            if len(self._team_lookup) == 0:
                self._team_lookup = None
        except KeyError:
            self._team_lookup = None
        try:
            point_structure = {
                k: v
                for k, v in enumerate(kwargs['point_structure'])}
        except KeyError:
            point_structure = None
        try:
            points_lookup = {
                k: v['points']
                for k, v in kwargs['participant_config'].items()}
            if 'additional_participant_config' in kwargs:
                for name, values \
                        in kwargs['additional_participant_config'].items():
                    points_lookup[name] = values['points']
        except KeyError:
            points_lookup = None
        try:
            points_adjust = {
                k: v['points_adjust']
                for k, v in kwargs['participant_config'].items()
                if v['points_adjust'] != ""}
            if len(points_adjust) == 0:
                points_adjust = None
        except KeyError:
            points_adjust = None
        if 'additional_participant_config' in kwargs:
            # Placeholder rows so additional entrants count in the standings.
            for name in kwargs['additional_participant_config'].keys():
                self._data.append(AdditionalClassificationEntry(name))
        formatter_args = {'point_structure': point_structure,
                          'points_lookup': points_lookup,
                          'points_adjust': points_adjust}
        # Kept for _make_material's rank computation.
        self._formatter_args = formatter_args
        # Sort by total series points descending, name as tiebreaker.
        self._sort_data(
            lambda x: (
                -int(self.calc_series_points(
                    x.calc_points_data, **formatter_args)),
                x.driver_name))
    def _make_material(self):
        """Render the champion card as a PIL Image.

        Layout: optional heading strip on top; optional series logo on the
        left; champion (heading font) and runners-up (body font) with
        their team/car/class on the right; the whole material optionally
        pasted centered onto a backdrop with a corner logo.
        """
        # If data exists, create a heading.
        try:
            heading_color = tuple(self._options['heading_color'])
            try:
                heading_font_color = tuple(
                    self._options['heading_font_color'])
            except KeyError:
                heading_font_color = (0, 0, 0)
            try:
                heading_font = ImageFont.truetype(
                    self._options['heading_font'],
                    self._options['heading_font_size'])
            except (KeyError, OSError):
                heading_font = ImageFont.load_default()
            try:
                heading_text = self._options['heading_text']
            except KeyError:
                heading_text = None
        except KeyError:
            # No heading_color configured: skip the heading entirely.
            heading_color = None
            heading_font_color = (0, 0, 0)
            heading_font = ImageFont.load_default()
            heading_text = None
            heading = False
        else:
            heading = True
        try:
            series_logo = Image.open(self._options['series_logo'])
            try:
                series_logo_width = self._options['champion_width']
            except KeyError:
                series_logo_width = 300
            try:
                series_logo_height = self._options['champion_height']
            except KeyError:
                series_logo_height = 300
        except (KeyError, OSError):
            series_logo = None
            series_logo_width = 0
            series_logo_height = 0
        try:
            champion_color = tuple(self._options['champion_color'])
        except KeyError:
            champion_color = (255, 255, 255)
        # If provided, use a font.
        try:
            try:
                font = ImageFont.truetype(
                    self._options['font'],
                    self._options['font_size'])
            except OSError:
                font = ImageFont.load_default()
            font_color = tuple(self._options['font_color'])
        except KeyError:
            font = ImageFont.load_default()
            font_color = (0, 0, 0)
        # If set, use external and internal margins.
        try:
            margin = self._options['margin']
        except KeyError:
            margin = 6*font.getsize("A")[1]
        try:
            column_margin = self._options['column_margin']
        except KeyError:
            column_margin = 3*font.getsize("A")[1]
        # If set, use a backdrop.
        try:
            backdrop = Image.open(self._options['backdrop'])
            if self._size is not None:
                backdrop = backdrop.resize(self._size)
                backdrop_size = self._size
            else:
                backdrop_size = backdrop.size
        except (AttributeError, KeyError, IOError):
            backdrop = None
            backdrop_size = None
        # If set, use a logo on the backdrop.
        try:
            logo = Image.open(self._options['logo'])
            logo_size = (
                self._options['logo_width'],
                self._options['logo_height'])
        except (AttributeError, KeyError, IOError):
            logo = None
            logo_size = None
        # Only the top three in the series appear on this card.
        champion_data = [
            entry for entry in self._data
            if int(self.calc_series_rank(
                entry.calc_points_data,
                **self._formatter_args)) <= 3]
        # Build main data material
        # First pass: measure the text block (width of widest line plus
        # margins; total height of all lines).
        text_width = 0
        text_height = 0
        for rank, entry in enumerate(champion_data, 1):
            if rank == 1:
                width, height = heading_font.getsize("Champion")
                text_width = max([text_width, width + 2 * margin])
                text_height += height
                width, height = heading_font.getsize(
                    self._name_lookup[entry.driver_name])
                text_width = max([text_width, width + 2 * margin])
                text_height += height
            else:
                width, height = font.getsize("Runner Up")
                text_width = max([text_width, width + 2 * margin])
                text_height += height
                width, height = font.getsize(
                    self._name_lookup[entry.driver_name])
                text_width = max([text_width, width + 2 * margin])
                text_height += height
            if self._team_lookup is not None:
                width, height = font.getsize(
                    self._team_lookup[entry.driver_name])
                text_width = max([
                    text_width,
                    width + column_margin + 2 * margin])
                text_height += height
            if self._car_lookup is not None:
                width, height = font.getsize(
                    self._car_lookup[entry.driver_name])
                text_width = max([
                    text_width,
                    width + column_margin + 2 * margin])
                text_height += height
            if self._car_class_lookup is not None:
                width, height = font.getsize(
                    self._car_class_lookup[entry.driver_name][1])
                text_width = max([
                    text_width,
                    width + column_margin + 2 * margin])
                text_height += height
            text_height += margin
        # Remove extra margin added by last loop iteration.
        text_height -= margin
        material_width = series_logo_width + text_width
        # Build heading, if applicable.
        heading_height = 0
        heading_material = None
        if heading:
            heading_height = heading_font.getsize(str(heading_text))[1]
            heading_height += 2*margin
            material_width = max([
                (
                    2*margin
                    + heading_font.getsize(str(heading_text))[0]),
                material_width])
            heading_material = Image.new(
                'RGBA',
                (material_width, heading_height),
                heading_color)
            draw = ImageDraw.Draw(heading_material)
            if heading_text is not None:
                draw.text(
                    (margin, margin),
                    heading_text,
                    fill=heading_font_color,
                    font=heading_font)
        material_height = sum([
            heading_height,
            max([series_logo_height, text_height + 2 * margin])
        ])
        material = Image.new(
            'RGBA',
            (material_width, material_height),
            champion_color)
        # Write heading, if applicable.
        if heading:
            material.paste(heading_material, (0, 0), heading_material)
        if series_logo is not None:
            series_logo = series_logo.resize(
                (series_logo_width, series_logo_height))
            material.paste(series_logo, (0, heading_height))
        # Second pass: draw the measured text, vertically centered next to
        # the logo. Runner-up blocks are indented by column_margin.
        y_position = heading_height \
            + int((max([
                series_logo_height,
                text_height + 2 * margin]) - text_height) / 2)
        x_position = series_logo_width + margin
        draw = ImageDraw.Draw(material)
        for rank, entry in enumerate(champion_data, 1):
            if rank == 1:
                draw.text(
                    (x_position, y_position),
                    "Champion",
                    fill=font_color,
                    font=heading_font)
                y_position += heading_font.getsize("A")[1]
                draw.text(
                    (x_position, y_position),
                    self._name_lookup[entry.driver_name],
                    fill=font_color,
                    font=heading_font)
                y_position += heading_font.getsize("A")[1]
                x_position += column_margin
            else:
                draw.text(
                    (x_position, y_position),
                    "Runner Up",
                    fill=font_color,
                    font=font)
                y_position += font.getsize("A")[1]
                draw.text(
                    (x_position, y_position),
                    self._name_lookup[entry.driver_name],
                    fill=font_color,
                    font=font)
                y_position += font.getsize("A")[1]
                x_position += column_margin
            if self._team_lookup is not None:
                draw.text(
                    (x_position, y_position),
                    self._team_lookup[entry.driver_name],
                    fill=font_color,
                    font=font)
                y_position += font.getsize("A")[1]
            if self._car_lookup is not None:
                draw.text(
                    (x_position, y_position),
                    self._car_lookup[entry.driver_name],
                    fill=font_color,
                    font=font)
                y_position += font.getsize("A")[1]
            if self._car_class_lookup is not None:
                draw.text(
                    (x_position, y_position),
                    self._car_class_lookup[entry.driver_name][1],
                    fill=font_color,
                    font=font)
                y_position += font.getsize("A")[1]
            y_position += margin
            # Undo the per-entry indent before the next entry.
            x_position -= column_margin
        if backdrop is not None:
            backdrop_width, backdrop_height = backdrop_size
            # Add logo if needed.
            if logo is not None:
                logo = logo.resize(logo_size)
                logo_width, logo_height = logo_size
                # Corner logo goes bottom-right of the backdrop.
                text_x_position = backdrop_width-logo_width
                y_position = backdrop_height-logo_height
                backdrop.paste(
                    logo,
                    (text_x_position, y_position),
                    logo)
            if material_width > backdrop_width \
                    or material_height > backdrop_height:
                # Shrink the card to fit the backdrop, preserving aspect.
                material.thumbnail(backdrop_size)
                material_width, material_height = material.size
            # Center the card on the backdrop.
            text_x_position = int((backdrop_width-material_width)/2)
            y_position = int((backdrop_height-material_height)/2)
            backdrop.paste(
                material,
                (text_x_position, y_position),
                material)
            material = backdrop
        return material
class AdditionalClassificationEntry:
    """Placeholder classification entry for a series entrant who did not
    take part in this race.

    Exposes the minimal read-only interface the standings cards use:
    a name, no best lap, and points data with unknown position/lap.
    """
    def __init__(self, name):
        self._name = name

    @property
    def driver_name(self):
        """The entrant's name."""
        return self._name

    @property
    def best_lap(self):
        """No lap data exists for an additional entrant."""
        return None

    @property
    def calc_points_data(self):
        """(driver_name, position, best_lap) with unknown race data."""
        return self._name, None, None
|
<filename>digitrec_neuralnet.py<gh_stars>1-10
import math
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.io import loadmat
import scipy.optimize as opt
# Training data and pre-trained weights in MATLAB .mat format
# (presumably from Andrew Ng's ML course ex3/ex4 — the 400/25/10 network
# below matches that assignment; confirm against the data files).
data = loadmat('data/ex3data1.mat')
theta = loadmat('data/ex4weights.mat')
theta1 = theta['Theta1']
theta2 = theta['Theta2']
x = data['X']
y_prime = data['y'].transpose()[0]
# The dataset encodes digit 0 with label 10; remap it back to 0.
y_prime = np.array([elem if elem != 10 else 0 for elem in y_prime])
# One-hot encode the labels: y[i] has a 1 in the column of the true digit.
y = np.zeros((y_prime.shape[0], 10))
for i in range(y_prime.shape[0]):
    y[i][y_prime[i]] = 1
def g(z):
    """Sigmoid activation 1 / (1 + e**-z), numerically safe for large |z|.

    The original ``math.pow(math.e, -z)`` raises OverflowError once -z
    exceeds ~709 and is less accurate than ``math.exp``. Branching on the
    sign keeps the exponent non-positive, so ``exp`` never overflows:
    for z < 0 we use the algebraically equivalent form e**z / (1 + e**z).
    """
    if z >= 0:
        return np.float64(1.0) / (1.0 + math.exp(-z))
    ez = math.exp(z)
    return np.float64(ez / (1.0 + ez))
g = np.vectorize(g, otypes=[np.float64])
# Now, when g is applied to an array, it is applied elementwise.
def unroll(thetas):
    """Flatten a sequence of weight matrices into a single 1-D vector."""
    flat_parts = tuple(matrix.flatten() for matrix in thetas)
    return np.concatenate(flat_parts, axis=0)
def roll(theta, layers):
    """Split the flat parameter vector back into per-layer weight matrices.

    Layer i's matrix has shape (layers[i], layers[i - 1] + 1); the extra
    column holds the bias weights. Returns the matrices as a list.
    """
    matrices = []
    offset = 0
    for prev_size, cur_size in zip(layers, layers[1:]):
        rows, cols = cur_size, prev_size + 1  # +1 for the bias column
        end = offset + rows * cols
        matrices.append(theta[offset:end].reshape((rows, cols)))
        offset = end
    return matrices
def randomInit(layers):
    """Return a flat parameter vector with weights uniform in [-0.5, 0.5).

    One (layers[i], layers[i - 1] + 1) matrix per layer transition, with
    the extra column for the bias unit, flattened into one vector.
    """
    pieces = []
    for prev_size, cur_size in zip(layers, layers[1:]):
        shape = (cur_size, prev_size + 1)
        pieces.append((np.random.rand(*shape) - 0.5 * np.ones(shape)).flatten())
    return np.concatenate(tuple(pieces), axis=0)
def cost(theta, h, y, layers, λ):
    """Regularized cross-entropy cost for the network.

    Row i of ``h`` / ``y`` is the hypothesis / one-hot target for the
    ith training example. ``theta`` enters only through the L2 penalty,
    which excludes each matrix's bias column.
    """
    m = y.shape[0]
    total = 0
    for i in range(m):
        hit = y[i] @ np.log(h[i])
        miss = (np.ones((y[i].shape)) - y[i]) @ np.log(np.ones(h[i].shape) - h[i])
        total += hit + miss
    total = -total / m
    for theta_i in roll(theta, layers):
        # Skip column 0 (bias weights) in the regularization term.
        total += (λ / (2 * m)) * (np.sum(np.square(theta_i[:, 1:])))
    return total
def classifier(h):
    """One-hot encode the argmax of each row of the hypothesis matrix."""
    winners = np.argmax(h, axis=1)
    output = np.zeros(h.shape, dtype='float64')
    output[np.arange(winners.shape[0]), winners] = 1
    return output
def feedForward(theta, x, layers):
    """Run forward propagation through the network.

    Returns a list where entry i is the activation matrix of layer i with
    one row per training example (bias rows are handled internally).
    """
    weights = roll(theta, layers)
    activations = [x.transpose()]
    for i in range(len(layers) - 1):
        # Prepend the bias row of ones before applying this layer's weights.
        bias_row = np.ones((activations[-1].shape[1],), dtype=np.float64)
        activations[-1] = np.insert(activations[-1], [0], bias_row, axis=0)
        activations.append(g(weights[i] @ activations[-1]))
    return [layer.transpose() for layer in activations]
def gradient(theta, x, y, layers, λ):
    """Backpropagation: return the unrolled gradient of the regularized cost.

    Called by the optimizer once per iteration (hence the progress print).
    """
    print("running gradient descent")
    Δ = [0 for el in range(len(layers) - 1)]
    m = x.shape[0]
    a = feedForward(theta, x, layers)
    rolled_theta = roll(theta, layers)
    #list of np.ndarrays where list[i][j] contains δ for ith layer and jth dataset
    #δ fr ith layer is a matrix of m rows, each row for one training set
    δ = [0 for el in layers]
    # Output-layer error is simply (activation - target).
    δ[-1] = a[-1] - y
    for i in range(len(layers) - 2, -1, -1):
        # Sigmoid derivative expressed through the activations: a * (1 - a).
        g_prime = (a[i] * (np.ones(a[i].shape) - a[i]))
        tmp = np.array([rolled_theta[i].transpose() @ row for row in δ[i + 1]])
        #no error term for i = 0, but here we are calculating just for fun!
        #also no error term for bias units which is why we discard the first column of δ[i]
        δ[i] = (tmp * g_prime)[:,1:]
        # Accumulate the gradient contribution over all m examples.
        Δ[i] = sum([np.outer(δ[i + 1][j], a[i][j]) for j in range(m)])
    for i in range(len(layers) - 1):
        # Regularize, zeroing out the bias column so it is not penalized.
        tmp = np.insert(rolled_theta[i][:,1:], 0, np.zeros((rolled_theta[i].shape[0],)), axis=1)
        Δ[i] = Δ[i] / np.float64(m) + (λ / np.float64(m)) * tmp
    Δ_unrolled = unroll(Δ)
    return Δ_unrolled
def grad_check(theta, x, y, layers, λ):
    """Numerically estimate the cost gradient by central differences.

    Debug helper used to validate `gradient`; it costs two full cost
    evaluations per parameter, so only run it on tiny networks. Prints
    each component as it is computed.
    """
    epsilon = 0.0001
    Δ_unrolled = np.zeros((theta.shape), dtype=np.float64)
    # BUG FIX: the stop value must be -1 so that index 0 is included;
    # range(len(theta) - 1, 0, -1) silently skipped the first parameter,
    # leaving Δ_unrolled[0] at 0 and making the check against
    # `gradient` spuriously fail on that component.
    for i in range(len(theta) - 1, -1, -1):
        tmp = np.zeros((theta.shape))
        tmp[i] = epsilon
        theta_up = theta + tmp
        theta_lo = theta - tmp
        cost_up = cost(theta_up, feedForward(theta_up, x, layers)[-1], y, layers, λ)
        cost_lo = cost(theta_lo, feedForward(theta_lo, x, layers)[-1], y, layers, λ)
        # Central difference approximation of ∂J/∂θ_i.
        Δ_unrolled[i] = (cost_up - cost_lo) / (2 * epsilon)
        print(i, Δ_unrolled[i])
    return Δ_unrolled
def findAccuracy(theta, x, y, layers):
    """Fraction of examples whose predicted one-hot row matches y exactly."""
    y_pred = classifier(feedForward(theta, x, layers)[-1])
    matches = [1 if np.all(row_pred == row_true) else 0
               for (row_pred, row_true) in zip(y_pred, y)]
    return sum(map(int, matches)) / float(len(matches))
layers = [400, 25, 10]
#theta = unroll((theta1, theta2))
#randomly initialize a theta
theta = randomInit(layers)
λ = 1
#h = feedForward(theta, x, layers)[-1]
#print(cost(theta, h, y, layers, λ))
'''
grad = gradient(theta, x, y, layers, λ)
for i in range(grad.shape[0]):
print(i, grad[i])
print("//")
print(grad_check(theta, x, y, layers, λ))
'''
#a = np.array([[[1, 2], [3, 4]], [[5, 6, 7], [8, 9, 10], [11, 12, 13]]])
#print(a.shape)
#out = classifier(h)
#print(out[4003])
#print(y[4003])
def backPropagate(theta, x, y, layers, λ):
    """Return (cost, gradient) — the shape scipy.optimize.minimize expects
    from ``fun`` when called with ``jac=True``."""
    hypothesis = feedForward(theta, x, layers)[-1]
    j = cost(theta, hypothesis, y, layers, λ)
    return j, gradient(theta, x, y, layers, λ)
import sys

# Minimize the regularized cost with TNC, supplying the analytic gradient
# (backPropagate returns (cost, grad), matching jac=True).
fmin = opt.minimize(fun=backPropagate, x0=theta, args=(x, y, layers, λ), method='TNC', jac=True, options={'maxiter': 250})
theta = fmin.x
# BUG FIX: `threshold=np.nan` is rejected by modern NumPy (threshold must
# be a non-NaN number); sys.maxsize is the documented way to disable
# array summarization when printing.
np.set_printoptions(threshold=sys.maxsize)
print(fmin)
print(findAccuracy(theta, x, y, layers))
|
"""
Copyright 2019 Samsung SDS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import io
import json
import os
import codecs
import shutil
import numpy as np
import pandas as pd
import boto3
from brightics.function.utils import _model_dict_scala, _model_dict
import brightics.common.json as data_json
from brightics.common.datasource import DbEngine
from brightics.common.validation import raise_runtime_error
from brightics.brightics_data_api import _write_dataframe
import brightics.common.data.utils as data_utils
from brightics.common.utils import check_required_parameters
def unload(table, partial_path):
    """Persist `table` at the data path derived from partial_path[0].

    Any directory already present at the target path is removed first so the
    write always starts clean.
    """
    target = data_utils.make_data_path_from_key(partial_path[0])
    if os.path.isdir(target):
        shutil.rmtree(target)
    _write_dataframe(table, target)
def write_csv(table, path):
    """Write `table` as CSV under the ./data directory of the current working dir.

    Parameters
    ----------
    table : pandas.DataFrame
        Data to serialize (written without the index).
    path : str
        Path relative to <cwd>/data; intermediate directories are created.
    """
    full_path = os.path.join(os.getcwd(), 'data', path)
    # exist_ok=True avoids the TOCTOU race of the old exists()-then-makedirs()
    # pattern when two writers create the same directory concurrently.
    os.makedirs(os.path.dirname(full_path), exist_ok=True)
    table.to_csv(full_path, index=False)
def _change_capital_to_under_bar(a):
if a.isupper():
return '_' + a.lower()
else:
return a
def unload_model(path, **params):
    """Assemble a model dict from linked node outputs and dump it as JSON at `path`.

    `params` carries the node metadata under 'linked' plus the actual output
    data keyed by the ids referenced in linked['outputs'] (or the legacy
    'outData'). When the node ran with a group-by, one sub-model is built per
    distinct group-key combination and stored under '_grouped_data'.

    Returns {'model': <assembled model dict>}.
    """
    linked = params['linked']
    # Legacy workflows use 'outData'; newer ones use 'outputs'.
    if 'outputs' in linked:
        outputs = linked['outputs']
    else:
        outputs = linked['outData']
    param = linked['param']

    def getDataFromInputs(data):
        # BUG FIX: the original compared keys with `is` (identity), which only
        # matched interned strings and otherwise silently fell through to {}.
        # Equality lookup is the intended behavior.
        return params.get(data, {})

    if 'model' in outputs:
        model = getDataFromInputs(outputs['model'])
    else:
        # No single 'model' output: collect tables as table_1, table_2, ...
        model_table = dict()
        i = 0
        if isinstance(outputs, list):
            for v in outputs:
                model_table['table_{}'.format(i + 1)] = getDataFromInputs(v)
                i += 1
        else:
            for k, v in outputs.items():
                model_table['table_{}'.format(i + 1)] = getDataFromInputs(v)
                i += 1
        # Parameter name differs between node versions: 'groupby' vs 'group_by'.
        if ('groupby' in param and param['groupby']) or ('group_by' in param and param['group_by']):
            if 'groupby' in param:
                group_by = param['groupby']
            else:
                group_by = param['group_by']
            sample_table = model_table['table_1']
            groups = sample_table[group_by].drop_duplicates().values
            group_keys = np.array([_group_key_from_list(row) for row in groups])
            group_key_dict = {k: v.tolist() for k, v in zip(group_keys, groups)}
            model = {
                '_grouped_data': _grouped_data(group_by=group_by, group_key_dict=group_key_dict)
            }
            # Slice every table down to one group combination, then build that
            # group's sub-model.
            for group_key in group_key_dict:
                group_key_row = group_key_dict[group_key]
                tmp_model_table = model_table.copy()
                for k, v in tmp_model_table.items():
                    for group_by_col, group in zip(group_by, group_key_row):
                        v = v[v[group_by_col] == group]
                    tmp_model_table[k] = v.reset_index(drop=True)
                model['_grouped_data']['data'][group_key] = _unload_model(tmp_model_table, linked, param)
        else:
            model = _unload_model(model_table, linked, param)
    dir_ = os.path.dirname(path)
    if not os.path.exists(dir_):
        os.makedirs(dir_)
    # Binary file + utf-8 writer so the JSON payload is always utf-8 encoded.
    with open(path, 'wb') as fp:
        json.dump(data_json.to_json(model, for_redis=True), codecs.getwriter('utf-8')(fp), ensure_ascii=False)
    return {'model': model}
def _grouped_data(group_by, group_key_dict):
grouped_data = {
'data': dict(),
'group_by': group_by,
'group_key_dict': group_key_dict
}
return grouped_data
def _group_key_from_list(list_):
GROUP_KEY_SEPA = '\u0002'
return GROUP_KEY_SEPA.join([str(item) for item in list_])
def _unload_model(model, linked, param):
    """Build a brightics model dict from output tables plus node parameters.

    The model-dict name derives from the linked node's function name; Scala
    camelCase names are converted to snake_case and normalized first. Node
    parameters are mapped onto conventional model-dict keys, then the output
    tables are merged in.
    """
    full_name = linked['name']
    if 'brightics' in full_name:
        # Python node: keep the suffix after '$' and map train -> model.
        name = full_name.split("$")[-1].replace('train', 'model')
        out_model = _model_dict(name)
    else:
        # Scala node: camelCase -> snake_case (dropping the leading '_'),
        # then apply name normalizations. Replacement order matters: the
        # longer 'train_for_*' patterns must run before the bare 'train'.
        snake = ''.join(_change_capital_to_under_bar(ch) for ch in full_name)[1:]
        for old, new in (('train_for_classification', 'classification_model'),
                         ('train_for_regression', 'regression_model'),
                         ('train', 'model'),
                         ('g_b_t', 'gbt'),
                         ('s_v_m', 'svm')):
            snake = snake.replace(old, new)
        out_model = _model_dict_scala(snake)
    for key, value in param.items():
        # Scala side serializes booleans as strings.
        if value == "false":
            value = False
        elif value == "true":
            value = True
        if key in ('intercept', 'fit-intercept'):
            out_model['fit_intercept'] = value
        elif key in ('ycolumn', 'label-col-name'):
            out_model['label_col'] = value[0]
        elif key in ('xcolumns', 'columns'):
            out_model['feature_cols'] = value[0]
        else:
            out_model[key.replace('-', '_')] = value
    for key, value in model.items():
        out_model[key] = value
    return out_model
def write_to_s3(table, datasource, object_key):
    """Upload `table` as a CSV object to the S3 bucket described by `datasource`.

    `datasource` must provide 'accessKeyId', 'secretAccessKey' and 'bucketName'.
    """
    s3 = boto3.client(
        's3',
        aws_access_key_id=datasource['accessKeyId'],
        aws_secret_access_key=datasource['secretAccessKey'],
        use_ssl=False
    )
    buffer = io.StringIO()
    table.to_csv(buffer, index=False)
    buffer.seek(0)
    s3.put_object(Bucket=datasource['bucketName'], Key=object_key, Body=buffer.getvalue())
def write_to_s3_2(table, object_key, access_key_id, secret_access_key, bucket_name):
    """Convenience wrapper around write_to_s3 taking credentials as plain arguments."""
    write_to_s3(
        table,
        {
            'accessKeyId': access_key_id,
            'secretAccessKey': secret_access_key,
            'bucketName': bucket_name,
        },
        object_key,
    )
def write_to_db(table, **params):
    """Public entry point: validate required parameters, then delegate to _write_to_db."""
    check_required_parameters(_write_to_db, params, ['table'])
    return _write_to_db(table, **params)
def _write_to_db(table, tableName, datasource, ifExists='fail'):
    """Write a DataFrame into database table `tableName` via a DbEngine connection.

    Raises a runtime error when `table` is not a pandas DataFrame. `ifExists`
    is forwarded to DataFrame.to_sql ('fail' by default).
    """
    if not isinstance(table, pd.DataFrame):
        raise_runtime_error('table is not pandas.DataFrame')
    engine_cm = DbEngine(**datasource)
    with engine_cm as engine:
        table.to_sql(tableName, engine, if_exists=ifExists, index=False)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.