input stringlengths 2.65k 237k | output stringclasses 1
value |
|---|---|
2**(char_digits-1)
Etiny = -2**(char_digits-1) + 1
context.__name__ = "Float_%d_%d_%s" % (char_digits, sig_digits,
_round_code[rounding])
self.contexts[(char_digits, sig_digits, rounding)] = context
return context
# Factory for representation contexts; presumably caches one class per
# (char_digits, sig_digits, rounding) triple -- see context_registry above.
define_context = context_registry()

# Pre-built IEEE 754 binary interchange formats, given as
# (characteristic/exponent bits, significand bits):
single = define_context(8, 23) # IEEE 754 32 bit float
double = define_context(11, 52) # IEEE 754 64 bit float
quadruple = define_context(15, 112) # IEEE 754 128 bit float
half = define_context(5, 10) # IEEE 754 16 bit float
test = define_context(4, 6, ROUND_DOWN) # for learning purposes
class Binary:
    """Constructor for binary floating point representation from a binary
    string or Decimal class representation of a real number: e.g.
    -1101.100011 or Decimal("-13.546875000"). The value may also be an
    instance of an existing context.

    If a context is given as a Context class, the return value will be an
    instance of that class (a IEEE 754 representation of that binary value).
    If the value given was from a different context it will be coerced.

    The context may also be provided as the tuple:
        (characteristic_digits, significand_digits, rounding mode string)
    although this is intended primarily for internal use or for
    eval(repr(x)) for a Context object x.

    Binary() is the same as Binary('0') to be functionally compatible with
    the Decimal class.
    """

    def __init__(self, x='0', context=None):
        if isinstance(context, tuple):
            # tuple form: (char_digits, sig_digits, rounding) -- mainly the
            # eval(repr(x)) round-trip path.
            self.context = define_context(context[0], context[1],
                                          rounding=context[2])
        else:
            self.context = context
        # placeholder for representation of x
        self.rep = None
        if isinstance(x, Binary):
            # copy-construction from another Binary
            self.dec = x.dec
            self.bin = x.bin
            if context is None:
                self.context = x.context
                # in case x.context is not None (otherwise gets
                # overwritten by x.dec == x.rep anyway)
                self.rep = x.rep
        elif isinstance(x, ContextClass):
            if context is None:
                # optimization -- don't recreate self.rep later
                # from this context, keep it now.
                self.context = x.__class__
                self.rep = x
                self.dec = x.as_decimal()
                self.bin = str(x.as_binary())
            else:
                # coerce x into the explicitly requested context
                x_dec = x.as_decimal()
                if abs(x_dec) > self.context.largest_norm:
                    if x_dec < 0:
                        self.bin = "-Inf"
                        # BUGFIX: was `seld.dec` -- a NameError whenever a
                        # negative out-of-range value was coerced.
                        self.dec = Decimal("-Inf")
                    else:
                        self.bin = "Inf"
                        self.dec = Decimal("Inf")
                    self.rep = self.context(self.dec)
                else:
                    # BUGFIX: the in-range coercion path never assigned
                    # self.dec, which the shared conversion code below
                    # reads (AttributeError). Record the decimal value;
                    # self.rep / self.bin are derived below.
                    self.dec = x_dec
        elif isinstance(x, Decimal):
            self.dec = x
            if self.context is None:
                raise ValueError("Cannot create arbitrary precision binary "
                                 "value without a representation context")
            if x.is_nan() or x.is_infinite():
                if x.is_nan():
                    self.bin = "NaN"
                else:
                    if x < 0:
                        self.bin = "-Inf"
                    else:
                        self.bin = "Inf"
            elif abs(x) > self.context.largest_norm:
                # overflow for this context: clamp to signed infinity
                if x < 0:
                    self.dec = Decimal("-Inf")
                    self.bin = "-Inf"
                else:
                    self.dec = Decimal("Inf")
                    self.bin = "Inf"
            else:
                self.bin = dec2binstr(x, self.context)
        else:
            # string, e.g. '-1101.1e-3', 'Inf', 'NaN'
            bstr = x.lower()
            if bstr in ['-inf', 'inf', 'nan']:
                self.bin = x
                self.dec = Decimal(x)
            else:
                self.bin = x
                self.dec = binvalstr2dec(bstr)
        if self.context is None:
            # no context: the exact Decimal doubles as the representation
            self.rep = self.dec
        else:
            if self.rep is None:
                try:
                    self.rep = self.context(self.dec)
                except BinaryOverflow:
                    # too large for the context: clamp to signed infinity
                    if self.dec < 0:
                        self.dec = Decimal("-Inf")
                        self.bin = "-Inf"
                    else:
                        self.dec = Decimal("Inf")
                        self.bin = "Inf"
                    self.rep = self.context(self.dec)
            self.dec = self.rep.dec_value
            if self.rep.bin_value == "":
                # lazy evaluation hasn't been performed yet
                self.bin = dec2binstr(self.dec, self.rep)
                # might as well update the representation
                self.rep.bin_value = self.bin
            else:
                self.bin = self.rep.bin_value

    def __hash__(self):
        return hash(self.rep)

    def __str__(self):
        return self.bin

    def __repr__(self):
        # Emits a form eval() can round-trip (context passed as a tuple).
        if self.context:
            return 'Binary("%s", (%d, %d, %s))' % (self.bin,
                        self.context.characteristicClass.digits,
                        self.context.significandClass.digits,
                        self.context.round_mode)
        else:
            return 'Binary("%s")' % self.bin

    def as_binary(self):
        """Return self (already a binary representation)."""
        return self

    def as_decimal(self):
        """Return the exact Decimal value of this Binary."""
        return self.dec

    def _op_check(self, other):
        """Validate `other` as an operand and select a result context.

        Returns (other_as_Decimal, context): the context is the
        higher-precision of the two operands' contexts; an equal-precision
        tie requires matching rounding modes.

        Raises TypeError for unsupported operand types, ValueError when two
        equal-precision contexts disagree on rounding mode.
        """
        if isinstance(other, (int, np.int32, np.int64)):
            ox, c = Decimal(other), None
        elif isinstance(other, Binary):
            ox, c = other.dec, other.context
        elif isinstance(other, Decimal):
            ox, c = other, None
        elif isinstance(other, ContextClass):
            # ContextClass is strict about comparing only with others
            # of the same representation, so ensure self is
            if self.context == other.__class__:
                ox, c = other.as_decimal(), other.__class__
            else:
                raise TypeError("Invalid object for comparison")
        else:
            raise TypeError("Invalid object for comparison")
        # 0 encodes "no context" for the precision comparison below
        if self.context:
            s_digits = self.context.digits
        else:
            s_digits = 0
        if c:
            c_digits = c.digits
        else:
            c_digits = 0
        if s_digits > c_digits:
            ctx = self.context
        else:
            if s_digits > 0 and s_digits == c_digits:
                # contexts have the same precision, but what about
                # rounding?
                if self.context.round_mode == c.round_mode:
                    ctx = c
                else:
                    raise ValueError("Clash of rounding modes for "
                                     "equal-precision comparison")
            else:
                ctx = c
        return ox, ctx

    # Comparisons delegate to the exact Decimal values; _op_check only
    # validates the operand type / context compatibility.
    def __eq__(self, other):
        ox, c = self._op_check(other)
        return self.dec == ox

    def __ne__(self, other):
        ox, c = self._op_check(other)
        return self.dec != ox

    def __le__(self, other):
        ox, c = self._op_check(other)
        return self.dec <= ox

    def __lt__(self, other):
        ox, c = self._op_check(other)
        return self.dec < ox

    def __ge__(self, other):
        ox, c = self._op_check(other)
        return self.dec >= ox

    def __gt__(self, other):
        ox, c = self._op_check(other)
        return self.dec > ox

    def __neg__(self):
        return self.__class__(-self.rep)

    def __abs__(self):
        return self.__class__(abs(self.rep))

    # Arithmetic is performed exactly on the Decimal values and the result
    # is re-represented in the context chosen by _op_check.
    def __add__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(self.dec + ox, ctx)
    __radd__ = __add__

    def __sub__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(self.dec - ox, ctx)

    def __rsub__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(ox - self.dec, ctx)

    def __mul__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(self.dec * ox, ctx)
    __rmul__ = __mul__

    def __div__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(self.dec / ox, ctx)

    def __rdiv__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(ox / self.dec, ctx)
    __rtruediv__ = __rdiv__
    __truediv__ = __div__

    def __pow__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(self.dec ** ox, ctx)

    def __rpow__(self, other):
        ox, ctx = self._op_check(other)
        return self.__class__(ox ** self.dec, ctx)

    def __nonzero__(self):
        return self.dec != 0

    # Python 3 truth-testing hook; without it py3 ignores __nonzero__ and
    # every Binary would be truthy. Backward-compatible addition.
    __bool__ = __nonzero__

    def sqrt(self):
        """Square root via Decimal.sqrt(), re-represented in self.context."""
        return self.__class__(self.dec.sqrt(), self.context)

    def max(self, other):
        """Respects NaN and Inf.

        NOTE(review): when `other` wins it is returned as-is, so the result
        may not be a Binary if `other` was an int/Decimal -- confirm callers
        rely on this.
        """
        ox, ctx = self._op_check(other)
        r = self.dec.max(ox)
        if r == self.dec:
            return self
        else:
            return other

    def min(self, other):
        """Respects NaN and Inf (same return-type caveat as max)."""
        ox, ctx = self._op_check(other)
        r = self.dec.min(ox)
        if r == self.dec:
            return self
        else:
            return other

    def __reduce__(self):
        # pickle support: rebuild from the eval-able repr string
        return (self.__class__, (repr(self),))

    def __copy__(self):
        if type(self) == Binary:
            return self  # I'm immutable; therefore I am my own clone
        return self.__class__(str(self))

    def __deepcopy__(self, memo):
        if type(self) == Binary:
            return self  # My components are also immutable
        return self.__class__(str(self))
def binvalstr2dec(x):
    """Convert a signed real number in binary string form (optionally with a
    base-2 'e'/'E' exponent, e.g. '-1101.1e3') to its exact Decimal value.

    Special values (Inf, NaN) are not handled here.
    """
    if not isbinstr(x):
        raise ValueError("Invalid string representation of binary"
                         " float: %s" % x)
    # strip an optional leading sign
    sign = 1
    if x[0] == '-':
        sign = -1
        x = x[1:]
    # peel off an optional base-2 exponent
    e = 0
    for marker in ('e', 'E'):
        if marker in x:
            x, exp_digits = x.split(marker)
            e = int(exp_digits)
            break
    # split into whole / fractional digit strings
    if '.' in x:
        try:
            whole, frac = x.split('.')
        except ValueError:
            raise ValueError("Invalid string representation of binary"
                             " float")
        frac = frac or '0'
        whole = whole or '0'
    else:
        whole, frac = x, '0'
    scale = Decimal(2)**e
    try:
        dec_whole = Decimal(int(whole, base=2)) * scale
    except ValueError:
        # defensive: treat an unparsable whole part as zero
        dec_whole = Decimal(0)
    dec_frac = binfracstr2decfrac(frac) * scale
    return sign*(dec_whole + dec_frac)
def isbinstr(arg):
# supports unary + / - at front, and checks for usage of exponentials
# (using 'E' or 'e')
s = arg.lower()
try:
if s[0] in ['+','-']:
s_rest = s[1:]
else:
s_rest = s
except IndexError:
return False
if '0' not in s_rest and '1' not in s_rest:
return False
pts = s.count('.')
exps = s.count('e')
pm = s_rest.count('+') + s_rest.count('-')
if pts > 1 or exps > 1 or pm > 1:
return False
if exps == 1:
exp_pos = s.find('e')
pre_exp = s[:exp_pos]
# must be numbers before and after the 'e'
if not np.sometrue([n in ('0','1') for n in pre_exp]):
return False
if s[-1]=='e':
# no chars after 'e'!
return False
if not np.sometrue([n in ('0','1','2','3','4','5','6','7','8','9') \
for n in s[exp_pos:]]):
return False
# check that any additional +/- occurs directly after 'e'
if pm == 1:
pm_pos = max([s_rest.find('+'), s_rest.find('-')])
if s_rest[pm_pos-1] != 'e':
return False
e_rest = s_rest[pm_pos+1:] # safe due to previous check
s_rest = s_rest[:pm_pos+1]
else:
e_rest = s[exp_pos+1:]
s_rest = s[:exp_pos+1]
# only remaining chars in s after e and possible +/- are numbers
if '.' in e_rest:
return False
# cannot use additional +/- if not using exponent
if pm == 1 and exps == 0:
return False
return np.alltrue([n in ('0', '1', '.', 'e', '+', '-') for | |
import numpy as np
from PIL import Image
import cv2
import io
import time
import pandas as pd
from random import randint
import os
import selenium
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.keys import Keys
import tensorflow as tf
from tensorflow.keras.models import model_from_json
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D
from tensorflow.keras.optimizers import SGD, Adam, Nadam
from tensorflow.keras.callbacks import TensorBoard
from collections import deque
import random
import pickle
import base64
from io import BytesIO
import json
# Path variables: game URL, chromedriver binary, and the CSV files that
# persist the training logs across runs.
GAME_URL = "http://wayou.github.io/t-rex-runner/"
CHROME_DRIVER_PATH = "./chromedriver"
LOSS_FILE_PATH = "./objects/loss_df.csv"
ACTIONS_FILE_PATH = "./objects/actions_df.csv"
Q_VALUE_FILE_PATH = "./objects/q_values.csv"
SCORE_FILE_PATH = "./objects/scores_df.csv"
# Script to create an id for the canvas for faster selections from the
# Document Object Model (DOM)
init_script = "document.getElementsByClassName('runner-canvas')[0].id = 'runner-canvas'"
# Script to get the current frame from the canvas as base64 PNG data
# (substring(22) strips the "data:image/png;base64," prefix)
getbase64Script = "canvasRunner = document.getElementById('runner-canvas'); \
return canvasRunner.toDataURL().substring(22)"
# Game parameter constants (DQN hyper-parameters)
ACTIONS = 2  # Possible actions: "Jump" or "Do Nothing"
GAMMA = 0.9  # Decay rate of past observations, original 0.9
OBSERVATION = 100.  # Timesteps to observe before training
EXPLORE = 100000  # Frames over which to anneal epsilon
FINAL_EPSILON = 0.0001  # Final value of epsilon
INITIAL_EPSILON = 0.1  # Initial value of epsilon
REPLAY_MEMORY = 80000  # Number of previous transitions to remember
BATCH = 32  # Size of minibatch
FRAME_PER_ACTION = 1  # Act on every frame (no frame skipping)
LEARNING_RATE = 0.0003
img_rows, img_cols = 80, 80  # CNN input frame size
img_channels = 4  # We stack 4 frames
# Initialize log structures from file if they exist or else create new
loss_df = pd.read_csv(LOSS_FILE_PATH) if os.path.isfile(
    LOSS_FILE_PATH) else pd.DataFrame(columns=["loss"])
score_df = pd.read_csv(SCORE_FILE_PATH) if os.path.isfile(
    SCORE_FILE_PATH) else pd.DataFrame(columns=["Scores"])
actions_df = pd.read_csv(ACTIONS_FILE_PATH) if os.path.isfile(
    ACTIONS_FILE_PATH) else pd.DataFrame(columns=["Actions"])
q_values_df = pd.read_csv(Q_VALUE_FILE_PATH) if os.path.isfile(
    Q_VALUE_FILE_PATH) else pd.DataFrame(columns=["qvalues"])
# Some basic pre-processing function
def save_object(obj, name):
    """
    Pickle `obj` into the objects/ folder as <name>.pkl using the highest
    pickle protocol.

    Parameters:
        obj  => any picklable python object
        name => file stem (without extension) inside objects/
    """
    # Parameter renamed from `object` -- it shadowed the builtin.
    with open("objects/" + name + ".pkl", "wb") as f:
        pickle.dump(obj, f, pickle.HIGHEST_PROTOCOL)
def load_object(name):
    """
    Load and return the pickled object stored at objects/<name>.pkl
    (counterpart of save_object).
    """
    path = "objects/" + name + ".pkl"
    with open(path, "rb") as fh:
        data = pickle.load(fh)
    return data
def process_image(image):
    """
    Reduce a raw colour capture to the 80x80 grayscale frame the network
    consumes: grayscale conversion, ROI crop, then resize.
    """
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)  # RGB to gray scale
    roi = gray[:300, :500]  # crop Region of Interest (ROI)
    return cv2.resize(roi, (80, 80))
def grab_screen(_driver):
    """
    Capture the game canvas from the browser (base64 PNG via javascript)
    and return it as a processed 80x80 grayscale frame.
    """
    b64_png = _driver.execute_script(getbase64Script)
    raw = np.array(Image.open(BytesIO(base64.b64decode(b64_png))))
    return process_image(raw)
def show_image(graphs=False):
    """
    Coroutine that displays frames sent into it in an OpenCV window.

    Parameters:
        graphs => when True the window is titled "Logs", otherwise
                  "Game_play".

    Exits (and closes all windows) when 'q' is pressed.
    """
    while True:
        screen = (yield)
        window_title = "Logs" if graphs else "Game_play"
        cv2.namedWindow(window_title, cv2.WINDOW_NORMAL)
        # BUGFIX: the resized frame was computed but the raw frame was
        # displayed; show the 800x400 resized frame as intended.
        resized = cv2.resize(screen, (800, 400))
        cv2.imshow(window_title, resized)
        if (cv2.waitKey(1) & 0xFF == ord("q")):
            cv2.destroyAllWindows()
            break
# Training variables are saved as checkpoints to the filesystem so training can resume from the same step
class Game():
    """
    Selenium interface between python and the browser running the Chrome
    T-Rex runner game. All game state is read/written through javascript
    on `Runner.instance_`.
    """
    def __init__(self, custom_config=True):
        """
        Launch the browser window using the attributes in chrome_options.

        Args:
            custom_config: accepted but not read in this method.
        """
        chrome_options = Options()
        chrome_options.add_argument("disable-infobars")
        chrome_options.add_argument("--mute-audio")
        self._driver = webdriver.Chrome(
            executable_path=CHROME_DRIVER_PATH, chrome_options=chrome_options)
        self._driver.set_window_position(x=-10, y=0)
        # NOTE(review): loads chrome://dino rather than GAME_URL -- confirm
        # which target is intended.
        self._driver.get("chrome://dino")
        # Freeze the game speed so difficulty stays constant during training.
        self._driver.execute_script("Runner.config.ACCELERATION=0")
        # Tag the canvas with an id for fast DOM lookups later.
        self._driver.execute_script(init_script)

    def get_crashed(self):
        """
        Return True if the agent has crashed into an obstacle (reads the
        game's javascript `crashed` flag).
        """
        return self._driver.execute_script("return Runner.instance_.crashed")

    def get_playing(self):
        """
        Return True if a game is in progress, False if crashed or paused.
        """
        return self._driver.execute_script("return Runner.instance_.playing")

    def restart(self):
        """
        Send a signal to browser-javascript to restart the game.
        """
        self._driver.execute_script("Runner.instance_.restart()")

    def press_up(self):
        """
        Send an up-arrow key press to the browser (dino jump).
        """
        self._driver.find_element_by_tag_name("body").send_keys(Keys.ARROW_UP)

    def get_score(self):
        """
        Return the current game score as an int. The javascript side
        exposes the score as a digit array, e.g. [1, 0, 0] for 100.
        """
        score_array = self._driver.execute_script(
            "return Runner.instance_.distanceMeter.digits")
        # NOTE(review): ''.join assumes the digits arrive as strings --
        # confirm against the selenium type mapping.
        score = ''.join(score_array)
        return int(score)

    def pause(self):
        """
        Pause the game.
        """
        return self._driver.execute_script("return Runner.instance_.stop()")

    def resume(self):
        """
        Resume a paused game if not crashed.
        """
        return self._driver.execute_script("return Runner.instance_.play()")

    def end(self):
        """
        Close the browser and end the game.
        """
        self._driver.close()
class DinoAgent:
    """
    Reinforcement-learning agent: a thin action/state wrapper around a Game.
    """
    def __init__(self, game):  # takes game as input for taking actions
        self._game = game
        self.jump()  # the game only starts after a first jump

    def is_running(self):
        # True while a run is in progress (not paused / crashed).
        return self._game.get_playing()

    def is_crashed(self):
        return self._game.get_crashed()

    def jump(self):
        self._game.press_up()

    def duck(self):
        # NOTE(review): Game defines no press_down() in this file, so
        # calling duck() would raise AttributeError -- confirm against the
        # full Game implementation before use.
        self._game.press_down()
class Game_State:
    """
    Couples a DinoAgent with its Game environment: executes actions,
    observes the next frame, and produces (image, reward, is_over)
    experience tuples. Also logs actions/scores into the module-level
    dataframes and streams frames to an on-screen display coroutine.
    """
    def __init__(self, agent, game):
        self._agent = agent
        self._game = game
        # Display the processed image on screen using openCV, implemented
        # as a python coroutine.
        self._display = show_image()
        self._display.__next__()  # prime (initialize) the coroutine

    def get_state(self, actions):
        """
        Execute `actions` (one-hot: index 1 == jump) and return the
        experience of one iteration as (image, reward, is_over).
        """
        actions_df.loc[len(actions_df)
                       ] = actions[1]  # Storing actions in a dataframe
        score = self._game.get_score()
        reward = 0.1  # small living reward per surviving step
        is_over = False  # Game Over
        if actions[1] == 1:
            self._agent.jump()
        image = grab_screen(self._game._driver)
        self._display.send(image)  # Display the image on screen
        if self._agent.is_crashed():
            # Log the score when the game is over.
            # BUGFIX: the row index was len(loss_df), which could overwrite
            # or skip rows of score_df; use score_df's own length.
            score_df.loc[len(score_df)] = score
            self._game.restart()
            reward = -1
            is_over = True
        return image, reward, is_over
def buildModel():
    """
    Build and compile the 3-convolution Q-network.

    Input : stack of img_channels (4) grayscale frames, img_cols x img_rows
            (80x80) each.
    Output: one Q-value per possible action (ACTIONS).

    Returns:
        Compiled keras Sequential model (MSE loss, Nadam optimizer).
        Also writes an initial model.h5 checkpoint when none exists.
    """
    print("Building Convolutional Neural Network")
    model = Sequential()
    # First layer of 80*80*4 with 32 filters
    model.add(Conv2D(32, (8, 8), padding="same", strides=(4, 4), input_shape=(
        img_cols, img_rows, img_channels)))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Activation("relu"))
    # Second layer of 40*40*4 with 64 filters
    model.add(Conv2D(64, (4, 4), strides=(2, 2), padding='same'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Activation("relu"))
    # Third layer of 30*30*4 with 64 filters
    model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Activation("relu"))
    model.add(Flatten())
    model.add(Dense(512))
    model.add(Activation("relu"))
    model.add(Dense(ACTIONS))
    nadam = Nadam(lr=LEARNING_RATE)
    model.compile(loss="mse", optimizer=nadam)
    # Create the model checkpoint file if not present.
    # BUGFIX: the existence check previously tested LOSS_FILE_PATH (the
    # loss log) instead of the checkpoint itself, so a missing model.h5
    # was never recreated once training logs existed.
    if not os.path.isfile("model.h5"):
        model.save_weights("model.h5")
    print("Finished building the Convolutional Neural Network")
    return model
def trainNetwork(model, game_state, observe=False):
"""
Main Training module
Parameters:
model => Keras Model to be trained
game_state => Game State module with access to game environment and dino
observe => Flag to indicate if the model is to be trained(weights updates), else just play
"""
last_time = time.time() # Store the previous observations in replay memory
D = load_object("D") # Load from file system
do_nothing = np.zeros(ACTIONS)
do_nothing[0] = 1 # 0 => Do Nothing ; 1 => Jump
# Get next step after performing the action
x_t, r_0, terminal = game_state.get_state(do_nothing)
# Stack 4 images to create a placeholder input
s_t = np.stack((x_t, x_t, x_t, x_t), axis=2)
s_t = s_t.reshape(1, s_t.shape[0], s_t.shape[1], s_t.shape[2]) # 1*20*40*4
initial_state = s_t
if observe: # We keep observing, never train
OBSERVE = 99999
epsilon = FINAL_EPSILON
print("Loading weights to the CNN")
model.load_weights("model.h5")
#adam = Adam(lr=LEARNING_RATE)
nadam = Nadam(lr=LEARNING_RATE)
model.compile(loss="mse", optimizer=nadam)
print("Loading weights Successful")
else: # We go to training mode
OBSERVE = OBSERVATION
epsilon = load_object("epsilon")
model.load_weights("model.h5")
#adam = Adam(lr=LEARNING_RATE)
nadam = Nadam(lr=LEARNING_RATE)
model.compile(loss="mse", optimizer=nadam)
# Resume from the previous time step stored in the file system
t = load_object("time")
while True: # Endless running
loss = 0
Q_sa = 0
action_index = 0
r_t = 0 # Reward at 4
a_t = np.zeros([ACTIONS]) # Actions at t
# Choose an action epsilon greedy
if t % FRAME_PER_ACTION == 0: # Parameter to skip frames for actions
if random.random() <= epsilon: # Randomly explore an action
print("---------Random Action---------")
action_index = random.randrange(ACTIONS)
a_t[action_index] = 1
else: # Predict the output
# Input a stack of 4 images, get the prediction
q = model.predict(s_t)
max_Q = np.argmax(q) # Choosing index with maximum "q" value
action_index = max_Q
a_t[action_index] = 1 # 0 => Do Nothing, 1 => Jump
# We reduce the epsilon (exploration parameter) gradually
if epsilon > FINAL_EPSILON and t > OBSERVE:
epsilon -= (INITIAL_EPSILON - FINAL_EPSILON)/EXPLORE
# Run the selected action and observed next state and reward
x_t1, r_t, terminal = game_state.get_state(a_t)
# FPS of the game
print("FPS: {0}".format(1/(time.time()-last_time)))
last_time = time.time()
x_t1 = x_t1.reshape(1, x_t1.shape[0], x_t1.shape[1], 1) # 1x20x40x1
# Append the new image to input stack and remove the first one
s_t1 = np.append(x_t1, s_t[:, :, :, :3], axis=3)
# Store the transition in D
D.append((s_t, action_index, r_t, s_t1, terminal))
if len(D) > REPLAY_MEMORY:
D.popleft()
# Only train if done observing
if t > OBSERVE:
# Sample a minibatch to train on
minibatch = random.sample(D, BATCH)
inputs = np.zeros(
(BATCH, s_t.shape[1], s_t.shape[2], s_t.shape[3])) # 32x20x40x4
targets = np.zeros((inputs.shape[0], ACTIONS))
# Now we do the experience replay
for i in range(0, len(minibatch)):
state_t = minibatch[i][0] # 4D stack of images
action_t = minibatch[i][1] # This is the action index
reward_t = minibatch[i][2] # Reward at state_t due to action_t
state_t1 = minibatch[i][3] # Next State
# Wheather the agent died or survided due to | |
I1sub,I2sub = make_templates_same_size(I1,I2)
S1, S2 = np.fft.fft2(I1sub), np.fft.fft2(I2sub)
W2 = np.divide(1, np.sqrt(abs(I1sub))*np.sqrt(abs(I2sub)) )
Q = (S1)*np.conj((W2*S2))
return Q
def amplitude_comp_corr(I1, I2, F_0=0.04):
    """ match two imagery through amplitude compensated phase correlation

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities
    F_0 : float, default=4e-2
        cut-off intensity in respect to maximum

    Returns
    -------
    Q : np.array, size=(m,n)
        cross-spectrum

    Notes
    -----
    [1] "Amplitude-compensated matched filtering", Applied optics,
        vol. 27(16) pp. 3461-3463, 1988.
    """
    if I1.ndim == 3:  # multi-spectral frequency stacking
        bands = I1.shape[2]
        I1sub, I2sub = make_templates_same_size(I1, I2)
        for i in range(bands):  # loop through all bands
            I1bnd, I2bnd = I1sub[:, :, i], I2sub[:, :, i]
            S1, S2 = np.fft.fft2(I1bnd), np.fft.fft2(I2bnd)
            s_0 = F_0 * np.amax(abs(S2))
            # BUGFIX: the per-band weights must be built from the 2D band
            # (I2bnd) -- the old code used the full 3D stack, whose shape
            # cannot match the 2D mask -- and the replacement must be
            # masked on BOTH sides, mirroring the single-band branch.
            W = np.divide(1, abs(I2bnd))
            A = np.divide(s_0, abs(I2bnd)**2)
            W[abs(S2) > s_0] = A[abs(S2) > s_0]
            if i == 0:
                Q = (S1)*np.conj((W*S2))
            else:
                # running average over the bands
                Q_b = (S1)*np.conj((W*S2))
                Q = (1/(i+1))*Q_b + (i/(i+1))*Q
    else:
        I1sub, I2sub = make_templates_same_size(I1, I2)
        S1, S2 = np.fft.fft2(I1sub), np.fft.fft2(I2sub)
        s_0 = F_0 * np.amax(abs(S2))
        W = np.divide(1, abs(I2sub))
        A = np.divide(s_0, abs(I2sub)**2)
        W[abs(S2) > s_0] = A[abs(S2) > s_0]
        Q = (S1)*np.conj((W*S2))
    return Q
def robust_corr(I1, I2):
    """ match two imagery through fast robust correlation

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities

    Returns
    -------
    Q : np.array, size=(m,n)
        cross-spectrum

    Notes
    -----
    [1] Fitch et al. "Fast robust correlation", IEEE transactions on image
        processing vol. 14(8) pp. 1063-1073, 2005.
    [2] Essannouni et al. "Adjustable SAD matching algorithm using frequency
        domain" Journal of real-time image processing, vol.1 pp.257-265
    """
    T1, T2 = make_templates_same_size(I1, I2)
    Q = None
    # sum the cross-spectra of complex-exponential transforms taken at a
    # few fractional powers p = 10^{0, .5}
    for p in 10**np.arange(0, 1, .5):
        amp = 1/p**(1/3)
        G1 = np.fft.fft2(amp * np.exp(1j*(2*p - 1)*T1))
        G2 = np.fft.fft2(amp * np.exp(1j*(2*p - 1)*T2))
        term = G1*np.conj(G2)
        Q = term if Q is None else Q + term
    return Q
def orientation_corr(I1, I2):
    """ match two imagery through orientation correlation

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities

    Returns
    -------
    Q : np.array, size=(m,n)
        cross-spectrum

    See Also
    --------
    phase_corr, windrose_corr

    Notes
    -----
    [1] Fitch et al. "Orientation correlation", Proceeding of the Britisch
        machine vison conference, pp. 1--10, 2002.
    [2] Heid & Kääb. "Evaluation of existing image matching methods for
        deriving glacier surface displacements globally from optical
        satellite imagery", Remote sensing of environment, vol. 118
        pp. 339-355, 2012.
    """
    T1, T2 = make_templates_same_size(I1, I2)
    if I1.ndim == 3:  # multi-spectral: running average over the bands
        for b in range(I1.shape[2]):
            G1 = normalize_power_spectrum(np.fft.fft2(T1[:, :, b]))
            G2 = normalize_power_spectrum(np.fft.fft2(T2[:, :, b]))
            Q_b = G1*np.conj(G2)
            Q = Q_b if b == 0 else (1/(b+1))*Q_b + (b/(b+1))*Q
    else:
        G1 = normalize_power_spectrum(np.fft.fft2(T1))
        G2 = normalize_power_spectrum(np.fft.fft2(T2))
        Q = G1*np.conj(G2)
    return Q
def windrose_corr(I1, I2):
    """ match two imagery through windrose phase correlation

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities

    Returns
    -------
    Q : np.array, size=(m,n)
        cross-spectrum

    See Also
    --------
    orientation_corr, phase_only_corr

    Notes
    -----
    [1] Kumar & Juday, "Design of phase-only, binary phase-only, and complex
        ternary matched filters with increased signal-to-noise ratios for
        colored noise", Optics letters, vol. 16(13) pp. 1025--1027, 1991.
    """
    if I1.ndim == 3:  # multi-spectral frequency stacking
        bands = I1.shape[2]
        I1sub, I2sub = make_templates_same_size(I1, I2)
        for i in range(bands):  # loop through all bands
            I1bnd, I2bnd = I1sub[:, :, i], I2sub[:, :, i]
            # BUGFIX: binarize the spectra with np.sign, as in the
            # single-band branch below; the raw spectra were used before,
            # which is plain cross-correlation, not windrose correlation.
            S1 = np.sign(np.fft.fft2(I1bnd))
            S2 = np.sign(np.fft.fft2(I2bnd))
            if i == 0:
                Q = (S1)*np.conj(S2)
            else:
                # running average over the bands
                Q_b = (S1)*np.conj(S2)
                Q = (1/(i+1))*Q_b + (i/(i+1))*Q
    else:
        I1sub, I2sub = make_templates_same_size(I1, I2)
        S1, S2 = np.sign(np.fft.fft2(I1sub)), np.sign(np.fft.fft2(I2sub))
        Q = (S1)*np.conj(S2)
    return Q
def phase_corr(I1, I2):
    """ match two imagery through phase correlation

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities

    Returns
    -------
    Q : np.array, size=(m,n)
        cross-spectrum (unit magnitude)

    See Also
    --------
    orientation_corr, cross_corr

    Notes
    -----
    [1] Kuglin & Hines. "The phase correlation image alignment method",
        proceedings of the IEEE international conference on cybernetics and
        society, pp. 163-165, 1975.
    """
    if I1.ndim == 3:  # multi-spectral frequency stacking
        bands = I1.shape[2]
        I1sub, I2sub = make_templates_same_size(I1, I2)
        for i in range(bands):  # loop through all bands
            I1bnd, I2bnd = I1sub[:, :, i], I2sub[:, :, i]
            S1, S2 = np.fft.fft2(I1bnd), np.fft.fft2(I2bnd)
            if i == 0:
                Q = (S1)*np.conj(S2)
                Q = np.divide(Q, abs(Q))
            else:
                Q_b = (S1)*np.conj(S2)
                # BUGFIX: normalize the band's own cross-spectrum; the old
                # code divided Q_b by abs(Q), the running average's
                # magnitude.
                Q_b = np.divide(Q_b, abs(Q_b))
                Q = (1/(i+1))*Q_b + (i/(i+1))*Q
    else:
        I1sub, I2sub = make_templates_same_size(I1, I2)
        S1, S2 = np.fft.fft2(I1sub), np.fft.fft2(I2sub)
        Q = (S1)*np.conj(S2)
        Q = np.divide(Q, abs(Q))
    return Q
def cross_corr(I1, I2):
    """ match two imagery through cross correlation in FFT

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities

    Returns
    -------
    Q : np.array, size=(m,n)
        cross-spectrum

    See Also
    --------
    phase_corr

    Notes
    -----
    [1] Heid & Kääb. "Evaluation of existing image matching methods for
        deriving glacier surface displacements globally from optical
        satellite imagery", Remote sensing of environment, vol. 118
        pp. 339-355, 2012.
    """
    T1, T2 = make_templates_same_size(I1, I2)
    if I1.ndim == 3:  # multi-spectral: running average over the bands
        for b in range(I1.shape[2]):
            G1 = np.fft.fft2(T1[:, :, b])
            G2 = np.fft.fft2(T2[:, :, b])
            Q_b = G1*np.conj(G2)
            Q = Q_b if b == 0 else (1/(b+1))*Q_b + (b/(b+1))*Q
    else:
        Q = np.fft.fft2(T1)*np.conj(np.fft.fft2(T2))
    return Q
def binary_orientation_corr(I1, I2):
    """ match two imagery through binary phase only correlation

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities

    Returns
    -------
    Q : np.array, size=(m,n)
        cross-spectrum

    See Also
    --------
    orientation_corr, phase_only_corr

    Notes
    -----
    [1] Kumar & Juday, "Design of phase-only, binary phase-only, and complex
        ternary matched filters with increased signal-to-noise ratios for
        colored noise", Optics letters, vol. 16(13) pp. 1025--1027, 1991.
    """
    T1, T2 = make_templates_same_size(I1, I2)
    if I1.ndim == 3:  # multi-spectral: running average over the bands
        for b in range(I1.shape[2]):
            G1 = np.fft.fft2(T1[:, :, b])
            G2 = np.fft.fft2(T2[:, :, b])
            H = np.sign(np.real(G2))  # binary (+/-1) weighting of the filter
            Q_b = G1*np.conj(H*G2)
            Q = Q_b if b == 0 else (1/(b+1))*Q_b + (b/(b+1))*Q
    else:
        G1, G2 = np.fft.fft2(T1), np.fft.fft2(T2)
        H = np.sign(np.real(G2))
        Q = G1*np.conj(H*G2)
    return Q
def masked_corr(I1, I2, M1, M2):
    """ match two imagery through masked normalized cross-correlation in FFT

    Parameters
    ----------
    I1 : np.array, size=(m,n)
        array with intensities
    I2 : np.array, size=(m,n)
        array with intensities
    M1 : np.array, size=(m,n)
        array with mask
    M2 : np.array, size=(m,n)
        array with mask

    Returns
    -------
    NCC : np.array, size=(m,n)
        correlation surface

    Notes
    -----
    [1] Padfield. "Masked object registration in the Fourier domain",
        IEEE transactions on image processing, vol. 21(5) pp. 2706-2718, 2011.
    """
    I1sub,I2sub = make_templates_same_size(I1,I2)
    M1sub,M2sub = make_templates_same_size(M1,M2)
    # forward transforms of the two images and their masks
    I1f, I2f = np.fft.fft2(I1sub), np.fft.fft2(I2sub)
    M1f, M2f = np.fft.fft2(M1sub), np.fft.fft2(M2sub)
    # spatial-domain correlation terms of Padfield's masked-NCC formula,
    # each obtained as an inverse transform of a spectral product
    fF1F2 = np.fft.ifft2( I1f*np.conj(I2f) )
    fM1M2 = np.fft.ifft2( M1f*np.conj(M2f) )   # mask overlap counts
    fM1F2 = np.fft.ifft2( M1f*np.conj(I2f) )
    fF1M2 = np.fft.ifft2( I1f*np.conj(M2f) )
    # squared-intensity terms for the denominator (variance estimates)
    # NOTE(review): fM1f2 flips I2sub**2 vertically while ff1M2 does not --
    # looks asymmetric; confirm against eq. (10)-(12) of Padfield [1].
    ff1M2 = np.fft.ifft2( np.fft.fft2(I1sub**2)*np.conj(M2f) )
    fM1f2 = np.fft.ifft2( M1f*np.fft.fft2( np.flipud(I2sub**2) ) )
    # numerator: raw correlation minus the masked-mean correction
    NCC_num = fF1F2 - \
        (np.divide(
            np.multiply( fF1M2, fM1F2 ), fM1M2 ))
    # denominator: product of the two masked standard deviations
    NCC_den = np.multiply( \
        np.sqrt(ff1M2 - np.divide( fF1M2**2, fM1M2) ),
        np.sqrt(fM1f2 - np.divide( fM1F2**2, fM1M2) ))
    NCC = np.divide(NCC_num, NCC_den)
    return NCC
# binary transform functions
def affine_binairy_registration(B1, B2):
# preparation
pT = np.sum(B1) # Lebesgue integral
pO = np.sum(B2)
Jac = pO/pT # Jacobian
x = np.linspace(0,B1.shape[1]-1,B1.shape[1])
y = np.linspace(0,B1.shape[0]-1,B1.shape[0])
X1, Y1 = | |
sub-movie that represents the converted
# mkv file, to get information on it's subtitle tracks.
convertedFName = self.fileName.replace('.mkv', '--converted.mkv')
converted = Movie(self.root, self.subdir, convertedFName)
if subDefault:
for track in converted.subtitleTracks:
vidCommand.extend(['--default-track', '{trackID}:0'.format(
trackID=track.trackID
)])
# We'll use -A to exclude any audio tracks that snuck in. There should
# not be any.
vidCommand.extend(['-A', self.destination])
command = vidCommand + audCommand + subCommand
mkvmerge(command, dFile)
self.merged = True
class SubtitleTrack(object):
"""A single subtitle track.
Args:
movie : (<Movie>)
The parent <Movie> object that this <SubtitleTrack> is a child of.
trackID : (str)
The trackID of the subtitle track this object is to represent.
fileType : (str)
The filetype of this subtitle track.
"""
def __init__(self, movie, trackID, fileType, infoDict):
    """Store the track metadata and initialize conversion-state flags.

    Args:
        movie : (<Movie>)
            The parent <Movie> object this track is a child of.
        trackID : (str)
            The trackID of the subtitle track this object represents.
        fileType : (str)
            The filetype of this subtitle track.
        infoDict : (dict)
            Per-track info; must contain a 'default_track' key ('0'/'1').
    """
    self.movie = movie
    self.trackID = trackID
    self.fileType = fileType
    self.info = infoDict
    self.extracted = False
    self.extractedSup = None
    # When converted, there will be an Idx file and a Sub file
    self.converted = False
    self.convertedIdx = None
    self.convertedSub = None
    # forced means the track contains forced flags
    # forcedOnly means the track ONLY contains forced flags
    self.forced = False
    self.forcedOnly = False
    # If a track contains both forced and unforced captions, an
    # additional forced-only track will be saved out.
    self.convertedIdxForced = None
    self.convertedSubForced = None
    # Default track means this is the main subtitle track.
    # (Simplified from the redundant `True if ... else False` form.)
    self.default = self.info['default_track'] == '1'
def extractTrack(self):
    """Extracts the subtitle this object represents from the parent mkv
    into a standalone .sup file alongside the source movie.

    Side effects: sets self.extractedSup to the destination path and
    self.extracted to True after mkvExtract runs.
    """
    # mkvextract-style track spec: "<trackID>:" (destination supplied
    # separately to mkvExtract below)
    command = "{trackID}:".format(trackID=str(self.trackID))
    # Derive the location the track should be saved to
    fileName = self.movie.fileName.replace('.mkv', '')
    # TODO: Should this be locked into .sup?
    fileName += "_Track{trackID}_sub.sup".format(trackID=self.trackID)
    # Store the path with forward slashes regardless of platform
    self.extractedSup = os.path.join(
        self.movie.root,
        self.movie.subdir,
        fileName
    ).replace('\\', '/')
    print ""
    print "Extracting trackID {ID} of type {type} from {file}".format(
        ID=self.trackID,
        type=self.fileType,
        file=self.movie.path
    )
    print ""
    mkvExtract(self.movie.path, command, self.extractedSup)
    self.extracted = True
def convertTrack(self):
"""Converts and resizes the subtitle track"""
print ""
print "Converting track {ID} at res: {res}p".format(
ID=self.trackID,
res=str(self.movie.resolution)
)
# BDSup2Sub doesn't take numerical values for resolution
if self.movie.resolution == 480:
res = 'ntsc'
else:
# Should be '1080p' or '720p'
res = "{res}p".format(res=str(self.movie.resolution))
# Our only option flag is really resolution
options = "-r {res}".format(res=res)
# Use the extractedSup as a baseline, replace the file extension
# We check for and replace the period to make sure we grab the ext
self.convertedIdx = self.extractedSup.replace('.sup', '.idx')
self.convertedSub = self.extractedSup.replace('.sup', '.sub')
print "Saving IDX file to {dest}".format(dest=self.convertedIdx)
# Using deprecated os.popen (easier) to put shell output in list
shellOut = bdSup2Sub(self.extractedSup, options, self.convertedIdx, popen=True)
# We need to check the results for FORCED subtitles
#
# If the amount of Forced subtitles is less than the total subs
# We'll just create a new .idx file in addition to the default one.
#
# If the amount of Forced subtitles is the same as the total subs,
# the entire subtitle track is forced, so we remove the resultFiles
# and create a new FORCED .idx
totalCount = 0
for line in shellOut:
print line
if line.startswith('#'):
lineList = line.split(' ')
# The last count entry from BD will set the total
try:
totalCount = int(lineList[1])
except ValueError:
pass
# There should only be 1 entry with 'forced' in it, that entry
# looks like:
#
# 'Detected 39 forced captions.'
if 'forced' in line:
forcedCaptions = int(line.split()[1])
if forcedCaptions > 0:
self.forced = True
if forcedCaptions == totalCount:
self.forcedOnly = True
print ""
print "Subtitle track has forced titles?", self.forced
print "Subtitle track is ONLY forced titles?", self.forcedOnly
print ""
if self.forced:
self.convertedIdxForced = self.convertedIdx.replace(
'.idx',
'_forced.idx'
)
self.convertedSubForced = self.convertedSub.replace(
'.sub',
'_forced.sub'
)
if self.forced and not self.forcedOnly:
# If some forced subtitles exist (but not the entire subtitle
# track is forced), we'll create a new _FORCED.idx in addition
# to the one already exported.
options += ' -D'
bdSup2Sub(self.extractedSup, options, self.convertedIdxForced)
elif self.forced and self.forcedOnly:
# If the track is entirely forced subtitles, we'll rename the
# extracted file to be the _forced file.
# First we need to see if the file already exists, as renaming
# will fail if it does.
# If it exists, we'll assume that something went wrong, and
# delete it.
if os.path.isfile(self.convertedIdxForced):
os.remove(self.convertedIdxForced)
if os.path.isfile(self.convertedSubForced):
os.remove(self.convertedSubForced)
# Now for the renaming.
os.rename(self.convertedIdx, self.convertedIdxForced)
os.rename(self.convertedSub, self.convertedSubForced)
self.converted = True
#===============================================================================
# FUNCTIONS
#===============================================================================
def bdSup2Sub(file, options, dest, popen=False):
    """Builds and runs a BDSup2Sub command line to convert subtitles.

    Args:
        file : (str)
            The source file the subtitles must be converted from.
        options : (str)
            Resolution, Forced Only and other CLI commands for BDSup2Sub
        dest : (str)
            Destination filename to be written to. If a pair of files,
            BDSup2Sub will automatically write the paired file based off this
            string.
        popen=False : (bool)
            If True, os.popen will be used rather than os.system, and the
            console output will be returned as a list of lines, rather than
            just executed.

    Raises:
        N/A

    Returns:
        [str]
            If popen=True, the console output of BDSup2Sub will be returned as
            a list.
    """
    # The doubled-up outer quotes are required so os.system on Windows keeps
    # the individually quoted java/jar/file paths intact.
    command = '""{java}" -jar "{jar}" {options} -o "{dest}" "{file}""'.format(
        java=Config.java,
        jar=Config.sup2Sub,
        options=options,
        dest=dest,
        file=file,
    )
    print('')
    print("Sending to bdSup2Sub")
    print(command)
    print('')
    if not popen:
        os.system(command)
    else:
        # Deprecated os.popen keeps this simple: capture stdout as lines.
        return os.popen(command).read().split('\n')
def handBrake(file, options, dest):
"""CLI command builder for converting video and audio with Handbrake
Args:
file : (str)
The source file the video and audio tracks are to be converted from.
options : (str)
The option command line arguments to pass to handbrake
dest : (str)
The destination file to write to.
Raises:
N/A
Returns:
None
"""
c = '""' + Config.handBrake + '"' + ' -i ' + file + ' -o ' + dest + ' ' + \
options + '"'
print ''
print "HandBrake Settings:"
print c
print ''
os.system(c)
def mkvExtract(file, command, dest):
    """CLI command builder for extracting tracks with mkvextract

    Args:
        file : (str)
            The source file the tracks are to be extracted from.
        command : (str)
            The track command to be executed. Usually looks like: '3:'.
        dest : (str)
            The destination file to be written to.

    Raises:
        N/A

    Returns:
        None

    The full command this builds should look like:
        '"mkvextract tracks I:/src/fold/file.mkv 3:I:/dest/fold/subtitle.sup "'
    """
    # Note: `command` already ends with ':', so it is concatenated directly
    # onto the destination path ('3:' + dest -> '3:I:/dest/...').
    cmd = '""{tool}" tracks "{source}" {track}{target} "'.format(
        tool=Config.mkvExtract,
        source=file,
        track=command,
        target=dest,
    )
    os.system(cmd)
def mkvInfo(movie):
    """Uses CLI to fetch names all audio, video and subtitle tracks from a mkv

    Args:
        movie : (<tools.Movie>)
            The file to get the info from

    Raises:
        N/A

    Returns:
        [[str], [str], [str]]
            A list containing a list of video tracks, audio tracks and
            subtitle tracks.
    """
    # Map mkvmerge codec ids to the short file-type names used elsewhere.
    AUDIO_TYPES = {
        'A_AAC': 'aac',
        'A_DTS': 'dts',
        'A_AC3': 'ac3',
        'A_EAC3': 'aec3',
        'A_TRUEHD': 'truehd',
        'A_MP3': 'mp3',
        'A_MS/ACM': 'acm',
        'A_PCM/INT/LIT': 'pcm'
    }
    SUBTITLE_TYPES = {
        'S_VOBSUB': 'vobsub',
        'S_HDMV/PGS': 'pgs'
    }
    # mkvmerge -I returns a listing of each track, one per line, e.g.:
    #
    # File 'I:\Ripmaster\toConvert\JR__1080\JR_t01.mkv': container: Matroska []
    # Track ID 0: video (V_MPEG4/ISO/AVC) [...]
    # Track ID 1: audio (A_AC3) [...]
    # Track ID 2: audio (A_TRUEHD) [...]
    # Track ID 3: subtitles (S_HDMV/PGS) [...]
    mergeOutput = Popen(
        [Config.mkvMerge, '-I', movie.path],
        shell=True,
        stdout=PIPE
    ).stdout
    videoTracks = []  # No plans to use video tracks for now
    audioTracks = []
    subtitleTracks = []
    for line in mergeOutput.readlines():
        if not line.startswith('Track ID'):
            continue
        trackID, trackType, trackDict = _trackInfo(line)
        if trackType == 'video':
            # Since video tracks aren't really used right now, we'll just
            # throw this stuff into a list.
            videoTracks.append([trackID, trackDict])
        elif trackType == 'audio':
            audioTracks.append(
                AudioTrack(movie, trackID,
                           AUDIO_TYPES[trackDict['codec_id']], trackDict))
        elif trackType == 'subtitles':
            subtitleTracks.append(
                SubtitleTrack(movie, trackID,
                              SUBTITLE_TYPES[trackDict['codec_id']],
                              trackDict))
    return videoTracks, audioTracks, subtitleTracks
def mkvmerge(command, dest):
"""CLI command builder for merging tracks with mkvmerge
Args:
command : | |
self.evaluate(layer.trainable_variables()))
def test_parallel_sum_with_shared_layers(self):
    """ParallelSum of one shared layer 5x: one variable, 5x combined scaling."""
    inner_layer = ScalarMultiplicationLayer(2.0)
    layer = layers.ParallelSum([inner_layer]*5)
    inputs = tf.constant([1.0, -2.0, 3.0])
    output_shape = layer.build(inputs.shape)
    # The shared inner layer contributes exactly one trainable tensor.
    self.assertLen(layer.trainable_tensors(), 1)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    # Each of the 5 branches multiplies by 2.0, so the sum scales by 10.
    layer_sum = 2.0 * 5
    self.evaluate(tf.global_variables_initializer())
    self.assertAllClose(
        self.evaluate(output),
        [1.0*layer_sum, -2.0*layer_sum, 3.0*layer_sum])
    self.assertAllClose(
        self.evaluate(layer.trainable_tensors()),
        self.evaluate(layer.trainable_variables()))
def test_parallel_sum_regularization_loss(self):
    """Regularization loss of a ParallelSum sums its branches' losses."""
    layer = layers.ParallelSum([
        ScalarMultiplicationLayer(
            initial_value=2.0,
            regularizer=tf.keras.regularizers.l2(3.0)),
        ScalarMultiplicationLayer(
            initial_value=4.0,
            regularizer=tf.keras.regularizers.l2(5.0))
    ])
    inputs = tf.constant(10.0)
    layer.build(inputs.shape)
    layer.apply(inputs, training=True)
    self.evaluate(tf.global_variables_initializer())
    # l2(c) contributes c * value**2 for each scalar variable.
    self.assertAllClose(
        (3.0 * 2.0**2) + (5.0 * 4.0**2),
        self.evaluate(layer.regularization_loss()))
def test_parallel_sum_regularization_loss_with_reused_layer(self):
    """A layer reused within one ParallelSum is regularized only once."""
    # Verify that we only regularize a layer's variables once, even if we use
    # that layer more than once in a model.
    inner_layer = ScalarMultiplicationLayer(
        initial_value=2.0,
        regularizer=tf.keras.regularizers.l2(3.0))
    outer_layer = layers.ParallelSum([inner_layer, inner_layer])
    inputs = tf.constant(10.0)
    outer_layer.build(inputs.shape)
    outer_layer.apply(inputs, training=True)
    self.evaluate(tf.global_variables_initializer())
    # Single l2(3.0) penalty on the one shared scalar, not doubled.
    self.assertAllClose(
        3.0 * 2.0**2,
        self.evaluate(outer_layer.regularization_loss()))
def test_parallel_sum_with_broadcast_scalar(self):
    """A scalar Constant branch broadcasts when summed with a vector branch."""
    layer = layers.ParallelSum([layers.Identity(), Constant(4.0)])
    inputs = tf.constant([1.0, -2.0, 3.0])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllClose(self.evaluate(output), [1.0+4.0, -2.0+4.0, 3.0+4.0])
def test_parallel_product_with_variables(self):
    """ParallelProduct multiplies branch outputs elementwise; 2 variables."""
    layer0 = ScalarMultiplicationLayer(4.0)
    layer1 = ScalarMultiplicationLayer(5.0)
    layer = layers.ParallelProduct([layer0, layer1])
    inputs = tf.constant([1.0, -2.0, 3.0])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    # Elementwise product of (x*4) and (x*5).
    self.assertAllClose(
        self.evaluate(output),
        [1.0*4.0 * 1.0*5.0, -2.0*4.0 * -2.0*5.0, 3.0*4.0 * 3.0*5.0])
    self.assertLen(layer.trainable_tensors(), 2)
    self.assertAllClose(
        self.evaluate(layer.trainable_tensors()),
        self.evaluate(layer.trainable_variables()))
def test_parallel_product_with_broadcast_scalar(self):
    """A scalar Constant branch broadcasts when multiplied with a vector."""
    layer = layers.ParallelProduct([layers.Identity(), Constant(4.0)])
    inputs = tf.constant([1.0, -2.0, 3.0])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllClose(self.evaluate(output), [1.0*4.0, -2.0*4.0, 3.0*4.0])
def test_sequential_switch_regularization_loss_with_weird_layer_reuse(self):
    """Regularization when a layer is used both inside and outside a Switch."""
    # Document how regularization losses should be computed when layers are
    # reuse in weird ways.
    mask = tf.placeholder(shape=(2,), dtype=tf.float32)
    # Weird behavior: `inner_layer` is used both inside and outside the Switch.
    inner_layer = ScalarMultiplicationLayer(
        initial_value=4.0,
        regularizer=tf.keras.regularizers.l2(5.0))
    outer_layer = layers.Sequential(
        [inner_layer,
         layers.Switch(mask, [layers.Zeros(), inner_layer])])
    inputs = tf.constant([1.0, -2.0, 3.0])
    outer_layer.build(inputs.shape)
    outer_layer.apply(inputs, training=True)
    reg_loss = outer_layer.regularization_loss()
    with self.session() as sess:
        # The ScalarMultiplicationLayer should be regularized regardless of which
        # option is selected, since it is used both inside and outside the Switch.
        sess.run(tf.global_variables_initializer())
        self.assertAllClose(5.0 * 4.0**2, sess.run(reg_loss, {mask: [1, 0]}))
        self.assertAllClose(5.0 * 4.0**2, sess.run(reg_loss, {mask: [0, 1]}))
def test_conv2d_3x3(self):
    """3x3 all-ones conv on a 6x6 ones image: interior 9, edges 6, corners 4."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        kernel_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [6], [6], [6], [4]], [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]], [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]], [[4], [6], [6], [6], [6], [4]]]])
def test_conv2d_3x3_explicit_padding(self):
    """Explicit padding matches the default padding output for 3x3 stride 1."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        kernel_initializer=tf.initializers.ones(),
        use_explicit_padding=True)
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [6], [6], [6], [4]],
          [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]],
          [[4], [6], [6], [6], [6], [4]]]])
def test_conv2d_3x3_bias(self):
    """With use_bias=True, a constant 0.5 bias is added to every output."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        kernel_initializer=tf.initializers.ones(),
        bias_initializer=tf.initializers.constant(0.5),
        use_bias=True)
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    # Same counts as the unbiased 3x3 case, shifted by +0.5 everywhere.
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4.5], [6.5], [6.5], [6.5], [6.5], [4.5]],
          [[6.5], [9.5], [9.5], [9.5], [9.5], [6.5]],
          [[6.5], [9.5], [9.5], [9.5], [9.5], [6.5]],
          [[6.5], [9.5], [9.5], [9.5], [9.5], [6.5]],
          [[6.5], [9.5], [9.5], [9.5], [9.5], [6.5]],
          [[4.5], [6.5], [6.5], [6.5], [6.5], [4.5]]]])
def test_conv2d_3x3_stride2(self):
    """3x3 conv with stride 2 on 6x6 ones produces a 3x3 output."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        strides=(2, 2),
        kernel_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[9], [9], [6]],
          [[9], [9], [6]],
          [[6], [6], [4]]]])
def test_conv2d_3x3_stride2_explicit_padding(self):
    """Stride 2 with explicit padding shifts which pixels get partial sums."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        strides=(2, 2),
        kernel_initializer=tf.initializers.ones(),
        use_explicit_padding=True)
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    # Note: differs from the non-explicit stride-2 case (partial sums move
    # to the top-left instead of the bottom-right).
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [6]], [[6], [9], [9]], [[6], [9], [9]]]])
def test_conv2d_3x3_dilation2(self):
    """3x3 conv with dilation 2 covers a 5x5 receptive field on 6x6 ones."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        dilation_rates=(2, 2),
        kernel_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [4], [6], [6], [4], [4]], [[4], [4], [6], [6], [4], [4]],
          [[6], [6], [9], [9], [6], [6]], [[6], [6], [9], [9], [6], [6]],
          [[4], [4], [6], [6], [4], [4]], [[4], [4], [6], [6], [4], [4]]]])
def test_conv2d_3x3_dilation2_explicit_padding(self):
    """Dilation 2 with explicit padding matches the default-padding output."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        dilation_rates=(2, 2),
        kernel_initializer=tf.initializers.ones(),
        use_explicit_padding=True)
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [4], [6], [6], [4], [4]],
          [[4], [4], [6], [6], [4], [4]],
          [[6], [6], [9], [9], [6], [6]],
          [[6], [6], [9], [9], [6], [6]],
          [[4], [4], [6], [6], [4], [4]],
          [[4], [4], [6], [6], [4], [4]]]])
def test_conv2d_3x3_dilation2_stride2(self):
    """Dilation 2 combined with stride 2 on 6x6 ones yields a 3x3 output."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        strides=(2, 2),
        dilation_rates=(2, 2),
        kernel_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [4]],
          [[6], [9], [6]],
          [[4], [6], [4]]]])
def test_conv2d_3x3_dilation2_stride2_explicit_padding(self):
    """Dilation 2 + stride 2 with explicit padding matches default padding."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        strides=(2, 2),
        dilation_rates=(2, 2),
        kernel_initializer=tf.initializers.ones(),
        use_explicit_padding=True)
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [4]],
          [[6], [9], [6]],
          [[4], [6], [4]]]])
def test_conv2d_3x3_int_kernel_size_and_strides(self):
    """Scalar ints for kernel_size/strides behave like the tuple forms."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=3,
        strides=2,
        kernel_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    # Same expected values as test_conv2d_3x3_stride2.
    self.assertAllEqual(
        self.evaluate(output),
        [[[[9], [9], [6]],
          [[9], [9], [6]],
          [[6], [6], [4]]]])
def test_conv2d_output_shape(self):
    """build() predicts the shape apply() produces across sizes/kernels/strides."""
    for image_size in [1, 2, 3, 32, 201, 224]:
        inputs = tf.ones([32, image_size, image_size, 1])
        for kernel_size in [1, 2, 3, 4, 5]:
            for strides in [1, 2, 3]:
                layer = layers.Conv2D(
                    filters=1,
                    kernel_size=(kernel_size, kernel_size),
                    strides=(strides, strides),
                    kernel_initializer=tf.initializers.ones())
                output_shape = layer.build(inputs.shape)
                output = layer.apply(inputs, training=True)
                self.assertEqual(output.shape, output_shape)
def test_conv2d_trainable_tensors(self):
    """trainable_tensors() is non-empty, tf.Tensor-typed, and equals variables."""
    layer = layers.Conv2D(
        filters=1,
        kernel_size=(3, 3),
        kernel_initializer=tf.initializers.ones())
    input_shape = tf.TensorShape([1, 6, 6, 1])
    layer.build(input_shape)
    trainable_tensors = layer.trainable_tensors()
    self.assertNotEmpty(trainable_tensors)
    for tensor in trainable_tensors:
        self.assertIsInstance(tensor, tf.Tensor)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllClose(
        self.evaluate(layer.trainable_tensors()),
        self.evaluate(layer.trainable_variables()))
def test_conv2d_kernel_regularization_loss(self):
    """Kernel l2 loss scales with the number of kernel parameters."""
    layer = layers.Conv2D(
        filters=12,
        kernel_size=(3, 3),
        kernel_initializer=tf.initializers.constant(0.5),
        kernel_regularizer=tf.keras.regularizers.l2(3.0))
    inputs = tf.random_uniform([32, 28, 28, 8])
    layer.build(inputs.shape)
    layer.apply(inputs, training=True)
    # Number of parameters in the convolutional kernel.
    self.evaluate(tf.global_variables_initializer())
    kernel_params = 3 * 3 * 8 * 12
    self.assertAllClose(
        kernel_params * 3.0 * 0.5**2,
        self.evaluate(layer.regularization_loss()))
def test_conv2d_bias_regularization_loss(self):
    """Bias l2 loss scales with the number of bias parameters (filters)."""
    layer = layers.Conv2D(
        filters=12,
        kernel_size=(3, 3),
        bias_initializer=tf.initializers.constant(0.5),
        bias_regularizer=tf.keras.regularizers.l2(3.0),
        use_bias=True)
    inputs = tf.random_uniform([32, 28, 28, 8])
    layer.build(inputs.shape)
    layer.apply(inputs, training=True)
    # One bias parameter per filter.
    self.evaluate(tf.global_variables_initializer())
    bias_params = 12
    self.assertAllClose(
        bias_params * 3.0 * 0.5**2,
        self.evaluate(layer.regularization_loss()))
def test_depthwise_conv2d_3x3(self):
    """Depthwise 3x3 ones-kernel on 6x6 ones matches the regular conv case."""
    layer = layers.DepthwiseConv2D(
        kernel_size=(3, 3),
        depthwise_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [6], [6], [6], [4]], [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]], [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]], [[4], [6], [6], [6], [6], [4]]]])
def test_depthwise_conv2d_3x3_explicit_padding(self):
    """Depthwise explicit padding matches default padding for 3x3 stride 1."""
    layer = layers.DepthwiseConv2D(
        kernel_size=(3, 3),
        depthwise_initializer=tf.initializers.ones(),
        use_explicit_padding=True)
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [6], [6], [6], [4]],
          [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]],
          [[6], [9], [9], [9], [9], [6]],
          [[4], [6], [6], [6], [6], [4]]]])
def test_depthwise_conv2d_3x3_stride2(self):
    """Depthwise 3x3 stride 2 on 6x6 ones produces a 3x3 output."""
    layer = layers.DepthwiseConv2D(
        kernel_size=(3, 3),
        strides=(2, 2),
        depthwise_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[9], [9], [6]],
          [[9], [9], [6]],
          [[6], [6], [4]]]])
def test_depthwise_conv2d_3x3_stride2_explicit_padding(self):
    """Depthwise stride 2 with explicit padding shifts the partial sums."""
    layer = layers.DepthwiseConv2D(
        kernel_size=(3, 3),
        strides=(2, 2),
        depthwise_initializer=tf.initializers.ones(),
        use_explicit_padding=True)
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    # Note: differs from the non-explicit stride-2 case (partial sums move
    # to the top-left instead of the bottom-right).
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [6], [6]],
          [[6], [9], [9]],
          [[6], [9], [9]]]])
def test_depthwise_conv2d_3x3_dilation2(self):
    """Depthwise 3x3 with dilation 2 (scalar args) on 6x6 ones."""
    layer = layers.DepthwiseConv2D(
        kernel_size=(3, 3),
        strides=1,
        dilation_rates=2,
        depthwise_initializer=tf.initializers.ones())
    inputs = tf.ones([1, 6, 6, 1])
    output_shape = layer.build(inputs.shape)
    output = layer.apply(inputs, training=True)
    self.assertEqual(output_shape, output.shape)
    self.evaluate(tf.global_variables_initializer())
    self.assertAllEqual(
        self.evaluate(output),
        [[[[4], [4], [6], [6], [4], [4]], [[4], [4], [6], [6], [4], [4]],
          [[6], [6], [9], [9], [6], [6]], [[6], [6], [9], [9], [6], [6]],
          [[4], [4], [6], [6], [4], [4]], [[4], [4], [6], [6], [4], [4]]]])
def test_depthwise_conv2d_3x3_dilation2_explicit_padding(self):
layer = layers.DepthwiseConv2D(
kernel_size=(3, 3),
strides=1,
dilation_rates=2,
depthwise_initializer=tf.initializers.ones(),
use_explicit_padding=True)
inputs = tf.ones([1, 6, 6, 1])
output_shape = layer.build(inputs.shape)
output = layer.apply(inputs, training=True)
self.assertEqual(output_shape, output.shape)
self.evaluate(tf.global_variables_initializer())
self.assertAllEqual(
self.evaluate(output),
[[[[4], [4], [6], | |
# -*- coding: utf-8 -*-
import os
import mimetypes
from io import BytesIO
from typing import Iterable
from concurrent.futures import ThreadPoolExecutor, as_completed
from abeja.common.config import UPLOAD_WORKER_COUNT
from abeja.common.logging import logger
from abeja.common.file_helpers import generate_path_iter
from .api.client import APIClient
from .file import DatalakeFile, Files, FileIterator
class Channel:
    """a model class for a channel

    Properties:
        - organization_id (str)
        - channel_id (str)
        - name (str)
        - display_name (str)
        - description (str)
        - archived (bool)
        - created_at (datetime)
        - updated_at (datetime)
    """

    def __init__(self, api: APIClient, organization_id: str, channel_id: str,
                 name: str=None, description: str=None, display_name: str=None,
                 storage_type: str=None, created_at: str=None,
                 updated_at: str=None, archived: bool=False) -> None:
        # Low-level API client shared by every operation on this channel.
        self._api = api
        self.organization_id = organization_id
        self.channel_id = channel_id
        self.name = name
        self.description = description
        self.display_name = display_name
        self.storage_type = storage_type
        self.created_at = created_at
        self.updated_at = updated_at
        self.archived = archived

    @property
    def files(self) -> Files:
        """Get datalake Files object

        Request syntax:
            .. code-block:: python

                channel = client.get_channel(channel_id='1230000000000')
                channel.files

        Returns:
            :class:`Files <abeja.datalake.file.Files>` object
        """
        return Files(self._api, self.organization_id, self.channel_id)

    def list_files(
            self,
            start: str=None,
            end: str=None,
            timezone: str=None,
            sort: str=None,
            next_page_token: str=None,
            limit: int=None,
            prefetch: bool=False,
            query: str=None) -> FileIterator:
        """get datalake files in the channel

        Request syntax:
            .. code-block:: python

                for f in channel.list_files():
                    pass

        Params:
            - **start** (str): start date of target uploaded files
            - **end** (str): end date of target uploaded files
            - **timezone** (str): timezone of specified start and end date
            - **query** (str):
                query to search.
                It is possible to filter what contain specific value by describing like "x-abeja-meta-filename:filename".
            - **sort** (str):
                the order of the file list.
                multiple items can be specified by separating with commas (,).
                It is possible to sort in descending order by specifying a hyphen (-) in front of the item.
                By default, the list is sorted by uploaded_at in ascending order.
            - **next_page_token** (str): **[optional]** token to fetch the next page
            - **limit** (int): **[optional]** max number of files per page
            - **prefetch** (bool): **[optional]** prefetch file contents while iterating

        Return type:
            :class:`FileIterator <abeja.datalake.file.FileIterator>` object
        """
        return FileIterator(
            self._api,
            self.organization_id,
            self.channel_id,
            start=start,
            end=end,
            timezone=timezone,
            next_page_token=next_page_token,
            items_per_page=limit,
            sort=sort,
            prefetch=prefetch,
            query=query)

    def get_file(self, file_id: str) -> DatalakeFile:
        """get a datalake file in the channel

        Request syntax:
            .. code-block:: python

                file_id = '20180101T000000-00000000-1111-2222-3333-999999999999'
                datalake_file = channel.get_file(file_id=file_id)

        Params:
            - **file_id** (str): FILE_ID

        Return type:
            :class:`DatalakeFile <abeja.datalake.file.DatalakeFile>` object
        """
        # Resolves the file's download URI and metadata in a single API call.
        download_info = self._api.get_channel_file_download(
            self.channel_id, file_id)
        return DatalakeFile(
            api=self._api,
            organization_id=self.organization_id,
            channel_id=self.channel_id,
            file_id=download_info.get('file_id'),
            content_type=download_info.get('content_type'),
            download_uri=download_info.get('download_uri'),
            metadata=download_info.get('metadata'),
            url_expires_on=download_info.get('url_expires_on'),
            uploaded_at=download_info.get('uploaded_at'),
            lifetime=download_info.get('lifetime'))

    def upload(self, file_obj: BytesIO, content_type: str, metadata: dict=None,
               lifetime: str=None, conflict_target: str=None) -> DatalakeFile:
        """upload a content to a channel with file-like object.

        Request syntax:
            .. code-block:: python

                content_type = 'image/jpeg'
                metadata = {
                    'label': 'example'
                }
                with open('example.csv', 'rb') as f:
                    response = channel.upload(f, content_type, metadata=metadata)

        Params:
            - **file_obj** (a file-like object) : a file-like object to upload. It must implement the read method, and must return bytes.
            - **content_type** (str): MIME type of content.
            - **metadata** (dict): **[optional]** metadata to be added to uploaded file. Object can not be set to the key or value of dict. It must be a string.
            - **lifetime** (str): **[optional]** each one of `1day` / `1week` / `1month` / `6months`. the file will be deleted after the specified time.
            - **conflict_target** (str): **[optional]** return `409 Conflict` when the same value of specified key already exists in channel.

        Return type:
            :class:`DatalakeFile <abeja.datalake.file.DatalakeFile>` object

        Returns:
            a file uploaded to a channel
        """
        if not metadata:
            metadata = {}
        # ignore Content-Type in metadata
        metadata = {
            k: v for k,
            v in metadata.items() if k.lower() not in {
                'content_type',
                'content-type'}}
        # add x-abeja-meta- prefix
        metadata = {
            'x-abeja-meta-{}'.format(k): str(v) for k,
            v in metadata.items()}
        res = self._api.post_channel_file_upload(
            self.channel_id,
            file_obj,
            content_type,
            metadata=metadata,
            lifetime=lifetime,
            conflict_target=conflict_target)
        return DatalakeFile(
            api=self._api,
            organization_id=self.organization_id,
            channel_id=self.channel_id,
            file_id=res.get('file_id'),
            content_type=res.get('content_type'),
            metadata=res.get('metadata'),
            uploaded_at=res.get('uploaded_at'),
            lifetime=res.get('lifetime'))

    def upload_file(
            self, file_path: str, metadata: dict=None, content_type: str=None,
            lifetime: str=None, conflict_target: str=None) -> DatalakeFile:
        """upload a file to a channel.

        This method infers the content_type of given file if content_type is not specified,
        and set the filename as `x-abeja-meta-filename` in metadata.

        Request syntax:
            .. code-block:: python

                metadata = {
                    'label': 'example'
                }
                response = channel.upload('~/example.txt', metadata=metadata)

        Params:
            - **file_path** (str) : path to a file
            - **metadata** (dict): **[optional]** metadata to be added to uploaded file.
            - **content_type** (str): **[optional]** MIME type of content. Content-Type is assumed by the extension if not specified.
            - **lifetime** (str): **[optional]** each one of `1day` / `1week` / `1month` / `6months`. the file will be deleted after the specified time.
            - **conflict_target** (str): **[optional]** return `409 Conflict` when the same value of specified key already exists in channel.

        Return type:
            :class:`DatalakeFile <abeja.datalake.file.DatalakeFile>` object

        Returns:
            a file uploaded to a channel
        """
        if not content_type:
            # NOTE: guess_type may return None; in that case the API receives
            # no content type.
            mime_type, _ = mimetypes.guess_type(file_path)
            content_type = mime_type
        if not metadata:
            metadata = {}
        # keep the user defined "metadata" unchanged
        update_metadata = {**metadata}
        # add `x-abeja-meta-filename` if not defined
        if 'filename' not in metadata:
            update_metadata['filename'] = os.path.basename(file_path)
        with open(file_path, 'rb') as f:
            return self.upload(
                f,
                content_type,
                metadata=update_metadata,
                lifetime=lifetime,
                conflict_target=conflict_target)

    def upload_dir(
            self,
            dir_path: str,
            metadata: dict=None,
            content_type: str=None,
            lifetime: str=None,
            conflict_target: str=None,
            recursive: bool=False,
            use_thread: bool=True) -> Iterable[DatalakeFile]:
        """upload files in directory to a channel.

        This method infers the content_type of given file if content_type is not specified,
        and set the filename as `x-abeja-meta-filename` in metadata.

        Note: this method returns list ( not generator ) to make sure upload process will be done here.

        Request syntax:
            .. code-block:: python

                metadata = {
                    'label': 'example'
                }
                response = channel.upload_dir('./source_dir', metadata)

        Params:
            - **dir_path** (str) : path to the directory whose files are uploaded
            - **metadata** (dict): metadata to be added to each uploaded file. **[optional]**
            - **content_type** (str): MIME type of contents. Content-Type is assumed by extensions if not specified **[optional]**
            - **lifetime** (str): **[optional]** each one of `1day` / `1week` / `1month` / `6months`. the file will be deleted after the specified time.
            - **conflict_target** (str): **[optional]** return `409 Conflict` when the same value of specified key already exists in channel.
            - **recursive** (bool): **[optional]** also upload files in sub directories.
            - **use_thread** (bool): **[optional]** upload concurrently with a thread pool.

        Return type:
            list of :class:`DatalakeFile <abeja.datalake.file.DatalakeFile>` object

        Returns:
            A list of DatalakeFile successfully uploaded.
        """
        file_path_iter = generate_path_iter(dir_path, recursive=recursive)
        if use_thread:
            upload_files_func = self._upload_files_threaded
        else:
            upload_files_func = self._upload_files_unthreaded
        return upload_files_func(
            file_path_iter,
            content_type=content_type,
            metadata=metadata,
            lifetime=lifetime,
            conflict_target=conflict_target)

    def _upload_files_threaded(
            self,
            file_paths: Iterable[str],
            content_type: str=None,
            metadata: dict=None,
            lifetime: str=None,
            conflict_target: str=None) -> Iterable[DatalakeFile]:
        """upload files asynchronously using a thread pool

        this method does not return a generator to avoid lazy evaluation.
        Failed uploads are logged and skipped; only successes are returned.
        """
        files = []
        with ThreadPoolExecutor(max_workers=UPLOAD_WORKER_COUNT) as executor:
            futures = []
            for f in file_paths:
                futures.append(
                    executor.submit(
                        self.upload_file,
                        f,
                        metadata=metadata,
                        content_type=content_type,
                        lifetime=lifetime,
                        conflict_target=conflict_target))
            for f in as_completed(futures):
                try:
                    files.append(f.result())
                except Exception as e:
                    # Best-effort: log the failure and keep uploading the rest.
                    logger.error(e)
        return files

    def _upload_files_unthreaded(
            self,
            file_paths: Iterable[str],
            content_type: str=None,
            metadata: dict=None,
            lifetime: str=None,
            conflict_target: str=None) -> Iterable[DatalakeFile]:
        """upload files synchronously, one at a time

        this method does not return a generator to avoid lazy evaluation.
        Failed uploads are logged and skipped; only successes are returned.
        """
        files = []
        for file_path in file_paths:
            try:
                file = self.upload_file(
                    file_path,
                    content_type=content_type,
                    metadata=metadata,
                    lifetime=lifetime,
                    conflict_target=conflict_target)
                files.append(file)
            except Exception as e:
                # Best-effort: log the failure and keep uploading the rest.
                logger.error(e)
        return files

    def list_datasources(self):
        # Not supported yet.
        raise NotImplementedError

    def add_datasource(self):
        # Not supported yet.
        raise NotImplementedError

    def remove_datasource(self):
        # Not supported yet.
        raise NotImplementedError
class Channels:
"""a class for handling channels"""
    def __init__(self, api: APIClient, organization_id: str) -> None:
        # Low-level API client used for all channel operations.
        self._api = api
        # Organization that owns the channels this instance manages.
        self.organization_id = organization_id
def create(
self,
name: str,
description: str,
storage_type: str) -> Channel:
"""create a channel
API reference: POST /organizations/<organization_id>/channels/
Request Syntax:
.. code-block:: python
params = {
"name": "test-channel",
"description": "test channel",
"storage_type": "datalake"
}
channel = channels.create(**params)
Params:
- **name** (str): channel name
- **description** (str): channel description
- **storage_type** (str): type of storage, datalake or file
Return type:
:class:`Channel <abeja.datalake.channel.Channel>` object
"""
res = self._api.create_channel(
self.organization_id, name, description, storage_type)
channel_info = res.get('channel', {})
return Channel(
self._api,
organization_id=self.organization_id,
channel_id=channel_info.get('channel_id'),
name=channel_info.get('name'),
display_name=channel_info.get('display_name'),
description=channel_info.get('description'),
storage_type=channel_info.get('storage_type'),
archived=channel_info.get('archived', False),
created_at=channel_info.get('created_at'),
updated_at=channel_info.get('updated_at'))
def list(self, limit: int=None, offset: int=None) -> Iterable[Channel]:
"""list channels
API reference: GET /organizations/<organization_id>/channels/
Request Syntax:
.. code-block:: python
channel = channels.list()
Return type:
generator of :class:`Channel <abeja.datalake.channel.Channel>` objects
"""
res = self._api.list_channels(
self.organization_id, limit=limit, offset=offset)
for item in res['channels']:
yield Channel(
self._api,
organization_id=self.organization_id,
channel_id=item.get('channel_id'),
name=item.get('name'),
display_name=item.get('display_name'),
description=item.get('description'),
storage_type=item.get('storage_type'),
archived=item.get('archived', False),
created_at=item.get('created_at'),
updated_at=item.get('updated_at'))
def get(self, channel_id: str) -> Channel:
"""get a channel
API reference: GET /organizations/<organization_id>/channels/<channel_id>
Request Syntax:
.. code-block:: python
channel_id = '1234567890123'
channel = channels.get(channel_id=channel_id)
| |
= stats
self.init_incomplete = True
# Update plot
    def __call__(self, key, E, P):
        """Refresh both diagnostic axes with the stats recorded at ``key``.

        ``key`` is ``(k, kObs, faus)``: time index, observation index, and a
        string of 'f'/'a'/'u' flags. ``E``/``P`` are part of the liveplotter
        call signature but unused here; all data is read from ``self.stats``.
        """
        k, kObs, faus = key
        stats = self.stats
        chrono = stats.HMM.t
        ax0, ax1 = self.axs
        def update_arrays(lines):
            # Append the stat value at `key` to each line's (tt, data) buffers.
            for name, ln in lines.items():
                stat = deep_getattr(stats, name)
                t = chrono.tt[k]  # == chrono.ttObs[kObs]
                if isinstance(stat, FAUSt):
                    # ln['data'] will contain duplicates for f/a times.
                    if ln['plot_u']:
                        val = stat[key]
                        ln['tt'] .insert(k, t)
                        ln['data'].insert(k, ln['transf'](val))
                    elif 'u' not in faus:
                        val = stat[key]
                        ln['tt'] .insert(kObs, t)
                        ln['data'].insert(kObs, ln['transf'](val))
                else:
                    # ln['data'] will not contain duplicates, coz only 'a' is input.
                    if 'a' in faus:
                        val = stat[kObs]
                        ln['tt'] .insert(kObs, t)
                        ln['data'].insert(kObs, ln['transf'](val))
                    elif 'f' in faus:
                        pass
        def update_plot_data(ax, lines):
            # Push the buffered arrays into the matplotlib line handles.
            def bend_into(shape, xx, yy):
                # Get arrays. Repeat (to use for intermediate nodes).
                yy = yy.array.repeat(3)
                xx = xx.array.repeat(3)
                if len(xx) == 0:
                    pass # shortcircuit any modifications
                elif shape == 'step':
                    yy = np.hstack([yy[1:], nan]) # roll leftward
                elif shape == 'dirac':
                    nonlocal nDirac
                    axW = np.diff(ax.get_xlim())
                    yy[0::3] = False # set datapoint to 0
                    xx[2::3] = nan # make datapoint disappear
                    xx += nDirac*axW/100 # offset datapoint horizontally
                    nDirac += 1
                return xx, yy
            nDirac = 1
            for _name, ln in lines.items():
                ln['handle'].set_data(*bend_into(ln['shape'], ln['tt'], ln['data']))
        def finalize_init(ax, lines, mm):
            # One-time setup performed after the first analysis step.
            # Rm lines that only contain NaNs
            for name in list(lines):
                ln = lines[name]
                stat = deep_getattr(stats, name)
                if not stat.were_changed:
                    ln['handle'].remove() # rm from axes
                    del lines[name] # rm from dict
            # Add legends
            if lines:
                ax.legend(loc='upper left',
                          bbox_to_anchor=(1.01, 1), borderaxespad=0)
                if mm:
                    ax.annotate(star+": mean of\nmarginals",
                                xy=(0, -1.5/len(lines)),
                                xycoords=ax.get_legend().get_frame(),
                                bbox=dict(alpha=0.0), fontsize='small')
            # coz placement of annotate needs flush sometimes:
            plot_pause(0.01)
        # Insert current stats
        for lines, ax in zip(self.d, self.axs):
            update_arrays(lines)
            update_plot_data(ax, lines)
        # Set x-limits (time)
        sliding_xlim(ax0, self.d[0]['err.rms']['tt'], self.T_lag, margin=True)
        self.baseline0.set_xdata(ax0.get_xlim())
        # Set y-limits
        data0 = [ln['data'].array for ln in self.d[0].values()]
        data1 = [ln['data'].array for ln in self.d[1].values()]
        ax0.set_ylim(0, d_ylim(data0, ax0 , cC=0.2, cE=0.9)[1])
        ax1.set_ylim(*d_ylim(data1, ax1, Max=4, Min=-4, cC=0.3, cE=0.9))
        # Init legend. Rm nan lines.
        if self.init_incomplete and 'a' == faus:
            self.init_incomplete = False
            finalize_init(ax0, self.d[0], False)
            finalize_init(ax1, self.d[1], True)
def sliding_xlim(ax, tt, lag, margin=False):
    """Set ``ax``'s x-limits to a sliding window over the time buffer ``tt``.

    The window ends at the latest time in ``tt`` and is at most ``lag`` wide.
    With ``margin``, a ``lag/20`` pad is appended on the right.
    NOTE(review): ``tt`` is assumed to expose ``nFilled`` and ``span()``
    (a ring-buffer-like object) — confirm against the caller.
    """
    dt = lag/20 if margin else 0
    if tt.nFilled == 0:
        return        # Quit
    t1, t2 = tt.span()  # Get suggested span.
    s1, s2 = ax.get_xlim()  # Get previous lims.
    # If zero span (eg tt holds single 'f' and 'a'):
    if t1 == t2:
        t1 -= 1  # add width
        t2 += 1  # add width
    # If user has skipped (too much):
    elif np.isnan(t1):
        s2 -= dt       # Correct for dt.
        span = s2-s1   # Compute previous span
        # If span<lag:
        if span < lag:
            span += (t2-s2)  # Grow by "dt".
        span = min(lag, span)  # Bound
        t1 = t2 - span   # Set span.
    ax.set_xlim(t1, t2 + dt)  # Set xlim to span
class weight_histogram:
    """Plots histogram of weights. Refreshed each analysis.

    Only active when the stats object carries particle weights (``stats.w``),
    i.e. for particle-filter-like methods, and only for ensembles small
    enough that histogramming stays fast.
    """
    def __init__(self, fignum, stats, key0, plot_u, E, P, **kwargs):
        if not hasattr(stats, 'w'):
            # No weights recorded => nothing to plot for this method.
            self.is_active = False
            return
        fig, ax = place.freshfig(fignum, figsize=(7, 3), gridspec_kw={'bottom': .15})
        ax.set_xscale('log')
        # BUGFIX: axis label was misspelled 'Weigth'.
        ax.set_xlabel('Weight')
        ax.set_ylabel('Count')
        self.stats = stats
        self.ax = ax
        self.hist = []
        # Log-spaced bins covering [1e-10, 1].
        self.bins = np.exp(np.linspace(np.log(1e-10), np.log(1), 31))
    def __call__(self, key, E, P):
        k, kObs, faus = key
        if 'a' == faus:
            w = self.stats.w[key]
            N = len(w)
            ax = self.ax
            # Histogramming very large ensembles is too slow; disable.
            self.is_active = N < 10001
            if not self.is_active:
                not_available_text(ax, 'Not computed (N > threshold)')
                return
            # Only show weights above the smallest bin edge.
            counted = w > self.bins[0]
            # Remove the previous histogram's bars before re-plotting.
            for bar in self.hist:
                bar.remove()
            nn, _, self.hist = ax.hist(
                w[counted], bins=self.bins, color='b')
            ax.set_ylim(top=max(nn))
            # 1/(w@w) is the effective sample size N_eff.
            ax.set_title('N: {:d}. N_eff: {:.4g}. Not shown: {:d}. '.
                         format(N, 1/(w@w), N-np.sum(counted)))
class spectral_errors:
    """Plots the (spatial-RMS) error as a function of the SVD index."""
    def __init__(self, fignum, stats, key0, plot_u, E, P, **kwargs):
        fig, ax = place.freshfig(fignum, figsize=(6, 3))
        ax.set_xlabel('Sing. value index')
        ax.set_yscale('log')
        # Line artists are created lazily on the first __call__.
        self.init_incomplete = True
        self.ax = ax
        self.plot_u = plot_u
        try:
            # Present only if spectral stats were computed for this run.
            self.msft = stats.umisf
            self.sprd = stats.svals
        except AttributeError:
            self.is_active = False
            not_available_text(ax, "Spectral stats not being computed")
    # Update plot
    def __call__(self, key, E, P):
        k, kObs, faus = key
        ax = self.ax
        if self.init_incomplete:
            # First usable call: create the line artists.
            if self.plot_u or 'f' == faus:
                self.init_incomplete = False
                msft = abs(self.msft[key])
                sprd = self.sprd[key]
                if np.any(np.isinf(msft)):
                    not_available_text(ax, "Spectral stats not finite")
                    self.is_active = False
                else:
                    self.line_msft, = ax.plot(
                        msft, 'k', lw=2, label='Error')
                    self.line_sprd, = ax.plot(
                        sprd, 'b', lw=2, label='Spread', alpha=0.9)
                    ax.get_xaxis().set_major_locator(
                        MaxNLocator(integer=True))
                    ax.legend()
        else:
            # Subsequent calls: only refresh the y-data.
            msft = abs(self.msft[key])
            sprd = self.sprd[key]
            self.line_sprd.set_ydata(sprd)
            self.line_msft.set_ydata(msft)
        # ax.set_ylim(*d_ylim(msft))
        # ax.set_ylim(bottom=1e-5)
        ax.set_ylim([1e-3, 1e1])
class correlations:
    """Plots the state (auto-)correlation matrix.

    The top axes show the (half or full) correlation matrix as an image;
    the bottom axes show the circulant auto-correlation function.
    """
    half = True  # Whether to show half/full (symmetric) corr matrix.
    def __init__(self, fignum, stats, key0, plot_u, E, P, **kwargs):
        GS = {'height_ratios': [4, 1], 'hspace': 0.09, 'top': 0.95}
        fig, (ax, ax2) = place.freshfig(fignum, figsize=(5, 6), nrows=2, gridspec_kw=GS)
        if E is None and np.isnan(
                P.diag if isinstance(P, CovMat) else P).all():
            not_available_text(ax, (
                'Not available in replays'
                '\ncoz full Ens/Cov not stored.'))
            self.is_active = False
            return
        Nx = len(stats.mu[key0])
        if Nx <= 1003:
            C = np.eye(Nx)
            # Mask half
            # BUGFIX: np.bool alias was removed in NumPy 1.24; use builtin bool.
            mask = np.zeros_like(C, dtype=bool)
            mask[np.tril_indices_from(mask)] = True
            # Make colormap. Log-transform cmap,
            # but not internally in matplotlib,
            # so as to avoid transforming the colorbar too.
            cmap = plt.get_cmap('RdBu_r')
            trfm = mpl.colors.SymLogNorm(linthresh=0.2, linscale=0.2,
                                         base=np.e, vmin=-1, vmax=1)
            cmap = cmap(trfm(np.linspace(-0.6, 0.6, cmap.N)))
            cmap = mpl.colors.ListedColormap(cmap)
            #
            VM = 1.0  # abs(np.percentile(C,[1,99])).max()
            im = ax.imshow(C, cmap=cmap, vmin=-VM, vmax=VM)
            # Colorbar
            _ = ax.figure.colorbar(im, ax=ax, shrink=0.8)
            # Tune plot
            plt.box(False)
            ax.set_facecolor('w')
            ax.grid(False)
            ax.set_title("State correlation matrix:", y=1.07)
            ax.xaxis.tick_top()
            # ax2 = inset_axes(ax,width="30%",height="60%",loc=3)
            line_AC, = ax2.plot(arange(Nx), ones(Nx), label='Correlation')
            line_AA, = ax2.plot(arange(Nx), ones(Nx), label='Abs. corr.')
            _ = ax2.hlines(0, 0, Nx-1, 'k', 'dotted', lw=1)
            # Align ax2 with ax
            bb_AC = ax2.get_position()
            bb_C = ax.get_position()
            ax2.set_position([bb_C.x0, bb_AC.y0, bb_C.width, bb_AC.height])
            # Tune plot
            ax2.set_title("Auto-correlation:")
            ax2.set_ylabel("Mean value")
            ax2.set_xlabel("Distance (in state indices)")
            ax2.set_xticklabels([])
            ax2.set_yticks([0, 1] + list(ax2.get_yticks()[[0, -1]]))
            ax2.set_ylim(top=1)
            ax2.legend(frameon=True, facecolor='w',
                       bbox_to_anchor=(1, 1), loc='upper left', borderaxespad=0.02)
            self.ax = ax
            self.ax2 = ax2
            self.im = im
            self.line_AC = line_AC
            self.line_AA = line_AA
            self.mask = mask
            if hasattr(stats, 'w'):
                self.w = stats.w
        else:
            # Too large to render the full matrix.
            not_available_text(ax)
    # Update plot
    def __call__(self, key, E, P):
        # Get cov matrix
        if E is not None:
            if hasattr(self, 'w'):
                C = np.cov(E, rowvar=False, aweights=self.w[key])
            else:
                C = np.cov(E, rowvar=False)
        else:
            assert P is not None
            C = P.full if isinstance(P, CovMat) else P
            C = C.copy()
        # Compute corr from cov
        std = np.sqrt(np.diag(C))
        C /= std[:, None]
        C /= std[None, :]
        # Mask
        if self.half:
            C = np.ma.masked_where(self.mask, C)
        # Plot
        self.im.set_data(C)
        # Auto-corr function
        ACF = circulant_ACF(C)
        AAF = circulant_ACF(C, do_abs=True)
        self.line_AC.set_ydata(ACF)
        self.line_AA.set_ydata(AAF)
def circulant_ACF(C, do_abs=False):
    """Compute the auto-covariance-function corresponding to `C`.

    This assumes it is the cov/corr matrix of a 1D periodic domain,
    i.e. each row is averaged after being circularly shifted so that
    the diagonal element comes first.
    """
    M = len(C)
    # cols = np.flipud(sla.circulant(np.arange(M)[::-1]))
    shifts = sla.circulant(np.arange(M))
    acf = np.zeros(M)
    for r, cc in enumerate(shifts):
        vals = C[r, cc]
        # Note: this actually also accesses masked values in C.
        acf += abs(vals) if do_abs else vals
    return acf/M
def sliding_marginals(
obs_inds = (),
dims = (),
labels = (),
Tplot = None,
ens_props = dict(alpha=0.4), # noqa
zoomy = 1.0,
):
# Store parameters
params_orig = DotDict(**locals())
def init(fignum, stats, key0, plot_u, E, P, **kwargs):
xx, yy, mu, std, chrono = \
stats.xx, stats.yy, stats.mu, stats.std, stats.HMM.t
# Set parameters (kwargs takes precedence over params_orig)
p = DotDict(**{
kw: kwargs.get(kw, val) for kw, val in params_orig.items()})
# Lag settings:
T_lag, K_lag, a_lag = validate_lag(p.Tplot, chrono)
K_plot = comp_K_plot(K_lag, a_lag, plot_u)
# Extend K_plot forther for adding blanks in resampling (PartFilt):
has_w = hasattr(stats, 'w')
if has_w:
K_plot += a_lag
# Chose marginal dims to plot
if not p.dims:
Nx = min(10, xx.shape[-1])
DimsX = linspace_int(xx.shape[-1], Nx)
else:
Nx = len(p.dims)
DimsX = p.dims
# Pre-process obs dimensions
# Rm inds of obs if not in DimsX
iiY = [i for i, m in enumerate(p.obs_inds) if m in DimsX]
# Rm obs_inds if not in DimsX
DimsY = [m for i, m in enumerate(p.obs_inds) if m in DimsX]
# Get dim (within y) of each | |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fine-tuning on regression/classification tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import sys
if six.PY2:
reload(sys)
sys.setdefaultencoding('utf8')
import os
import time
import json
import argparse
import numpy as np
import subprocess
import multiprocessing
from scipy.stats import pearsonr
import paddle
import paddle.fluid as fluid
import reader.cls as reader
from model.xlnet import XLNetConfig
from model.classifier import create_model
from optimization import optimization
from utils.args import ArgumentGroup, print_arguments, check_cuda
from utils.init import init_pretraining_params, init_checkpoint
from utils.cards import get_cards
# Number of distributed trainer processes (1 for single-process training).
num_trainers = int(os.environ.get('PADDLE_TRAINERS_NUM', 1))
# yapf: disable
parser = argparse.ArgumentParser(__doc__)
model_g = ArgumentGroup(parser, "model", "model configuration and paths.")
model_g.add_arg("model_config_path", str, None, "Path to the json file for bert model config.")
model_g.add_arg("dropout", float, 0.1, "Dropout rate.")
model_g.add_arg("dropatt", float, 0.1, "Attention dropout rate.")
model_g.add_arg("clamp_len", int, -1, "Clamp length.")
model_g.add_arg("summary_type", str, "last",
                "Method used to summarize a sequence into a vector.", choices=['last'])
model_g.add_arg("use_summ_proj", bool, True,
                "Whether to use projection for summarizing sequences.")
model_g.add_arg("spiece_model_file", str, None, "Sentence Piece model path.")
model_g.add_arg("init_checkpoint", str, None, "Init checkpoint to resume training from.")
model_g.add_arg("init_pretraining_params", str, None,
                "Init pre-training params which preforms fine-tuning from. If the "
                "arg 'init_checkpoint' has been set, this argument wouldn't be valid.")
model_g.add_arg("checkpoints", str, "checkpoints", "Path to save checkpoints.")
init_g = ArgumentGroup(parser, "init", "parameter initialization options.")
init_g.add_arg("init", str, "normal", "Initialization method.", choices=["normal", "uniform"])
# BUGFIX: init_std / init_range are floats; they were declared with type `str`,
# so CLI-supplied values parsed as strings while the defaults were floats.
init_g.add_arg("init_std", float, 0.02, "Initialization std when init is normal.")
init_g.add_arg("init_range", float, 0.1, "Initialization std when init is uniform.")
train_g = ArgumentGroup(parser, "training", "training options.")
train_g.add_arg("epoch", int, 1000, "Number of epoches for fine-tuning.")
train_g.add_arg("learning_rate", float, 5e-5, "Learning rate used to train with warmup.")
train_g.add_arg("lr_scheduler", str, "linear_warmup_decay",
                "scheduler of learning rate.", choices=['linear_warmup_decay', 'noam_decay'])
train_g.add_arg("weight_decay", float, 0.01, "Weight decay rate for L2 regularizer.")
train_g.add_arg("lr_layer_decay_rate", float, 1.0, "Top layer: lr[L] = args.learning_rate. "
                "Lower layers: lr[l-1] = lr[l] * lr_layer_decay_rate.")
train_g.add_arg("save_steps", int, 10000, "The steps interval to save checkpoints.")
train_g.add_arg("train_batch_size", int, 8, "Total examples' number in batch for training.")
train_g.add_arg("eval_batch_size", int, 128, "Total examples' number in batch for development.")
train_g.add_arg("predict_batch_size", int, 128, "Total examples' number in batch for prediction.")
train_g.add_arg("train_steps", int, 1000, "The total steps for training.")
train_g.add_arg("warmup_steps", int, 1000, "The steps for warmup.")
train_g.add_arg("validation_steps", int, 1000, "The steps interval to evaluate model performance.")
log_g = ArgumentGroup(parser, "logging", "logging related.")
log_g.add_arg("skip_steps", int, 10, "The steps interval to print loss.")
log_g.add_arg("verbose", bool, False, "Whether to output verbose log.")
data_g = ArgumentGroup(parser, "data", "Data paths, vocab paths and data processing options")
data_g.add_arg("data_dir", str, None, "Path to training data.")
data_g.add_arg("predict_dir", str, None, "Path to write predict results.")
data_g.add_arg("predict_threshold", float, 0.0, "Threshold for binary prediction.")
data_g.add_arg("max_seq_length", int, 512, "Number of words of the longest seqence.")
data_g.add_arg("uncased", bool, True,
               "Whether to lower case the input text. Should be True for uncased models and False for cased models.")
data_g.add_arg("random_seed", int, 0, "Random seed.")
run_type_g = ArgumentGroup(parser, "run_type", "running type options.")
run_type_g.add_arg("use_cuda", bool, True, "If set, use GPU for training.")
run_type_g.add_arg("use_fast_executor", bool, False, "If set, use fast parallel executor (in experiment).")
run_type_g.add_arg("shuffle", bool, True, "")
run_type_g.add_arg("task_name", str, None,
                   "The name of task to perform fine-tuning, should be in {'xnli', 'mnli', 'cola', 'mrpc'}.")
# NOTE(review): is_regression is declared `str` but consumed as a truthy flag;
# passing the literal "False" on the CLI would still be truthy — confirm intent.
run_type_g.add_arg("is_regression", str, None, "Whether it's a regression task.")
run_type_g.add_arg("do_train", bool, True, "Whether to perform training.")
run_type_g.add_arg("do_eval", bool, True, "Whether to perform evaluation on dev data set.")
run_type_g.add_arg("do_predict", bool, True, "Whether to perform evaluation on test data set.")
run_type_g.add_arg("eval_split", str, "dev", "Could be dev or test")
parser.add_argument("--enable_ce", action='store_true', help="The flag indicating whether to run the task for continuous evaluation.")
args = parser.parse_args()
# yapf: enable.
def evaluate(exe, predict_program, test_data_loader, fetch_list, eval_phase, num_examples):
    """Run evaluation over the whole eval split and print loss + metric.

    The metric is Pearson r for regression tasks, accuracy otherwise
    (selected via the global ``args.is_regression``). Progress is printed
    roughly every 10% of the estimated number of batches.
    """
    test_data_loader.start()
    total_cost, total_num_seqs = [], []
    all_logits, all_labels = [], []
    time_begin = time.time()
    total_steps = int(num_examples / args.eval_batch_size)
    # BUGFIX: when total_steps < 10 the old expression
    # `steps % int(total_steps / 10)` raised ZeroDivisionError.
    log_interval = max(1, total_steps // 10)
    steps = 0
    while True:
        try:
            np_loss, np_num_seqs, np_logits, np_labels = exe.run(program=predict_program,
                                                                 fetch_list=fetch_list)
            # Weight the batch loss by its sequence count.
            total_cost.extend(np_loss * np_num_seqs)
            total_num_seqs.extend(np_num_seqs)
            all_logits.extend(np_logits)
            all_labels.extend(np_labels)
            if steps % log_interval == 0:
                print("Evaluation [{}/{}]".format(steps, total_steps))
            steps += 1
        except fluid.core.EOFException:
            # Data exhausted: reset the loader and leave the loop.
            test_data_loader.reset()
            break
    all_logits = np.array(all_logits)
    all_labels = np.array(all_labels)
    if args.is_regression:
        key = "eval_pearsonr"
        eval_result, _ = pearsonr(all_logits, all_labels)
    else:
        key = "eval_accuracy"
        pred = np.argmax(all_logits, axis=1).reshape(all_labels.shape)
        eval_result = np.sum(pred == all_labels) / float(all_labels.size)
    time_end = time.time()
    print("[%s evaluation] ave loss: %f, %s: %f, elapsed time: %f s" %
          (eval_phase, np.sum(total_cost) / np.sum(total_num_seqs), key, eval_result,
           time_end - time_begin))
def predict(exe, predict_program, test_data_loader, task_name, label_list, fetch_list):
    """Run inference over the predict split and write results to disk.

    Writes one "index<TAB>prediction" row per example to
    <args.predict_dir>/<task_name>.tsv and dumps the raw logits to
    <args.predict_dir>/<task_name>.logits.json.
    """
    test_data_loader.start()
    pred_cnt = 0
    predict_results = []
    with open(os.path.join(args.predict_dir, "{}.tsv".format(
            task_name)), "w") as fout:
        fout.write("index\tprediction\n")
        while True:
            try:
                np_logits = exe.run(program=predict_program,
                                    fetch_list=fetch_list)
                for result in np_logits[0]:
                    if pred_cnt % 1000 == 0:
                        print("Predicting submission for example: {}".format(
                            pred_cnt))
                    logits = [float(x) for x in result.flat]
                    predict_results.append(logits)
                    # Map logits to a label: a scalar output is reported raw
                    # (regression); 2 classes use `predict_threshold` on the
                    # logit margin; >2 classes take the argmax label.
                    if len(logits) == 1:
                        label_out = logits[0]
                    elif len(logits) == 2:
                        if logits[1] - logits[0] > args.predict_threshold:
                            label_out = label_list[1]
                        else:
                            label_out = label_list[0]
                    elif len(logits) > 2:
                        max_index = np.argmax(np.array(logits, dtype=np.float32))
                        label_out = label_list[max_index]
                    else:
                        raise NotImplementedError
                    fout.write("{}\t{}\n".format(pred_cnt, label_out))
                    pred_cnt += 1
            except fluid.core.EOFException:
                # Data exhausted: reset the loader and leave the loop.
                test_data_loader.reset()
                break
    predict_json_path = os.path.join(args.predict_dir, "{}.logits.json".format(
        task_name))
    with open(predict_json_path, "w") as fp:
        json.dump(predict_results, fp, indent=4)
def get_device_num():
    """Return the number of GPU cards this process should use."""
    # NOTE(zcd): for multi-processe training, each process use one GPU card.
    if num_trainers > 1:
        return 1
    visible_device = os.environ.get('CUDA_VISIBLE_DEVICES', None)
    if visible_device:
        # Respect the explicit device mask.
        return len(visible_device.split(','))
    # Fall back to counting all devices reported by the driver.
    return subprocess.check_output(['nvidia-smi', '-L']).decode().count('\n')
def main(args):
if not (args.do_train or args.do_eval or args.do_predict):
raise ValueError("For args `do_train`, `do_eval` and `do_predict`, at "
"least one of them must be True.")
if args.do_predict and not args.predict_dir:
raise ValueError("args 'predict_dir' should be given when doing predict")
if not os.path.exists(args.predict_dir):
os.makedirs(args.predict_dir)
xlnet_config = XLNetConfig(args.model_config_path)
xlnet_config.print_config()
if args.use_cuda:
place = fluid.CUDAPlace(int(os.getenv('FLAGS_selected_gpus', '0')))
dev_count = get_device_num()
else:
place = fluid.CPUPlace()
dev_count = int(os.environ.get('CPU_NUM', multiprocessing.cpu_count()))
exe = fluid.Executor(place)
task_name = args.task_name.lower()
processors = {
"mnli_matched": reader.MnliMatchedProcessor,
"mnli_mismatched": reader.MnliMismatchedProcessor,
'sts-b': reader.StsbProcessor,
'imdb': reader.ImdbProcessor,
"yelp5": reader.Yelp5Processor
}
processor = processors[task_name](args)
label_list = processor.get_labels() if not args.is_regression else None
num_labels = len(label_list) if label_list is not None else None
train_program = fluid.Program()
startup_prog = fluid.Program()
if args.random_seed is not None:
startup_prog.random_seed = args.random_seed
train_program.random_seed = args.random_seed
if args.do_train:
# NOTE: If num_trainers > 1, the shuffle_seed must be set, because
# the order of batch data generated by reader
# must be the same in the respective processes.
shuffle_seed = 1 if num_trainers > 1 else None
train_data_generator = processor.data_generator(
batch_size=args.train_batch_size,
is_regression=args.is_regression,
phase='train',
epoch=args.epoch,
dev_count=dev_count,
shuffle=args.shuffle)
num_train_examples = processor.get_num_examples(phase='train')
print("Device count: %d" % dev_count)
print("Max num of epoches: %d" % args.epoch)
print("Num of train examples: %d" % num_train_examples)
print("Num of train steps: %d" % args.train_steps)
print("Num of warmup steps: %d" % args.warmup_steps)
with fluid.program_guard(train_program, startup_prog):
with fluid.unique_name.guard():
train_data_loader, loss, logits, num_seqs, label_ids = create_model(
args,
xlnet_config=xlnet_config,
n_class=num_labels)
scheduled_lr = optimization(
loss=loss,
warmup_steps=args.warmup_steps,
num_train_steps=args.train_steps,
learning_rate=args.learning_rate,
train_program=train_program,
startup_prog=startup_prog,
weight_decay=args.weight_decay,
lr_layer_decay_rate=args.lr_layer_decay_rate,
scheduler=args.lr_scheduler)
if args.do_eval:
dev_prog = fluid.Program()
with fluid.program_guard(dev_prog, startup_prog):
with fluid.unique_name.guard():
dev_data_loader, loss, logits, num_seqs, label_ids = create_model(
args,
xlnet_config=xlnet_config,
n_class=num_labels)
dev_prog = dev_prog.clone(for_test=True)
dev_data_loader.set_batch_generator(
processor.data_generator(
batch_size=args.eval_batch_size,
is_regression=args.is_regression,
phase=args.eval_split,
epoch=1,
dev_count=1,
shuffle=False), place)
if args.do_predict:
predict_prog = fluid.Program()
with fluid.program_guard(predict_prog, startup_prog):
with fluid.unique_name.guard():
predict_data_loader, loss, logits, num_seqs, label_ids = create_model(
args,
xlnet_config=xlnet_config,
n_class=num_labels)
predict_prog = predict_prog.clone(for_test=True)
predict_data_loader.set_batch_generator(
processor.data_generator(
batch_size=args.predict_batch_size,
is_regression=args.is_regression,
phase=args.eval_split,
epoch=1,
dev_count=1,
shuffle=False), place)
exe.run(startup_prog)
if args.do_train:
if args.init_checkpoint and args.init_pretraining_params:
print(
"WARNING: args 'init_checkpoint' and 'init_pretraining_params' "
"both are set! Only arg 'init_checkpoint' is made valid.")
if args.init_checkpoint:
init_checkpoint(
exe,
args.init_checkpoint,
main_program=startup_prog)
elif args.init_pretraining_params:
init_pretraining_params(
exe,
args.init_pretraining_params,
main_program=startup_prog)
elif args.do_eval or args.do_predict:
if not args.init_checkpoint:
raise ValueError("args 'init_checkpoint' should be set if"
"only doing validation or testing!")
init_checkpoint(
exe,
args.init_checkpoint,
main_program=startup_prog)
if args.do_train:
exec_strategy = fluid.ExecutionStrategy()
exec_strategy.use_experimental_executor = args.use_fast_executor
exec_strategy.num_threads = dev_count
build_strategy = fluid.BuildStrategy()
if args.use_cuda and num_trainers > 1:
assert shuffle_seed is not None
dist_utils.prepare_for_multi_process(exe, build_strategy, train_program)
train_data_generator = fluid.contrib.reader.distributed_batch_reader(
train_data_generator)
train_compiled_program = fluid.CompiledProgram(train_program).with_data_parallel(
loss_name=loss.name, build_strategy=build_strategy)
train_data_loader.set_batch_generator(train_data_generator, place)
if args.do_train:
train_data_loader.start()
steps = 0
total_cost, total_num_seqs, total_time = [], [], 0.0
throughput = []
ce_info = []
while steps < args.train_steps:
try:
time_begin = time.time()
steps += 1
if steps % args.skip_steps == 0:
fetch_list = [loss.name, scheduled_lr.name, num_seqs.name]
else:
fetch_list = []
outputs = exe.run(train_compiled_program, fetch_list=fetch_list)
time_end = time.time()
used_time = time_end - time_begin
total_time | |
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
from ._lapjv import reduction_transfer
from ._lapjv import augmenting_row_reduction
from ._lapjv import augment
from six.moves import range
def lapjv(i, j, costs, wants_dual_variables=False, augmenting_row_reductions=2):
    """Sparse linear assignment solution using Jonker-Volgenant algorithm

    i,j - similarly-sized vectors that pair the object at index i[n] with
          the object at index j[n]

    costs - a vector of similar size to i and j that is the cost of pairing
          i[n] with j[n].

    wants_dual_variables - the dual problem reduces the costs using two
          vectors, u[i] and v[j] where the solution is the maximum value of
          np.sum(u) + np.sum(v) where cost[i,j] - u[i] - v[j] >= 0.
          Set wants_dual_variables to True to have u and v returned in
          addition to the assignments.

    augmenting_row_reductions - the authors suggest that augmenting row reduction
          be performed twice to optimize the u and v before the augmenting
          stage. The caller can choose a different number of reductions
          by supplying a different value here.

    All costs not appearing in i,j are taken as infinite. Each i in the range,
    0 to max(i) must appear at least once and similarly for j.

    returns (x, y), the pairs of assignments that represent the solution
    or (x, y, u, v) if the dual variables are requested.
    """
    # BUGFIX: removed a stray, unused function-scope `import os` here.
    i = np.atleast_1d(i).astype(int)
    j = np.atleast_1d(j).astype(int)
    costs = np.atleast_1d(costs)
    assert len(i) == len(j), "i and j must be the same length"
    assert len(i) == len(costs), "costs must be the same length as i"
    #
    # Find the number of i with non-infinite cost for each j
    #
    j_count = np.bincount(j)
    assert not np.any(j_count == 0), "all j must be paired with at least one i"
    #
    # if you order anything by j, this is an index to the minimum for each j
    #
    j_index = np.hstack([[0], np.cumsum(j_count[:-1])])
    #
    # Likewise for i
    #
    i_count = np.bincount(i)
    assert not np.any(i_count == 0), "all i must be paired with at least one j"
    i_index = np.hstack([[0], np.cumsum(i_count[:-1])])
    n = len(j_count)  # dimension of the square cost matrix
    assert n == len(i_count), "There must be the same number of unique i and j"
    # # # # # # # #
    #
    # Variable initialization:
    #
    # The output variables:
    #
    # x - for each i, the assigned j. -1 indicates uninitialized
    # y - for each j, the assigned i
    # u, v - the dual variables
    #
    # A value of x = n or y = n means "unassigned"
    #
    x = np.ascontiguousarray(np.ones(n, np.uint32) * n)
    y = np.ascontiguousarray(np.ones(n, np.uint32) * n, np.uint32)
    u = np.ascontiguousarray(np.zeros(n, np.float64))
    # # # # # # # #
    #
    # Column reduction
    #
    # # # # # # # #
    #
    # For a given j, find the i with the minimum cost.
    #
    order = np.lexsort((-i, costs, j))
    min_idx = order[j_index]
    min_i = i[min_idx]
    #
    # v[j] is assigned to the minimum cost over all i
    #
    v = np.ascontiguousarray(costs[min_idx], np.float64)
    #
    # Find the last j for which i was min_i.
    #
    x[min_i] = np.arange(n).astype(np.uint32)
    y[x[x != n]] = np.arange(n).astype(np.uint32)[x != n]
    #
    # Three cases for i:
    #
    # i is not the minimum of any j - i goes on free list
    # i is the minimum of one j - v[j] remains the same and y[x[j]] = i
    # i is the minimum of more than one j, perform reduction transfer
    #
    assignment_count = np.bincount(min_i[min_i != n])
    assignment_count = np.hstack(
        (assignment_count, np.zeros(n - len(assignment_count), int))
    )
    free_i = assignment_count == 0
    one_i = assignment_count == 1
    # order = np.lexsort((costs, i)) Replace with this after all is done
    order = np.lexsort((j, i))
    j = np.ascontiguousarray(j[order], np.uint32)
    costs = np.ascontiguousarray(costs[order], np.float64)
    i_index = np.ascontiguousarray(i_index, np.uint32)
    i_count = np.ascontiguousarray(i_count, np.uint32)
    if np.any(one_i):
        reduction_transfer(
            np.ascontiguousarray(np.argwhere(one_i).flatten(), np.uint32),
            j,
            i_index,
            i_count,
            x,
            u,
            v,
            costs,
        )
    #
    # Perform augmenting row reduction on unassigned i
    #
    ii = np.ascontiguousarray(np.argwhere(free_i).flatten(), np.uint32)
    if len(ii) > 0:
        for iii in range(augmenting_row_reductions):
            ii = augmenting_row_reduction(n, ii, j, i_index, i_count, x, y, u, v, costs)
        augment(n, ii, j, i_index, i_count, x, y, u, v, costs)
    if wants_dual_variables:
        return x, y, u, v
    else:
        return x, y
def slow_reduction_transfer(ii, j, idx, count, x, u, v, c):
    """Reference (pure-NumPy) reduction-transfer step of Jonker-Volgenant.

    For every assigned row ``i`` in ``ii``, find the smallest reduced cost
    over all columns other than the assigned one, transfer the slack into
    ``v`` and record the new bound in ``u`` — i.e. the paper's:

        j1 := x[i]; u' := min{c[i,j] - v[j] : j != j1};
        v[j1] := v[j1] - (u' - u[i]); u[i] := u'

    Arguments use this module's ragged-array encoding: ``j``/``c`` hold the
    column index and cost of every entry, and ``idx[i]``/``count[i]``
    delimit row ``i``'s slice. ``u`` and ``v`` are updated in place.
    The authors note that reduction transfer can be applied in later stages
    of the algorithm but does not seem to provide a substantial benefit
    in speed.
    """
    for row in ii:
        lo = idx[row]
        hi = lo + count[row]
        cols = j[lo:hi]
        reduced = c[lo:hi] - v[cols]
        assigned = x[row]
        # Minimum reduced cost excluding the currently assigned column.
        umin = np.min(reduced[cols != assigned])
        v[assigned] += u[row] - umin
        u[row] = umin
def slow_augmenting_row_reduction(n, ii, jj, idx, count, x, y, u, v, c):
    """Perform the augmenting row reduction step from the Jonker-Volgenant algorithm

    n - the number of i and j in the linear assignment problem
    ii - the unassigned i
    jj - the j-index of every entry in c
    idx - the index of the first entry for each i
    count - the number of entries for each i
    x - the assignment of j to i
    y - the assignment of i to j
    u - the dual variable "u" which will be updated. It should be
        initialized to zero for the first reduction transfer.
    v - the dual variable "v" which will be reduced in-place
    c - the cost for each entry.

    returns the new unassigned i
    """
    #######################################
    #
    # From Jonker:
    #
    # procedure AUGMENTING ROW REDUCTION;
    # begin
    # LIST: = {all unassigned rows};
    # for all i in LIST do
    #    repeat
    #    ul:=min {c[i,j]-v[j] for j=l ...n};
    #    select j1 with c [i,j 1] - v[j 1] = u1;
    #    u2:=min {c[i,j]-v[j] for j=l ...n,j< >jl} ;
    #    select j2 with c [i,j2] - v [j2] = u2 and j2 < >j 1 ;
    #    u[i]:=u2;
    #    if ul <u2 then v[jl]:=v[jl]-(u2-ul)
    #    else if jl is assigned then jl : =j2;
    #    k:=y [jl]; if k>0 then x [k]:=0; x[i]:=jl; y [ j l ] : = i ; i:=k
    #    until ul =u2 (* no reduction transfer *) or k=0 i~* augmentation *)
    # end
    # Worklist of rows to process; mutated in place below (a displaced row
    # can be written back at position k-1 to be reprocessed immediately).
    ii = list(ii)
    k = 0
    limit = len(ii)
    free = []
    while k < limit:
        i = ii[k]
        k += 1
        # Candidate columns and their reduced costs c[i,j] - v[j] for row i.
        j = jj[idx[i] : (idx[i] + count[i])]
        uu = c[idx[i] : (idx[i] + count[i])] - v[j]
        # lexsort yields an ordering by reduced cost; take the two smallest
        # (u1 <= u2) and their columns (j1, j2).
        # NOTE(review): assumes count[i] >= 2, otherwise order[:2] yields
        # fewer than two values and the unpacking raises — confirm callers.
        order = np.lexsort([uu])
        u1, u2 = uu[order[:2]]
        j1, j2 = j[order[:2]]
        # Current owner of the best column; this code treats y[j1] == n as
        # "column unassigned" (see the i1 != n tests below).
        i1 = y[j1]
        if u1 < u2:
            # Strictly better best column: lower v[j1] so the two candidates
            # tie (the reduction transfer of the paper).
            v[j1] = v[j1] - u2 + u1
        elif i1 != n:
            # Tie, and the best column is already taken: fall back to the
            # second-best column instead of displacing.
            j1 = j2
            i1 = y[j1]
        if i1 != n:
            # The chosen column had an owner; displace it.
            if u1 < u2:
                # A reduction happened, so reprocess the displaced row right
                # away by stepping the worklist cursor back onto it.
                k -= 1
                ii[k] = i1
            else:
                # No reduction possible: the displaced row stays unassigned.
                free.append(i1)
        x[i] = j1
        y[j1] = i
    return np.array(free, np.uint32)
def slow_augment(n, ii, jj, idx, count, x, y, u, v, c):
"""Perform the augmentation step to assign unassigned i and | |
the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showexponent"]
    @showexponent.setter
    def showexponent(self, val):
        """Assign a new value to the 'showexponent' property."""
        self["showexponent"] = val
# showgrid
# --------
@property
def showgrid(self):
"""
Determines whether or not grid lines are drawn. If True, the
grid lines are drawn at every tick mark.
The 'showgrid' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showgrid"]
@showgrid.setter
def showgrid(self, val):
self["showgrid"] = val
# showline
# --------
@property
def showline(self):
"""
Determines whether or not a line bounding this axis is drawn.
The 'showline' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showline"]
@showline.setter
def showline(self, val):
self["showline"] = val
# showspikes
# ----------
@property
def showspikes(self):
"""
Sets whether or not spikes starting from data points to this
axis' wall are shown on hover.
The 'showspikes' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showspikes"]
@showspikes.setter
def showspikes(self, val):
self["showspikes"] = val
# showticklabels
# --------------
@property
def showticklabels(self):
"""
Determines whether or not the tick labels are drawn.
The 'showticklabels' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showticklabels"]
@showticklabels.setter
def showticklabels(self, val):
self["showticklabels"] = val
# showtickprefix
# --------------
@property
def showtickprefix(self):
"""
If "all", all tick labels are displayed with a prefix. If
"first", only the first tick is displayed with a prefix. If
"last", only the last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
The 'showtickprefix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showtickprefix"]
@showtickprefix.setter
def showtickprefix(self, val):
self["showtickprefix"] = val
# showticksuffix
# --------------
@property
def showticksuffix(self):
"""
Same as `showtickprefix` but for tick suffixes.
The 'showticksuffix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self["showticksuffix"]
@showticksuffix.setter
def showticksuffix(self, val):
self["showticksuffix"] = val
# spikecolor
# ----------
@property
def spikecolor(self):
"""
Sets the color of the spikes.
The 'spikecolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["spikecolor"]
@spikecolor.setter
def spikecolor(self, val):
self["spikecolor"] = val
# spikesides
# ----------
@property
def spikesides(self):
"""
Sets whether or not spikes extending from the projection data
points to this axis' wall boundaries are shown on hover.
The 'spikesides' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["spikesides"]
@spikesides.setter
def spikesides(self, val):
self["spikesides"] = val
# spikethickness
# --------------
@property
def spikethickness(self):
"""
Sets the thickness (in px) of the spikes.
The 'spikethickness' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self["spikethickness"]
@spikethickness.setter
def spikethickness(self, val):
self["spikethickness"] = val
# tick0
# -----
@property
def tick0(self):
"""
Sets the placement of the first tick on this axis. Use with
`dtick`. If the axis `type` is "log", then you must take the
log of your starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when `dtick`=*L<f>* (see
`dtick` for more info). If the axis `type` is "date", it should
be a date string, like date data. If the axis `type` is
"category", it should be a number, using the scale where each
category is assigned a serial number from zero in the order it
appears.
The 'tick0' property accepts values of any type
Returns
-------
Any
"""
return self["tick0"]
@tick0.setter
def tick0(self, val):
self["tick0"] = val
# tickangle
# ---------
@property
def tickangle(self):
"""
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the tick
labels vertically.
The 'tickangle' property is a angle (in degrees) that may be
specified as a number between -180 and 180. Numeric values outside this
range are converted to the equivalent value
(e.g. 270 is converted to -90).
Returns
-------
int|float
"""
return self["tickangle"]
@tickangle.setter
def tickangle(self, val):
self["tickangle"] = val
# tickcolor
# ---------
@property
def tickcolor(self):
"""
Sets the tick color.
The 'tickcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["tickcolor"]
@tickcolor.setter
def tickcolor(self, val):
self["tickcolor"] = val
# tickfont
# --------
@property
def tickfont(self):
"""
Sets the tick font.
The 'tickfont' property is an instance of Tickfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.layout.scene.zaxis.Tickfont`
- A dict of string/value properties that will be passed
to the Tickfont constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
plotly.graph_objs.layout.scene.zaxis.Tickfont
"""
return self["tickfont"]
@tickfont.setter
def tickfont(self, val):
self["tickfont"] = val
# tickformat
# ----------
@property
def tickformat(self):
"""
Sets the tick label | |
<gh_stars>100-1000
#===----------------------------------------------------------------------===##
#
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
#===----------------------------------------------------------------------===##
# XFAIL: LIBCXX-WINDOWS-FIXME
# Note: We prepend arguments with 'x' to avoid thinking there are too few
# arguments in case an argument is an empty string.
# RUN: %{python} %s x%S x%T x%{substitutions}
import base64
import copy
import os
import pickle
import platform
import subprocess
import sys
import unittest
from os.path import dirname
# Allow importing 'lit' and the 'libcxx' module. Make sure we put the lit
# path first so we don't find any system-installed version.
monorepoRoot = dirname(dirname(dirname(dirname(dirname(dirname(__file__))))))
sys.path = [os.path.join(monorepoRoot, 'libcxx', 'utils'),
os.path.join(monorepoRoot, 'llvm', 'utils', 'lit')] + sys.path
import libcxx.test.dsl as dsl
import lit.LitConfig
import lit.util
# Steal some parameters from the config running this test so that we can
# bootstrap our own TestingConfig.
# The RUN line passes each value prefixed with 'x' (see the comment at the
# top of the file); strip that sentinel character off again here.
args = list(map(lambda s: s[1:], sys.argv[1:8]))  # Remove the leading 'x'
SOURCE_ROOT, EXEC_PATH, SUBSTITUTIONS = args
# Remove the consumed arguments from argv so later argv consumers
# (presumably unittest's CLI parsing — NOTE(review): confirm) don't see them.
sys.argv[1:8] = []
# Decode the substitutions.
# NOTE(review): pickle.loads on data supplied by our own RUN line is
# trusted input here; do not reuse this pattern for external data.
SUBSTITUTIONS = pickle.loads(base64.b64decode(SUBSTITUTIONS))
for s, sub in SUBSTITUTIONS:
    print("Substitution '{}' is '{}'".format(s, sub))
class SetupConfigs(unittest.TestCase):
    """
    Shared fixture for the tests below -- builds a fake TestingConfig.
    """
    def setUp(self):
        """
        Construct a minimal LitConfig/TestingConfig pair that the unit
        tests can populate however they wish, pre-seeded with the
        substitutions captured from the bootstrapping config.
        """
        self.litConfig = lit.LitConfig.LitConfig(
            progname='lit',
            path=[],
            quiet=False,
            useValgrind=False,
            valgrindLeakCheck=False,
            valgrindArgs=[],
            noExecute=False,
            debug=False,
            isWindows=platform.system() == 'Windows',
            params={})
        self.config = lit.TestingConfig.TestingConfig.fromdefaults(self.litConfig)
        self.config.test_source_root = SOURCE_ROOT
        self.config.test_exec_root = EXEC_PATH
        self.config.recursiveExpansionLimit = 10
        self.config.substitutions = copy.deepcopy(SUBSTITUTIONS)

    def getSubstitution(self, substitution):
        """
        Look up a substitution in the TestingConfig; exactly one match
        must exist, otherwise this is an error.
        """
        matches = [value for (name, value) in self.config.substitutions
                   if name == substitution]
        assert len(matches) == 1
        return matches[0]
def findIndex(list, pred):
    """Return the index of the first element of 'list' satisfying 'pred',
    or 'len(list)' if there is no such element.

    Works on any iterable: when nothing matches, the number of items
    scanned is returned, so len() is never actually called.

    NOTE: the parameter name 'list' shadows the builtin; it is kept
    unchanged for backward compatibility with existing callers.
    """
    index = 0
    for x in list:
        if pred(x):
            # Guard-style early return replaces the original
            # break-out-of-the-loop arrangement.
            return index
        index += 1
    return index
class TestHasCompileFlag(SetupConfigs):
    """
    Tests for libcxx.test.dsl.hasCompileFlag
    """
    def test_no_flag_should_work(self):
        # An empty flag string is trivially accepted by any compiler.
        self.assertTrue(dsl.hasCompileFlag(self.config, ''))
    def test_flag_exists(self):
        self.assertTrue(dsl.hasCompileFlag(self.config, '-O1'))
    def test_nonexistent_flag(self):
        self.assertFalse(dsl.hasCompileFlag(self.config, '-this_is_not_a_flag_any_compiler_has'))
    def test_multiple_flags(self):
        # Several space-separated flags can be probed in a single call.
        self.assertTrue(dsl.hasCompileFlag(self.config, '-O1 -Dhello'))
class TestSourceBuilds(SetupConfigs):
    """
    Tests for libcxx.test.dsl.sourceBuilds
    """
    def test_valid_program_builds(self):
        source = """int main(int, char**) { return 0; }"""
        self.assertTrue(dsl.sourceBuilds(self.config, source))
    def test_compilation_error_fails(self):
        source = """int main(int, char**) { this does not compile }"""
        self.assertFalse(dsl.sourceBuilds(self.config, source))
    def test_link_error_fails(self):
        # Compiles fine but references an undefined symbol, so only the
        # link step can fail.
        source = """extern void this_isnt_defined_anywhere();
                    int main(int, char**) { this_isnt_defined_anywhere(); return 0; }"""
        self.assertFalse(dsl.sourceBuilds(self.config, source))
class TestProgramOutput(SetupConfigs):
    """
    Tests for libcxx.test.dsl.programOutput
    """
    def test_valid_program_returns_output(self):
        source = """
        #include <cstdio>
        int main(int, char**) { std::printf("FOOBAR"); return 0; }
        """
        self.assertEqual(dsl.programOutput(self.config, source), "FOOBAR")
    def test_valid_program_returns_output_newline_handling(self):
        # The '\\n' below is a literal backslash-n in the C++ source, i.e.
        # the program prints a real newline which must be preserved.
        source = """
        #include <cstdio>
        int main(int, char**) { std::printf("FOOBAR\\n"); return 0; }
        """
        self.assertEqual(dsl.programOutput(self.config, source), "FOOBAR\n")
    def test_valid_program_returns_no_output(self):
        source = """
        int main(int, char**) { return 0; }
        """
        self.assertEqual(dsl.programOutput(self.config, source), "")
    def test_invalid_program_returns_None_1(self):
        # The program compiles, but exits with an error
        source = """
        int main(int, char**) { return 1; }
        """
        self.assertEqual(dsl.programOutput(self.config, source), None)
    def test_invalid_program_returns_None_2(self):
        # The program doesn't compile
        source = """
        int main(int, char**) { this doesnt compile }
        """
        self.assertEqual(dsl.programOutput(self.config, source), None)
    def test_pass_arguments_to_program(self):
        source = """
        #include <cassert>
        #include <string>
        int main(int argc, char** argv) {
            assert(argc == 3);
            assert(argv[1] == std::string("first-argument"));
            assert(argv[2] == std::string("second-argument"));
            return 0;
        }
        """
        args = ["first-argument", "second-argument"]
        self.assertEqual(dsl.programOutput(self.config, source, args=args), "")
    def test_caching_is_not_too_aggressive(self):
        # Run a program, then change the substitutions and run it again.
        # Make sure the program is run the second time and the right result
        # is given, to ensure we're not incorrectly caching the result of the
        # first program run.
        source = """
        #include <cstdio>
        int main(int, char**) {
            std::printf("MACRO=%u\\n", MACRO);
            return 0;
        }
        """
        # Rewrite %{compile_flags} in place between the two runs so only
        # the -DMACRO definition differs.
        compileFlagsIndex = findIndex(self.config.substitutions, lambda x: x[0] == '%{compile_flags}')
        compileFlags = self.config.substitutions[compileFlagsIndex][1]
        self.config.substitutions[compileFlagsIndex] = ('%{compile_flags}', compileFlags + ' -DMACRO=1')
        output1 = dsl.programOutput(self.config, source)
        self.assertEqual(output1, "MACRO=1\n")
        self.config.substitutions[compileFlagsIndex] = ('%{compile_flags}', compileFlags + ' -DMACRO=2')
        output2 = dsl.programOutput(self.config, source)
        self.assertEqual(output2, "MACRO=2\n")
    def test_program_stderr_is_not_conflated_with_stdout(self):
        # Run a program that produces stdout output and stderr output too, making
        # sure the stderr output does not pollute the stdout output.
        source = """
        #include <cstdio>
        int main(int, char**) {
            std::fprintf(stdout, "STDOUT-OUTPUT");
            std::fprintf(stderr, "STDERR-OUTPUT");
            return 0;
        }
        """
        self.assertEqual(dsl.programOutput(self.config, source), "STDOUT-OUTPUT")
class TestHasLocale(SetupConfigs):
    """
    Tests for libcxx.test.dsl.hasLocale
    """
    def test_doesnt_explode(self):
        # It's really hard to test that a system has a given locale, so at least
        # make sure we don't explode when we try to check it.
        try:
            dsl.hasAnyLocale(self.config, ['en_US.UTF-8'])
        except subprocess.CalledProcessError:
            self.fail("checking for hasLocale should not explode")
    def test_nonexistent_locale(self):
        self.assertFalse(dsl.hasAnyLocale(self.config, ['for_sure_this_is_not_an_existing_locale']))
class TestCompilerMacros(SetupConfigs):
    """
    Tests for libcxx.test.dsl.compilerMacros
    """
    def test_basic(self):
        # The result maps macro names to their (string) values.
        macros = dsl.compilerMacros(self.config)
        self.assertIsInstance(macros, dict)
        self.assertGreater(len(macros), 0)
        for (k, v) in macros.items():
            self.assertIsInstance(k, str)
            self.assertIsInstance(v, str)
    def test_no_flag(self):
        macros = dsl.compilerMacros(self.config)
        self.assertIn('__cplusplus', macros.keys())
    def test_empty_flag(self):
        macros = dsl.compilerMacros(self.config, '')
        self.assertIn('__cplusplus', macros.keys())
    def test_with_flag(self):
        # A -D on the command line must show up in the reported macros.
        macros = dsl.compilerMacros(self.config, '-DFOO=3')
        self.assertIn('__cplusplus', macros.keys())
        self.assertEqual(macros['FOO'], '3')
    def test_with_flags(self):
        macros = dsl.compilerMacros(self.config, '-DFOO=3 -DBAR=hello')
        self.assertIn('__cplusplus', macros.keys())
        self.assertEqual(macros['FOO'], '3')
        self.assertEqual(macros['BAR'], 'hello')
class TestFeatureTestMacros(SetupConfigs):
    """
    Tests for libcxx.test.dsl.featureTestMacros
    """
    def test_basic(self):
        # Unlike compilerMacros, featureTestMacros maps names to int values.
        macros = dsl.featureTestMacros(self.config)
        self.assertIsInstance(macros, dict)
        self.assertGreater(len(macros), 0)
        for (k, v) in macros.items():
            self.assertIsInstance(k, str)
            self.assertIsInstance(v, int)
class TestFeature(SetupConfigs):
    """
    Tests for libcxx.test.dsl.Feature
    """
    def test_trivial(self):
        feature = dsl.Feature(name='name')
        origSubstitutions = copy.deepcopy(self.config.substitutions)
        actions = feature.getActions(self.config)
        self.assertTrue(len(actions) == 1)
        for a in actions:
            a.applyTo(self.config)
        # A bare feature only adds its name; substitutions stay untouched.
        self.assertEqual(origSubstitutions, self.config.substitutions)
        self.assertIn('name', self.config.available_features)
    def test_name_can_be_a_callable(self):
        feature = dsl.Feature(name=lambda cfg: 'name')
        for a in feature.getActions(self.config):
            a.applyTo(self.config)
        self.assertIn('name', self.config.available_features)
    def test_name_is_not_a_string_1(self):
        feature = dsl.Feature(name=None)
        self.assertRaises(ValueError, lambda: feature.getActions(self.config))
        self.assertRaises(ValueError, lambda: feature.pretty(self.config))
    def test_name_is_not_a_string_2(self):
        # Same as above, but the invalid name comes from a callable.
        feature = dsl.Feature(name=lambda cfg: None)
        self.assertRaises(ValueError, lambda: feature.getActions(self.config))
        self.assertRaises(ValueError, lambda: feature.pretty(self.config))
    def test_adding_action(self):
        feature = dsl.Feature(name='name', actions=[dsl.AddCompileFlag('-std=c++03')])
        origLinkFlags = copy.deepcopy(self.getSubstitution('%{link_flags}'))
        for a in feature.getActions(self.config):
            a.applyTo(self.config)
        self.assertIn('name', self.config.available_features)
        # Only %{compile_flags} may change; %{link_flags} must be untouched.
        self.assertIn('-std=c++03', self.getSubstitution('%{compile_flags}'))
        self.assertEqual(origLinkFlags, self.getSubstitution('%{link_flags}'))
    def test_actions_can_be_a_callable(self):
        # The tuple trick below also asserts that the callable receives the
        # very config being processed.
        feature = dsl.Feature(name='name',
                              actions=lambda cfg: (
                                  self.assertIs(self.config, cfg),
                                  [dsl.AddCompileFlag('-std=c++03')]
                              )[1])
        for a in feature.getActions(self.config):
            a.applyTo(self.config)
        self.assertIn('-std=c++03', self.getSubstitution('%{compile_flags}'))
    def test_unsupported_feature(self):
        feature = dsl.Feature(name='name', when=lambda _: False)
        self.assertEqual(feature.getActions(self.config), [])
    def test_is_supported_gets_passed_the_config(self):
        feature = dsl.Feature(name='name', when=lambda cfg: (self.assertIs(self.config, cfg), True)[1])
        self.assertEqual(len(feature.getActions(self.config)), 1)
def _throw():
raise ValueError()
class TestParameter(SetupConfigs):
"""
Tests for libcxx.test.dsl.Parameter
"""
def test_empty_name_should_blow_up(self):
self.assertRaises(ValueError, lambda: dsl.Parameter(name='', choices=['c++03'], type=str, help='', actions=lambda _: []))
def test_empty_choices_should_blow_up(self):
self.assertRaises(ValueError, lambda: dsl.Parameter(name='std', choices=[], type=str, help='', actions=lambda _: []))
def test_no_choices_is_ok(self):
param = dsl.Parameter(name='triple', type=str, help='', actions=lambda _: [])
self.assertEqual(param.name, 'triple')
def test_name_is_set_correctly(self):
param = dsl.Parameter(name='std', choices=['c++03'], type=str, help='', actions=lambda _: [])
self.assertEqual(param.name, 'std')
def test_no_value_provided_and_no_default_value(self):
param = dsl.Parameter(name='std', choices=['c++03'], type=str, help='', actions=lambda _: [])
self.assertRaises(ValueError, lambda: param.getActions(self.config, self.litConfig.params))
def test_no_value_provided_and_default_value(self):
param = dsl.Parameter(name='std', choices=['c++03'], type=str, help='', default='c++03',
actions=lambda std: [dsl.AddFeature(std)])
for a in param.getActions(self.config, self.litConfig.params):
a.applyTo(self.config)
self.assertIn('c++03', self.config.available_features)
def test_value_provided_on_command_line_and_no_default_value(self):
self.litConfig.params['std'] = 'c++03'
param = dsl.Parameter(name='std', choices=['c++03'], type=str, help='',
actions=lambda std: [dsl.AddFeature(std)])
for a in param.getActions(self.config, self.litConfig.params):
a.applyTo(self.config)
self.assertIn('c++03', self.config.available_features)
def test_value_provided_on_command_line_and_default_value(self):
"""The value provided on the command line should override the default value"""
self.litConfig.params['std'] = 'c++11'
param = dsl.Parameter(name='std', choices=['c++03', 'c++11'], type=str, default='c++03', help='',
actions=lambda std: [dsl.AddFeature(std)])
for a in param.getActions(self.config, self.litConfig.params):
a.applyTo(self.config)
self.assertIn('c++11', self.config.available_features)
self.assertNotIn('c++03', self.config.available_features)
def test_value_provided_in_config_and_default_value(self):
"""The value provided in the config should override the default value"""
self.config.std ='c++11'
param = dsl.Parameter(name='std', choices=['c++03', 'c++11'], type=str, default='c++03', help='',
actions=lambda std: [dsl.AddFeature(std)])
for a in param.getActions(self.config, self.litConfig.params):
a.applyTo(self.config)
self.assertIn('c++11', self.config.available_features)
self.assertNotIn('c++03', self.config.available_features)
def test_value_provided_in_config_and_on_command_line(self):
"""The value on the command line should override the one in the config"""
self.config.std = 'c++11'
self.litConfig.params['std'] = 'c++03'
param = dsl.Parameter(name='std', choices=['c++03', 'c++11'], type=str, help='',
actions=lambda std: [dsl.AddFeature(std)])
for a in param.getActions(self.config, self.litConfig.params):
a.applyTo(self.config)
self.assertIn('c++03', self.config.available_features)
self.assertNotIn('c++11', self.config.available_features)
def test_no_actions(self):
self.litConfig.params['std'] = 'c++03'
param = dsl.Parameter(name='std', choices=['c++03'], type=str, help='',
actions=lambda _: [])
actions = param.getActions(self.config, self.litConfig.params)
self.assertEqual(actions, [])
def test_boolean_value_parsed_from_trueish_string_parameter(self):
self.litConfig.params['enable_exceptions'] = "True"
param = dsl.Parameter(name='enable_exceptions', choices=[True, False], type=bool, help='',
actions=lambda exceptions: [] if exceptions else _throw())
self.assertEqual(param.getActions(self.config, self.litConfig.params), [])
def test_boolean_value_from_true_boolean_parameter(self):
self.litConfig.params['enable_exceptions'] = True
param = dsl.Parameter(name='enable_exceptions', choices=[True, False], type=bool, help='',
actions=lambda exceptions: [] if exceptions else _throw())
self.assertEqual(param.getActions(self.config, self.litConfig.params), [])
def test_boolean_value_parsed_from_falseish_string_parameter(self):
self.litConfig.params['enable_exceptions'] = "False"
param = dsl.Parameter(name='enable_exceptions', choices=[True, False], type=bool, help='',
actions=lambda exceptions: [] if exceptions else [dsl.AddFeature("-fno-exceptions")])
for a in param.getActions(self.config, self.litConfig.params):
a.applyTo(self.config)
self.assertIn('-fno-exceptions', self.config.available_features)
def test_boolean_value_from_false_boolean_parameter(self):
self.litConfig.params['enable_exceptions'] = False
param = dsl.Parameter(name='enable_exceptions', choices=[True, False], type=bool, help='',
actions=lambda exceptions: [] if exceptions else [dsl.AddFeature("-fno-exceptions")])
for a in param.getActions(self.config, self.litConfig.params):
a.applyTo(self.config)
self.assertIn('-fno-exceptions', self.config.available_features)
def test_list_parsed_from_comma_delimited_string_empty(self):
self.litConfig.params['additional_features'] = ""
| |
14.94 ? 60 TYR B CZ 1
ATOM 2198 O OH . TYR B 1 59 ? 47.693 -35.035 -11.459 1.00 15.11 ? 60 TYR B OH 1
ATOM 2199 N N . ARG B 1 60 ? 47.291 -39.611 -6.974 1.00 14.87 ? 61 ARG B N 1
ATOM 2200 C CA . ARG B 1 60 ? 48.595 -40.249 -6.761 1.00 12.88 ? 61 ARG B CA 1
ATOM 2201 C C . ARG B 1 60 ? 48.635 -40.774 -5.331 1.00 12.49 ? 61 ARG B C 1
ATOM 2202 O O . ARG B 1 60 ? 49.708 -40.797 -4.729 1.00 12.84 ? 61 ARG B O 1
ATOM 2203 C CB . ARG B 1 60 ? 49.787 -39.311 -7.038 1.00 12.56 ? 61 ARG B CB 1
ATOM 2204 C CG . ARG B 1 60 ? 49.836 -38.793 -8.465 1.00 13.20 ? 61 ARG B CG 1
ATOM 2205 C CD . ARG B 1 60 ? 51.169 -38.162 -8.786 1.00 13.79 ? 61 ARG B CD 1
ATOM 2206 N NE . ARG B 1 60 ? 51.105 -37.491 -10.078 1.00 16.06 ? 61 ARG B NE 1
ATOM 2207 C CZ . ARG B 1 60 ? 51.060 -38.107 -11.265 1.00 16.30 ? 61 ARG B CZ 1
ATOM 2208 N NH1 . ARG B 1 60 ? 51.100 -39.430 -11.392 1.00 19.01 ? 61 ARG B NH1 1
ATOM 2209 N NH2 . ARG B 1 60 ? 51.007 -37.367 -12.335 1.00 18.35 ? 61 ARG B NH2 1
ATOM 2210 N N . ALA B 1 61 ? 47.510 -41.300 -4.866 1.00 11.69 ? 62 ALA B N 1
ATOM 2211 C CA . ALA B 1 61 ? 47.416 -41.607 -3.415 1.00 10.96 ? 62 ALA B CA 1
ATOM 2212 C C . ALA B 1 61 ? 48.157 -42.867 -3.037 1.00 11.85 ? 62 ALA B C 1
ATOM 2213 O O . ALA B 1 61 ? 48.407 -43.081 -1.837 1.00 11.91 ? 62 ALA B O 1
ATOM 2214 C CB . ALA B 1 61 ? 46.001 -41.622 -2.967 1.00 12.06 ? 62 ALA B CB 1
ATOM 2215 N N . ASP B 1 62 ? 48.481 -43.727 -4.028 1.00 11.06 ? 63 ASP B N 1
ATOM 2216 C CA . ASP B 1 62 ? 49.354 -44.911 -3.772 1.00 12.09 ? 63 ASP B CA 1
ATOM 2217 C C . ASP B 1 62 ? 50.803 -44.800 -4.167 1.00 10.84 ? 63 ASP B C 1
ATOM 2218 O O . ASP B 1 62 ? 51.534 -45.760 -4.124 1.00 12.75 ? 63 ASP B O 1
ATOM 2219 C CB . ASP B 1 62 ? 48.700 -46.142 -4.387 1.00 12.02 ? 63 ASP B CB 1
ATOM 2220 C CG . ASP B 1 62 ? 47.345 -46.394 -3.797 1.00 15.69 ? 63 ASP B CG 1
ATOM 2221 O OD1 . ASP B 1 62 ? 47.198 -46.342 -2.524 1.00 14.84 ? 63 ASP B OD1 1
ATOM 2222 O OD2 . ASP B 1 62 ? 46.342 -46.518 -4.570 1.00 14.59 ? 63 ASP B OD2 1
ATOM 2223 N N . GLU B 1 63 ? 51.244 -43.606 -4.583 1.00 12.30 ? 64 GLU B N 1
ATOM 2224 C CA . GLU B 1 63 ? 52.635 -43.290 -4.859 1.00 12.39 ? 64 GLU B CA 1
ATOM 2225 C C . GLU B 1 63 ? 53.421 -42.869 -3.647 1.00 13.72 ? 64 GLU B C 1
ATOM 2226 O O . GLU B 1 63 ? 52.892 -42.216 -2.783 1.00 14.54 ? 64 GLU B O 1
ATOM 2227 C CB . GLU B 1 63 ? 52.796 -42.136 -5.868 1.00 13.30 ? 64 GLU B CB 1
ATOM 2228 C CG . GLU B 1 63 ? 52.273 -42.506 -7.261 1.00 13.81 ? 64 GLU B CG 1
ATOM 2229 C CD . GLU B 1 63 ? 52.514 -41.478 -8.326 1.00 15.31 ? 64 GLU B CD 1
ATOM 2230 O OE1 . GLU B 1 63 ? 51.658 -41.417 -9.247 1.00 14.47 ? 64 GLU B OE1 1
ATOM 2231 O OE2 . GLU B 1 63 ? 53.535 -40.752 -8.320 1.00 15.45 ? 64 GLU B OE2 1
ATOM 2232 N N . ARG B 1 64 ? 54.683 -43.282 -3.592 1.00 12.68 ? 65 ARG B N 1
ATOM 2233 C CA . ARG B 1 64 ? 55.518 -42.876 -2.444 1.00 12.25 ? 65 ARG B CA 1
ATOM 2234 C C . ARG B 1 64 ? 56.000 -41.445 -2.597 1.00 12.85 ? 65 ARG B C 1
ATOM 2235 O O . ARG B 1 64 ? 56.206 -40.921 -3.733 1.00 13.35 ? 65 ARG B O 1
ATOM 2236 C CB . ARG B 1 64 ? 56.681 -43.846 -2.190 1.00 13.26 ? 65 ARG B CB 1
ATOM 2237 C CG . ARG B 1 64 ? 56.198 -45.151 -1.558 1.00 12.85 ? 65 ARG B CG 1
ATOM 2238 C CD . ARG B 1 64 ? 57.335 -46.140 -1.420 1.00 14.04 ? 65 ARG B CD 1
ATOM 2239 N NE . ARG B 1 64 ? 56.853 -47.272 -0.701 1.00 13.52 ? 65 ARG B NE 1
ATOM 2240 C CZ . ARG B 1 64 ? 56.664 -47.357 0.623 1.00 14.59 ? 65 ARG B CZ 1
ATOM 2241 N NH1 . ARG B 1 64 ? 56.963 -46.377 1.450 1.00 15.10 ? 65 ARG B NH1 1
ATOM 2242 N NH2 . ARG B 1 64 ? 56.167 -48.438 1.127 1.00 15.39 ? 65 ARG B NH2 1
ATOM 2243 N N . PHE B 1 65 ? 56.129 -40.778 -1.460 1.00 11.95 ? 66 PHE B N 1
ATOM 2244 C CA . PHE B 1 65 ? 56.734 -39.454 -1.397 1.00 11.95 ? 66 PHE B CA 1
ATOM 2245 C C . PHE B 1 65 ? 57.601 -39.383 -0.140 1.00 13.29 ? 66 PHE B C 1
ATOM 2246 O O . PHE B 1 65 ? 57.326 -40.058 0.851 1.00 12.23 ? 66 PHE B O 1
ATOM 2247 C CB . PHE B 1 65 ? 55.648 -38.323 -1.265 1.00 12.69 ? 66 PHE B CB 1
ATOM 2248 C CG . PHE B 1 65 ? 54.761 -38.147 -2.443 1.00 12.57 ? 66 PHE B CG 1
ATOM 2249 C CD1 . PHE B 1 65 ? 53.720 -39.011 -2.665 1.00 13.53 ? 66 PHE B CD1 1
ATOM 2250 C CD2 . PHE B 1 65 ? 54.863 -37.009 -3.271 1.00 13.62 ? 66 PHE B CD2 1
ATOM 2251 C CE1 . PHE B 1 65 ? 52.926 -38.833 -3.762 1.00 12.97 ? 66 PHE B CE1 1
ATOM 2252 C CE2 . PHE B 1 65 ? 54.024 -36.821 -4.355 1.00 12.95 ? 66 PHE B CE2 1
ATOM 2253 C CZ . PHE B 1 65 ? 53.046 -37.713 -4.583 1.00 12.83 ? 66 PHE B CZ 1
ATOM 2254 N N . PRO B 1 66 ? 58.700 -38.588 -0.182 1.00 11.64 ? 67 PRO B N 1
ATOM 2255 C CA . PRO B 1 66 ? 59.460 -38.309 1.007 1.00 13.40 ? 67 PRO B CA 1
ATOM 2256 C C . PRO B 1 66 ? 58.599 -37.592 2.040 1.00 14.30 ? 67 PRO B C 1
ATOM 2257 O O . PRO B 1 66 ? 57.991 -36.555 1.693 1.00 16.20 ? 67 PRO B O 1
ATOM 2258 C CB . PRO B 1 66 ? 60.563 -37.357 0.498 | |
#!/usr/bin/python
#
# SPDX-License-Identifier: Apache-2.0
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils._text import to_native
from ..module_utils.module import BlockchainModule
from ..module_utils.utils import (get_console, get_identity_by_module,
get_organizations_by_module,
get_peer_by_module, resolve_identity)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: committed_chaincode
short_description: Manage a committed chaincode on a Hyperledger Fabric channel
description:
- Commit a chaincode definition on a Hyperledger Fabric channel by using the IBM Blockchain Platform.
- This module works with the IBM Blockchain Platform managed service running in IBM Cloud, or the IBM Blockchain
Platform software running in a Red Hat OpenShift or Kubernetes cluster.
author: <NAME> (@sstone1)
options:
api_endpoint:
description:
- The URL for the IBM Blockchain Platform console.
type: str
required: true
api_authtype:
description:
- C(ibmcloud) - Authenticate to the IBM Blockchain Platform console using IBM Cloud authentication.
You must provide a valid API key using I(api_key).
- C(basic) - Authenticate to the IBM Blockchain Platform console using basic authentication.
You must provide both a valid API key using I(api_key) and API secret using I(api_secret).
type: str
required: true
api_key:
description:
- The API key for the IBM Blockchain Platform console.
type: str
required: true
api_secret:
description:
- The API secret for the IBM Blockchain Platform console.
- Only required when I(api_authtype) is C(basic).
type: str
api_timeout:
description:
- The timeout, in seconds, to use when interacting with the IBM Blockchain Platform console.
type: int
default: 60
api_token_endpoint:
description:
- The IBM Cloud IAM token endpoint to use when using IBM Cloud authentication.
- Only required when I(api_authtype) is C(ibmcloud), and you are using IBM internal staging servers for testing.
type: str
default: https://iam.cloud.ibm.com/identity/token
state:
description:
- C(absent) - If a chaincode definition matching the specified name, version and configuration is
committed, then an error will be thrown, as it is not possible to uncommit a chaincode definition.
- C(present) - Asserts that a chaincode definition matching the specified name, version and configuration
is committed on the specified channel. If it is not committed, then the chaincode definition with the
specified name, version and configuration will be committed on the specified channel.
type: str
default: present
choices:
- absent
- present
peer:
description:
- The peer to use to manage the committed chaincode definition.
- You can pass a string, which is the display name of a peer registered
with the IBM Blockchain Platform console.
- You can also pass a dict, which must match the result format of one of the
M(peer_info) or M(peer) modules.
type: raw
required: true
identity:
description:
- The identity to use when interacting with the peer.
- You can pass a string, which is the path to the JSON file where the enrolled
identity is stored.
- You can also pass a dict, which must match the result format of one of the
M(enrolled_identity_info) or M(enrolled_identity) modules.
type: raw
required: true
msp_id:
description:
- The MSP ID to use for interacting with the peer.
type: str
required: true
hsm:
description:
- "The PKCS #11 compliant HSM configuration to use for digital signatures."
- Only required if the identity specified in I(identity) was enrolled using an HSM.
type: dict
suboptions:
pkcs11library:
description:
- "The PKCS #11 library that should be used for digital signatures."
type: str
label:
description:
- The HSM label that should be used for digital signatures.
type: str
pin:
description:
- The HSM pin that should be used for digital signatures.
type: str
channel:
description:
- The name of the channel.
type: str
required: true
organizations:
description:
- The list of organizations to use to endorse the transaction for
committing the chaincode definition.
- The organizations must all be members of the channel, must all have
approved the chaincode definition, and must all have at least one
anchor peer defined.
- You can pass strings, which are the names of organizations that are
registered with the IBM Blockchain Platform console.
- You can also pass a dict, which must match the result format of one
of the M(organization_info) or M(organization) modules.
- Only required when I(state) is C(present).
type: list
elements: raw
name:
description:
- The name of the chaincode definition.
type: str
required: true
version:
description:
- The version of the chaincode definition.
type: str
required: true
sequence:
description:
- The sequence number of the chaincode definition.
type: int
required: true
endorsement_policy_ref:
description:
- A reference to a channel policy to use as the endorsement policy for this chaincode definition, for example I(/Channel/Application/MyEndorsementPolicy).
type: str
endorsement_policy:
description:
- The endorsement policy for this chaincode definition.
type: str
endorsement_plugin:
description:
- The endorsement plugin for this chaincode definition.
type: str
validation_plugin:
description:
- The validation plugin for this chaincode definition.
type: str
init_required:
description:
- True if this chaincode definition requires calling the I(Init) method before the I(Invoke) method,
false otherwise.
type: bool
collections_config:
description:
- The path to the collections configuration file for the chaincode definition.
type: str
'''
EXAMPLES = '''
- name: Commit the chaincode definition on the channel
ibm.blockchain_platform.committed_chaincode:
state: present
api_endpoint: https://ibp-console.example.org:32000
api_authtype: basic
api_key: <KEY>
api_secret: <KEY>
peer: Org1 Peer
identity: Org1 Admin.json
msp_id: Org1MSP
channel: mychannel
name: fabcar
version: 1.0.0
sequence: 1
- name: Commit the chaincode definition on the channel with an endorsement policy and collection configuration
ibm.blockchain_platform.committed_chaincode:
state: present
api_endpoint: https://ibp-console.example.org:32000
api_authtype: basic
api_key: xxxxxxxx
api_secret: <KEY>
peer: Org1 Peer
identity: Org1 Admin.json
msp_id: Org1MSP
channel: mychannel
name: fabcar
version: 1.0.0
sequence: 1
endorsement_policy: AND('Org1MSP.peer', 'Org2MSP.peer')
collections_config: collections-config.json
- name: Ensure the chaincode definition is not committed on the channel
ibm.blockchain_platform.committed_chaincode:
state: absent
api_endpoint: https://ibp-console.example.org:32000
api_authtype: basic
api_key: xxxxxxxx
api_secret: <KEY>
peer: Org1 Peer
identity: Org1 Admin.json
msp_id: Org1MSP
channel: mychannel
name: fabcar
version: 1.0.0
sequence: 1
'''
RETURN = '''
---
committed_chaincode:
description:
- The committed chaincode definition.
type: dict
returned: when I(state) is C(present)
contains:
channel:
description:
- The name of the channel.
type: str
sample: mychannel
name:
description:
- The name of the chaincode definition.
type: str
sample: fabcar
version:
description:
- The version of the chaincode definition.
type: str
sample: 1.0.0
sequence:
description:
- The sequence number of the chaincode definition.
type: int
sample: 1
endorsement_policy_ref:
description:
- The reference to a channel policy used as the endorsement policy for this chaincode definition.
type: str
sample: /Channel/Application/MyEndorsementPolicy
endorsement_policy:
description:
- The endorsement policy for this chaincode definition.
type: str
endorsement_plugin:
description:
- The endorsement plugin for this chaincode definition.
type: str
validation_plugin:
description:
- The validation plugin for this chaincode definition.
type: str
init_required:
description:
- True if this chaincode definition requires calling the I(Init) method before the I(Invoke) method,
false otherwise.
type: bool
collections_config:
description:
- The path to the collections configuration file for the chaincode definition.
type: str
'''
def main():
# Create the module.
argument_spec = dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
api_endpoint=dict(type='str', required=True),
api_authtype=dict(type='str', required=True, choices=['ibmcloud', 'basic']),
api_key=dict(type='str', required=True, no_log=True),
api_secret=dict(type='str', no_log=True),
api_timeout=dict(type='int', default=60),
api_token_endpoint=dict(type='str', default='https://iam.cloud.ibm.com/identity/token'),
peer=dict(type='raw', required=True),
identity=dict(type='raw', required=True),
msp_id=dict(type='str', required=True),
hsm=dict(type='dict', options=dict(
pkcs11library=dict(type='str', required=True),
label=dict(type='str', required=True, no_log=True),
pin=dict(type='str', required=True, no_log=True)
)),
channel=dict(type='str', required=True),
organizations=dict(type='list', elements='raw'),
name=dict(type='str', required=True),
version=dict(type='str', required=True),
sequence=dict(type='int', required=True),
endorsement_policy_ref=dict(type='str'),
endorsement_policy=dict(type='str'),
endorsement_plugin=dict(type='str', default='escc'),
validation_plugin=dict(type='str', default='vscc'),
init_required=dict(type='bool'),
collections_config=dict(type='str')
)
required_if = [
('api_authtype', 'basic', ['api_secret']),
('state', 'present', ['organizations'])
]
mutually_exclusive = [
['endorsement_policy_ref', 'endorsement_policy']
]
module = BlockchainModule(
min_fabric_version='2.1.1',
argument_spec=argument_spec,
supports_check_mode=True,
required_if=required_if,
mutually_exclusive=mutually_exclusive)
# Validate HSM requirements if HSM is specified.
if module.params['hsm']:
module.check_for_missing_hsm_libs()
# Ensure all exceptions are caught.
try:
# Log in to the IBP console.
console = get_console(module)
# Get the peer, identity, and MSP ID.
peer = get_peer_by_module(console, module)
identity = get_identity_by_module(module)
msp_id = module.params['msp_id']
hsm = module.params['hsm']
identity = resolve_identity(console, module, identity, msp_id)
# Extract the chaincode information.
channel = module.params['channel']
name = module.params['name']
version = module.params['version']
sequence = module.params['sequence']
endorsement_policy_ref = module.params['endorsement_policy_ref']
endorsement_policy = module.params['endorsement_policy']
endorsement_plugin = module.params['endorsement_plugin']
validation_plugin = module.params['validation_plugin']
init_required = module.params['init_required']
collections_config = module.params['collections_config']
# Check if this chaincode is already committed on the channel.
with peer.connect(module, identity, msp_id, hsm) as peer_connection:
| |
+ '/block/vpools'
elif resource_type == 'file_vpool':
uri = URI_SERVICES_BASE + '/file/vpools'
elif resource_type == "varray":
uri = URI_VARRAYS
elif resource_type == "network_system":
uri = URI_NETWORKSYSTEMS
elif resource_type == "storage_system":
uri = URI_STORAGEDEVICES
elif resource_type == "protection_system":
uri = URI_PROTECTION_SYSTEMS
elif resource_type == "protectionset":
uri = URI_PROTECTIONSETS
elif resource_type == "smis_provider":
uri = URI_SMISPROVIDERS
elif resource_type == "storage_tier":
uri = URI_STORAGETIERS
elif resource_type == "network":
uri = URI_NETWORKS
elif resource_type == "storage_pool":
uri = URI_SERVICES_BASE + '/vdc/storage-pools'
elif resource_type == "storage_port":
uri = URI_SERVICES_BASE + '/vdc/storage-ports'
elif resource_type == "snapshot":
uri = URI_FILE_SNAPSHOTS
elif resource_type == "block_snapshot":
uri = URI_SERVICES_BASE + '/block/snapshots'
elif resource_type == "block_export":
uri = URI_SERVICES_BASE + '/block/exports'
elif resource_type == "block_consistency_group":
uri = URI_SERVICES_BASE + '/block/consistency-groups'
elif resource_type == "vcenter":
uri = URI_VCENTERS
elif resource_type == "datacenter":
uri = URI_DATACENTERS
elif resource_type == "host":
uri = URI_HOSTS
elif resource_type == "cluster":
uri = URI_CLUSTERS
elif resource_type == "ipinterface":
uri = URI_IPINTERFACES
elif resource_type == "initiator":
uri = URI_INITIATORS
else:
raise Exception('Unknown resource type ' + resource_type)
searchuri = uri + '/search'
results = self.api('GET', searchuri, None, search_scope)
return results['resource']
#
# Tag APIs
#
def getTagURI(self, resource_type, id):
    """Return the tag endpoint URI for the given resource type and id.

    Raises Exception for a resource type that has no tag endpoint.
    """
    # Resource types whose full URI template is a module-level constant
    # carrying a {0} placeholder for the resource id.
    direct_templates = {
        "authnprovider": URI_VDC_AUTHN_PROFILES,
        "auto_tiering_policy": URI_AUTO_TIER_POLICY,
        "fileshare": URI_FILESYSTEM,
        "volume": URI_VOLUME,
        "project": URI_PROJECT,
        "tenant": URI_TENANTS,
        "varray": URI_VARRAY,
        "network_system": URI_NETWORKSYSTEM,
        "storage_system": URI_STORAGEDEVICE,
        "protection_system": URI_PROTECTION_SYSTEM,
        "protectionset": URI_PROTECTIONSET,
        "smis_provider": URI_SMISPROVIDER,
        "storage_tier": URI_STORAGETIER,
        "network": URI_NETWORK,
        "storage_pool": URI_STORAGEPOOL,
        "storage_port": URI_STORAGEPORT,
        "snapshot": URI_FILE_SNAPSHOT,
        "block_snapshot": URI_BLOCK_SNAPSHOTS,
        "block_export": URI_EXPORTGROUP_INSTANCE,
        "vcenter": URI_VCENTER,
        "datacenter": URI_DATACENTER,
        "host": URI_HOST,
        "cluster": URI_CLUSTER,
        "ipinterface": URI_IPINTERFACE,
        "initiator": URI_INITIATOR,
    }
    # vpool variants are assembled from the service base plus a literal
    # suffix; only the suffix is formatted, matching the original code.
    service_suffixes = {
        "block_vpool": '/block/vpools/{0}',
        "file_vpool": '/file/vpools/{0}',
        "vpool": '/object/data-services-vpools/{0}',
    }
    if resource_type in direct_templates:
        uri = direct_templates[resource_type].format(id)
    elif resource_type in service_suffixes:
        uri = URI_SERVICES_BASE + service_suffixes[resource_type].format(id)
    else:
        raise Exception('Unknown resource type ' + resource_type)
    return uri + '/tags'
def tag(self, resource_type, id, tags):
    """Attach the given list of tags to the identified resource."""
    body = {'add': tags}
    self.api('PUT', self.getTagURI(resource_type, id), body)
def untag(self, resource_type, id, tags):
    """Remove the given list of tags from the identified resource."""
    body = {'remove': tags}
    self.api('PUT', self.getTagURI(resource_type, id), body)
def datastore_create(self, type, label, cos, filecos, size, mountpoint):
    """Create a data store and wait for its creation task(s) to complete.

    For type 'commodity' a single-node commodity store is created and
    every task in the response is waited on; for any other type a plain
    data store is created and the single returned task is waited on.

    Returns a tuple of (creation response, sync result) where the sync
    result is a list for the commodity case.
    """
    if (type == 'commodity'):
        params = dict()
        params['nodes'] = []
        params['nodes'].append({"nodeId":label, "name":label, "description":"Commodity Sanity Node", "virtual_array":cos})
        o = self.api('POST', URI_DATA_STORE_LIST + "/" + type, params)
        print ('data store creation result is %s' % o)
        # commodity creation returns one task per node; wait on each one
        sync_out_list = []
        for task in o['task']:
            s = self.api_sync_2(task['resource']['id'], task['op_id'], self.datastore_show_task)
            sync_out_list.append(s)
        print "sync completed"
        return (o, sync_out_list)
    else:
        params = {
            'name' : label,
            'virtual_array' : cos,
        }
        # size, mount point and file vpool are optional for this type
        if (size):
            params['size'] = size
        if (mountpoint):
            params['mount_point'] = mountpoint
        if (filecos):
            params['file_data_services_vpool'] = filecos
        o = self.api('POST', URI_DATA_STORE_LIST + "/" + type, params)
        print ('data store creation result is %s' % o)
        s = self.api_sync_2(o['resource']['id'], o['op_id'], self.datastore_show_task)
        print "sync completed"
        return (o, s)
def datastore_delete(self, uri, type):
    """Deactivate the data store at uri and block until it disappears.

    Returns (deactivate response, final poll response).
    """
    print "uri is ", uri
    o = self.api('POST', URI_RESOURCE_DEACTIVATE.format(URI_DATA_STORE.format(uri)), None)
    r = self.waitfor_op_deletion(uri, type)
    return (o, r)
def waitfor_op_deletion(self, id, type):
    """Poll the data store until the server reports 'invalid pool'.

    Polls every 3 seconds up to API_SYNC_TIMEOUT seconds; raises if the
    store is still present after the timeout, otherwise returns the last
    poll response.
    """
    poll_uri = URI_DATA_STORE_LIST + "/" + type + "/" + id
    response = self.coreapi('GET', poll_uri)
    if (BOURNE_DEBUG == '1'):
        print ('Datastore deletion response is %s' % response.text)
    waited = 0
    while (response.text != 'invalid pool'):
        time.sleep(3)
        response = self.coreapi('GET', poll_uri)
        print ('response is %s' % response.text)
        waited += 3
        if (waited > API_SYNC_TIMEOUT):
            break
    if (response.text != 'invalid pool'):
        raise Exception('Timed out waiting for deletion of data store: ' + id)
    return response
def datastore_show(self, type, uri):
    """Fetch the details of one data store of the given type."""
    show_uri = URI_DATA_STORE_LIST + "/" + type + "/" + uri
    return self.api('GET', show_uri)
def datastore_show_task(self, uri, task):
    """Fetch the status of a single data store task.

    URI_DATA_STORE supplies the {0} placeholder for the store id; the
    task id fills the {1} slot appended here.
    """
    task_uri_template = URI_DATA_STORE + '/tasks/{1}'
    return self.api('GET', task_uri_template.format(uri, task))
def datastore_list(self):
    """List all data stores; returns {} when the controller has none."""
    response = self.api('GET', URI_DATA_STORE_LIST)
    if not response:
        return {}
    return response['data_store']
def datastore_query(self, type, label):
    """Resolve a data store label to its id.

    If label is already a URI it is returned unchanged. Otherwise the
    store list is scanned for a store whose details carry a matching
    name. Raises Exception when no store matches.
    """
    if (self.__is_uri(label)):
        return label
    o = self.api('GET', URI_DATA_STORE_LIST)
    # Guard BEFORE indexing: the original indexed o['data_store'] first,
    # which crashed on an empty/None response before the guard could run.
    if (not o):
        return ()
    for pool in o['data_store']:
        try:
            pool_details = self.datastore_show(type, pool['id'])
            if (pool_details['name'] == label):
                return pool_details.get('id')
        except Exception:
            # best effort: skip stores whose details cannot be fetched
            pass
    raise Exception('bad pool name ' + label)
def datastore_bulkget(self):
    """Bulk-fetch the ids of all data stores."""
    return self.api('GET', URI_DATA_STORE_BULKGET)
def datastore_bulkgetids(self):
    """Return a chunk holding the ids of the first 10 data stores."""
    bulk = self.datastore_bulkget()
    # take only the first 10 ids from the bulk response
    return self.get_ids_chunk(bulk['id'], 0, 10)
def datastore_bulkpost(self, ids):
    """POST a set of ids to the bulk endpoint and return their details."""
    return self.api('POST', URI_DATA_STORE_BULKGET, ids)
def atmosdevice_create(self, namespace, project, name, atmosip, tenant, tenantid, admin, password, token):
    """Register an Atmos device and wait for the import task to finish.

    Returns (creation response, sync result). The token argument is
    ignored; the op id returned by the POST is used instead.
    """
    body = {'name': name,
            'namespace': namespace,
            'ip': atmosip,
            'tenant_name': tenant,
            'tenant_id': tenantid,
            'tenant_admin': admin,
            'tenant_admin_password': password}
    if (project != None):
        body['project'] = self.project_query(project).strip()
    # only POST uses /object/atmos-importer;
    # GETs continue to use /vdc/data-stores
    o = self.api('POST', URI_ATMOS_DEVICE_LIST, body)
    s = self.api_sync_2(o['resource']['id'], o['op_id'], self.atmosdevice_show_task)
    return (o, s)
def atmosdevice_update(self, uri, atmosip, tenant, admin, password):
parms = {}
if(atmosip):
parms['ip'] = atmosip
if(tenant):
parms['tenant_name'] = tenant
if(admin):
parms['tenant_admin'] = admin
if(password):
parms['tenant_admin_password'] = password
token = '<PASSWORD>-' + uri
response = self.coreapi('PUT', URI_ATMOS_DEVICE.format(uri), parms)
if (response.status_code != 200):
print "update atmos device failed with code: ", response.status_code
raise Exception('update atmos device failed')
return response.text
def atmosdevice_query(self, label):
    """Resolve an Atmos device name to its id.

    If label is already a URI it is returned unchanged. Raises Exception
    when no device with the given name exists.
    """
    if (self.__is_uri(label)):
        return label
    o = self.api('GET', URI_ATMOS_DEVICE_LIST)
    # Guard BEFORE indexing: the original indexed o['atmos_device']
    # first, which crashed on an empty/None response before the guard.
    if (not o):
        return ()
    for device in o['atmos_device']:
        try:
            device_details = self.atmosdevice_show(device['id'])
            if (device_details['name'] == label):
                return device.get('id')
        except Exception:
            # best effort: skip devices whose details cannot be fetched
            pass
    raise Exception('bad device name ' + label)
def atmosdevice_show(self, uri):
    """Fetch the details of a single Atmos device."""
    return self.api('GET', URI_ATMOS_DEVICE.format(uri))
def atmosdevice_list(self):
    """Return the ids of all registered Atmos devices.

    Returns an empty tuple when the controller returns no payload.
    """
    o = self.api('GET', URI_ATMOS_DEVICE_LIST)
    # Guard BEFORE indexing: the original indexed o['atmos_device']
    # first, which crashed on an empty/None response before the guard.
    if (not o):
        return ()
    return [device.get('id') for device in o['atmos_device']]
def atmosdevice_show_task(self, uri, task):
    """Fetch the status of a single Atmos device task."""
    task_uri = URI_ATMOS_DEVICE_TASK.format(uri, task)
    return self.api('GET', task_uri)
def atmosdevice_delete(self, uri):
    """Delete an Atmos device and wait for the delete task to finish.

    Returns (delete response, sync result).
    """
    o = self.api('POST', URI_ATMOS_DEVICE_DELETE.format(uri), None)
    r = self.api_sync_2(uri, o['op_id'], self.atmosdevice_show_task)
    return (o, r)
def objectingestion_create(self, dataStoreName, fileshareId, keypoolName,
                           dataStoreDescription):
    """Create an object ingestion job for the given file share/keypool."""
    body = {
        'datastore_name': dataStoreName,
        'filesystem_device_info': {'fileshare_id': fileshareId},
        'keypool_name': keypoolName,
    }
    # the description is optional
    if (dataStoreDescription):
        body['datastore_description'] = dataStoreDescription
    return self.api('POST', URI_OBJECT_INGESTION_LIST, body)
def objectingestion_op_status(self, objectingestionId, opId):
    """Fetch the status of one operation of an object ingestion job."""
    status_uri = URI_OBJECT_INGESTION_OP_STATUS.format(objectingestionId, opId)
    return self.api('GET', status_uri)
def objectingestion_list(self):
    """List all object ingestion jobs; returns {} when there are none."""
    response = self.api('GET', URI_OBJECT_INGESTION_LIST)
    if not response:
        return {}
    return response['object_ingestion']
def objectingestion_show(self, objectingestionId):
    """Print (rather than return) the details of an object ingestion job."""
    print self.api('GET', URI_OBJECT_INGESTION.format(objectingestionId))
def objectingestion_delete(self, objectingestionId):
    """Print (rather than return) the result of deleting an object ingestion job."""
    print self.api('POST', URI_OBJECT_INGESTION_DELETE.format(objectingestionId))
def _s3_hmac_base64_sig(self, method, bucket, objname, uid, secret, content_type, parameters_to_sign=None):
'''
calculate the signature for S3 request
StringToSign = HTTP-Verb + "\n" +
* Content-MD5 + "\n" +
* Content-Type + "\n" +
* Date + "\n" +
* CanonicalizedAmzHeaders +
* CanonicalizedResource
'''
buf = ""
# HTTP-Verb
buf += method + "\n"
# Content-MD5, a new line is needed even if it does not exist
md5 = self._headers.get('Content-MD5')
if md5 != None:
buf += md5
buf += "\n"
#Content-Type, a new line is needed even if it does not exist
if content_type != None:
buf+=content_type
buf += "\n"
# Date, it should be removed if "x-amz-date" is set
if self._headers.get("x-amz-date") == None:
date = self._headers.get('Date')
if date != None:
buf += date
buf += "\n"
# CanonicalizedAmzHeaders, does not support multiple headers with same name
canonicalizedAmzHeaders = []
for header in self._headers.keys():
if header.startswith("x-amz-") or header.startswith("x-emc-"):
canonicalizedAmzHeaders.append(header)
canonicalizedAmzHeaders.sort()
for name in canonicalizedAmzHeaders:
buf +=name+":"+self._headers[name]+"\n"
#CanonicalizedResource represents the Amazon S3 resource targeted by the request.
buf += "/"
if bucket != None:
buf += bucket
if objname != None:
buf += "/" + urllib.quote(objname)
if parameters_to_sign !=None:
para_names = parameters_to_sign.keys()
para_names.sort()
separator = '?';
for name in para_names:
value = parameters_to_sign[name]
buf += separator
buf += name
if value != None and value | |
in result:
metric_id = int(row['id'])
metric_name = str(row['metric'])
metrics.append([metric_id, metric_name])
connection.close()
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: could not determine metrics from metrics table'
logger.error('%s' % fail_msg)
# @added 20170806 - Bug #2130: MySQL - Aborted_clients
# Added missing disposal and raise
if engine:
engine_disposal(engine)
raise
if get_metric_profiles:
metrics_id = None
for metric_obj in metrics:
if metrics_id:
break
if metric == str(metric_obj[1]):
metrics_id = str(metric_obj[0])
new_query_string = query_string.replace('REPLACE_WITH_METRIC_ID', metrics_id)
query_string = new_query_string
logger.debug('debug :: query_string - %s' % query_string)
ionosphere_table = None
try:
ionosphere_table, fail_msg, trace = ionosphere_table_meta(skyline_app, engine)
logger.info(fail_msg)
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: failed to get ionosphere_table meta for options'
logger.error('%s' % fail_msg)
if engine:
engine_disposal(engine)
raise
logger.info('%s :: ionosphere_table OK' % function_str)
all_fps = []
try:
connection = engine.connect()
stmt = select([ionosphere_table]).where(ionosphere_table.c.id != 0)
result = connection.execute(stmt)
for row in result:
try:
fp_id = int(row['id'])
fp_metric_id = int(row['metric_id'])
for metric_obj in metrics:
if fp_metric_id == int(metric_obj[0]):
fp_metric = metric_obj[1]
break
full_duration = int(row['full_duration'])
anomaly_timestamp = int(row['anomaly_timestamp'])
tsfresh_version = str(row['tsfresh_version'])
# These handle MySQL NULL
try:
calc_time = float(row['calc_time'])
except:
calc_time = 0
try:
features_count = int(row['features_count'])
except:
features_count = 0
try:
features_sum = float(row['features_sum'])
except:
features_sum = 0
try:
deleted = int(row['deleted'])
except:
deleted = 0
fp_matched_count = int(row['matched_count'])
last_matched = int(row['last_matched'])
if str(last_matched) == '0':
human_date = 'never matched'
else:
human_date = time.strftime('%Y-%m-%d %H:%M:%S %Z (%A)', time.localtime(int(last_matched)))
created_timestamp = str(row['created_timestamp'])
last_checked = int(row['last_checked'])
if str(last_checked) == '0':
checked_human_date = 'never checked'
else:
checked_human_date = time.strftime('%Y-%m-%d %H:%M:%S %Z (%A)', time.localtime(int(last_checked)))
fp_checked_count = int(row['checked_count'])
fp_parent_id = int(row['parent_id'])
fp_generation = int(row['generation'])
# @added 20170402 - Feature #2000: Ionosphere - validated
fp_validated = int(row['validated'])
all_fps.append([fp_id, fp_metric_id, str(fp_metric), full_duration, anomaly_timestamp, tsfresh_version, calc_time, features_count, features_sum, deleted, fp_matched_count, human_date, created_timestamp, fp_checked_count, checked_human_date, fp_parent_id, fp_generation, fp_validated])
# logger.info('%s :: %s feature profiles found' % (function_str, str(len(all_fps))))
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
logger.error('error :: bad row data')
connection.close()
all_fps.sort(key=operator.itemgetter(int(0)))
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
logger.error('error :: bad row data')
raise
if count_request and search_query:
features_profiles = None
features_profiles_count = None
full_duration_list = None
enabled_list = None
tsfresh_version_list = None
generation_list = None
if count_by_metric and search_query:
features_profiles_count = []
if engine_needed and engine:
try:
stmt = query_string
connection = engine.connect()
for row in engine.execute(stmt):
fp_count = int(row[0])
fp_metric_id = int(row['metric_id'])
for metric_obj in metrics:
if fp_metric_id == metric_obj[0]:
fp_metric = metric_obj[1]
break
features_profiles_count.append([fp_count, fp_metric_id, str(fp_metric)])
connection.close()
logger.info('%s :: features_profiles_count %s' % (function_str, str(len(features_profiles_count))))
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: failed to count features profiles'
logger.error('%s' % fail_msg)
if engine:
engine_disposal(engine)
raise
features_profiles_count.sort(key=operator.itemgetter(int(0)))
if count_request and search_query:
if not count_by_metric:
if engine_needed and engine:
try:
stmt = query_string
connection = engine.connect()
for row in engine.execute(stmt):
item_count = int(row[0])
item_id = int(row[1])
if count_by_matched or count_by_checked:
for fp_obj in all_fps:
if item_id == fp_obj[0]:
metric_name = fp_obj[2]
break
if count_by_matched:
matched_count.append([item_count, item_id, metric_name])
if count_by_checked:
checked_count.append([item_count, item_id, metric_name])
if count_by_generation:
generation_count.append([item_count, item_id])
connection.close()
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: failed to get ionosphere_table meta for options'
logger.error('%s' % fail_msg)
if engine:
engine_disposal(engine)
raise
if count_request and search_query:
if engine:
engine_disposal(engine)
# @modified 20170809 - Bug #2136: Analyzer stalling on no metrics
# Added except to all del methods to prevent stalling if any object does
# not exist
try:
del all_fps
except:
logger.error('error :: failed to del all_fps')
try:
del metrics
except:
logger.error('error :: failed to del metrics')
search_success = True
return (features_profiles, features_profiles_count, matched_count,
checked_count, generation_count, full_duration_list,
enabled_list, tsfresh_version_list, generation_list,
search_success, fail_msg, trace)
features_profiles = []
# @added 20170322 - Feature #1960: ionosphere_layers
# Added layers information to the features_profiles items
layers_present = False
if engine_needed and engine and search_query:
try:
connection = engine.connect()
if get_metric_profiles:
# stmt = select([ionosphere_table]).where(ionosphere_table.c.metric_id == int(metric_id))
stmt = select([ionosphere_table]).where(ionosphere_table.c.metric_id == int(metrics_id))
logger.debug('debug :: stmt - is abstracted')
else:
stmt = query_string
logger.debug('debug :: stmt - %s' % stmt)
try:
result = connection.execute(stmt)
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: MySQL query failed'
logger.error('%s' % fail_msg)
if engine:
engine_disposal(engine)
raise
for row in result:
try:
fp_id = int(row['id'])
metric_id = int(row['metric_id'])
for metric_obj in metrics:
if metric_id == int(metric_obj[0]):
metric = metric_obj[1]
break
full_duration = int(row['full_duration'])
anomaly_timestamp = int(row['anomaly_timestamp'])
tsfresh_version = str(row['tsfresh_version'])
# These handle MySQL NULL
try:
calc_time = float(row['calc_time'])
except:
calc_time = 0
try:
features_count = int(row['features_count'])
except:
features_count = 0
try:
features_sum = float(row['features_sum'])
except:
features_sum = 0
try:
deleted = int(row['deleted'])
except:
deleted = 0
fp_matched_count = int(row['matched_count'])
last_matched = int(row['last_matched'])
if str(last_matched) == '0':
human_date = 'never matched'
else:
human_date = time.strftime('%Y-%m-%d %H:%M:%S %Z (%A)', time.localtime(int(last_matched)))
created_timestamp = str(row['created_timestamp'])
last_checked = int(row['last_checked'])
if str(last_checked) == '0':
checked_human_date = 'never checked'
else:
checked_human_date = time.strftime('%Y-%m-%d %H:%M:%S %Z (%A)', time.localtime(int(last_checked)))
fp_checked_count = int(row['checked_count'])
fp_parent_id = int(row['parent_id'])
fp_generation = int(row['generation'])
# @added 20170402 - Feature #2000: Ionosphere - validated
fp_validated = int(row['validated'])
fp_layers_id = int(row['layers_id'])
# @added 20170322 - Feature #1960: ionosphere_layers
# Added layers information to the features_profiles items
if fp_layers_id > 0:
layers_present = True
# @modified 20180812 - Feature #2430: Ionosphere validate learnt features profiles page
# Fix bug and make this function output useable to
# get_features_profiles_to_validate
append_to_features_profile_list = True
if 'validated_equals' in request.args:
validated_equals = request.args.get('validated_equals', 'any')
else:
validated_equals = 'any'
if validated_equals == 'false':
if fp_validated == 1:
append_to_features_profile_list = False
if append_to_features_profile_list:
features_profiles.append([fp_id, metric_id, str(metric), full_duration, anomaly_timestamp, tsfresh_version, calc_time, features_count, features_sum, deleted, fp_matched_count, human_date, created_timestamp, fp_checked_count, checked_human_date, fp_parent_id, fp_generation, fp_validated, fp_layers_id])
# @added 20170912 - Feature #2056: ionosphere - disabled_features_profiles
features_profile_enabled = int(row['enabled'])
if features_profile_enabled == 1:
enabled_list.append(fp_id)
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
logger.error('error :: bad row data')
connection.close()
features_profiles.sort(key=operator.itemgetter(int(0)))
logger.debug('debug :: features_profiles length - %s' % str(len(features_profiles)))
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: failed to get ionosphere_table data'
logger.error('%s' % fail_msg)
if engine:
engine_disposal(engine)
raise
# @added 20170322 - Feature #1960: ionosphere_layers
# Added layers information to the features_profiles items
features_profiles_layers = []
if features_profiles and layers_present:
try:
ionosphere_layers_table, log_msg, trace = ionosphere_layers_table_meta(skyline_app, engine)
logger.info(log_msg)
logger.info('ionosphere_layers OK')
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: failed to get ionosphere_layers meta'
logger.error('%s' % fail_msg)
# @added 20170806 - Bug #2130: MySQL - Aborted_clients
# Added missing disposal
if engine:
engine_disposal(engine)
raise # to webapp to return in the UI
try:
connection = engine.connect()
if get_metric_profiles:
stmt = select([ionosphere_layers_table]).where(ionosphere_layers_table.c.metric_id == int(metrics_id))
# logger.debug('debug :: stmt - is abstracted')
else:
layers_query_string = 'SELECT * FROM ionosphere_layers'
stmt = layers_query_string
# logger.debug('debug :: stmt - %s' % stmt)
result = connection.execute(stmt)
for row in result:
try:
layer_id = int(row['id'])
fp_id = int(row['fp_id'])
layer_matched_count = int(row['matched_count'])
layer_last_matched = int(row['last_matched'])
if str(layer_last_matched) == '0':
layer_human_date = 'never matched'
else:
layer_human_date = time.strftime('%Y-%m-%d %H:%M:%S %Z (%A)', time.localtime(int(layer_last_matched)))
layer_last_checked = int(row['last_checked'])
# @modified 20170924 - Feature #2170: Ionosphere - validated matches
# Fixed variable typo which resulted in layer last checked
# field showing 1970-01-01 00:00:00 UTC (Thursday)
# if str(last_checked) == '0':
if str(layer_last_checked) == '0':
layer_checked_human_date = 'never checked'
else:
layer_checked_human_date = time.strftime('%Y-%m-%d %H:%M:%S %Z (%A)', time.localtime(int(layer_last_checked)))
layer_check_count = int(row['check_count'])
layer_label = str(row['label'])
features_profiles_layers.append([layer_id, fp_id, layer_matched_count, layer_human_date, layer_check_count, layer_checked_human_date, layer_label])
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
logger.error('error :: bad row data')
connection.close()
features_profiles_layers.sort(key=operator.itemgetter(int(0)))
logger.debug('debug :: features_profiles length - %s' % str(len(features_profiles)))
except:
trace = traceback.format_exc()
logger.error('%s' % trace)
fail_msg = 'error :: failed to get ionosphere_table data'
logger.error('%s' % fail_msg)
if engine:
engine_disposal(engine)
raise
# Add the layers information to the features_profiles list
features_profiles_and_layers = []
if features_profiles:
# @modified 20170402 - Feature #2000: Ionosphere - validated
for fp_id, metric_id, metric, full_duration, anomaly_timestamp, tsfresh_version, calc_time, features_count, features_sum, deleted, fp_matched_count, human_date, created_timestamp, fp_checked_count, checked_human_date, fp_parent_id, fp_generation, fp_validated, fp_layers_id in features_profiles:
default_values = True
# @modified 20180816 - Feature #2430: Ionosphere validate learnt features profiles page
# Moved default_values | |
<filename>com/vmware/appliance/system_client.py
# -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.appliance.system.
#---------------------------------------------------------------------------
"""
The ``com.vmware.appliance.system_client`` module provides classes to query the
appliance system information. The module is available starting in vSphere 6.5.
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class Storage(VapiInterface):
    """
    ``Storage`` class provides methods for appliance storage configuration.
    """
    # Canonical vAPI identifier of this service.
    _VAPI_SERVICE_ID = 'com.vmware.appliance.system.storage'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _StorageStub)
        self._VAPI_OPERATION_IDS = {}
    class StorageMapping(VapiStruct):
        """
        The ``Storage.StorageMapping`` class describes the mapping between VCSA
        partitions and the hard disk numbers visible in the vSphere Web Client.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     disk=None,
                     partition=None,
                     description=None,
                    ):
            """
            :type disk: :class:`str`
            :param disk: The disk number in the vSphere Web Client.
                When clients pass a value of this class as a parameter, the
                attribute must be an identifier for the resource type:
                ``com.vmware.appliance.system.storage``. When methods return a
                value of this class as a return value, the attribute will be an
                identifier for the resource type:
                ``com.vmware.appliance.system.storage``.
            :type partition: :class:`str`
            :param partition: Storage partition name.
            :type description: :class:`com.vmware.vapi.std_client.LocalizableMessage`
            :param description: Description of partition. This attribute was added in vSphere API
                6.7.
                This attribute is optional because it was added in a newer version
                than its parent node.
            """
            self.disk = disk
            self.partition = partition
            self.description = description
            VapiStruct.__init__(self)
    # Register the wire-level binding type for StorageMapping; 'description'
    # is optional for backward compatibility with pre-6.7 servers.
    StorageMapping._set_binding_type(type.StructType(
        'com.vmware.appliance.system.storage.storage_mapping', {
            'disk': type.IdType(resource_types='com.vmware.appliance.system.storage'),
            'partition': type.StringType(),
            'description': type.OptionalType(type.ReferenceType('com.vmware.vapi.std_client', 'LocalizableMessage')),
        },
        StorageMapping,
        False,
        None))
    class StorageChange(VapiStruct):
        """
        The ``Storage.StorageChange`` class describes the changes in capacity of
        a storage partition. This class was added in vSphere API 6.7.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     old_size=None,
                     new_size=None,
                    ):
            """
            :type old_size: :class:`long`
            :param old_size: Original size of the partition in MB. This attribute was added in
                vSphere API 6.7.
            :type new_size: :class:`long`
            :param new_size: New size of the partition in MB. This attribute was added in
                vSphere API 6.7.
            """
            self.old_size = old_size
            self.new_size = new_size
            VapiStruct.__init__(self)
    # Register the wire-level binding type for StorageChange.
    StorageChange._set_binding_type(type.StructType(
        'com.vmware.appliance.system.storage.storage_change', {
            'old_size': type.IntegerType(),
            'new_size': type.IntegerType(),
        },
        StorageChange,
        False,
        None))
    def list(self):
        """
        Get disk to partition mapping.
        :rtype: :class:`list` of :class:`Storage.StorageMapping`
        :return: list of mapping items
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        return self._invoke('list', None)
    def resize(self):
        """
        Resize all partitions to 100 percent of disk size.
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        return self._invoke('resize', None)
    def resize_ex(self):
        """
        Resize all partitions to 100 percent of disk size. This method was
        added in vSphere API 6.7.
        :rtype: :class:`dict` of :class:`str` and :class:`Storage.StorageChange`
        :return: List of the partitions with the size before and after resizing
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        return self._invoke('resize_ex', None)
class Uptime(VapiInterface):
    """
    ``Uptime`` class provides methods to get the system uptime.
    """
    # Canonical vAPI identifier of this service.
    _VAPI_SERVICE_ID = 'com.vmware.appliance.system.uptime'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _UptimeStub)
        self._VAPI_OPERATION_IDS = {}
    def get(self):
        """
        Get the system uptime.
        :rtype: :class:`float`
        :return: system uptime in seconds
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        return self._invoke('get', None)
class Time(VapiInterface):
    """
    ``Time`` class provides methods to get the system time.
    """
    # Canonical vAPI identifier of this service.
    _VAPI_SERVICE_ID = 'com.vmware.appliance.system.time'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _TimeStub)
        self._VAPI_OPERATION_IDS = {}
    class SystemTimeStruct(VapiStruct):
        """
        ``Time.SystemTimeStruct`` class: structure representing the system time.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        def __init__(self,
                     seconds_since_epoch=None,
                     date=None,
                     time=None,
                     timezone=None,
                    ):
            """
            :type seconds_since_epoch: :class:`float`
            :param seconds_since_epoch: seconds since the epoch
            :type date: :class:`str`
            :param date: date, format: Thu 07-31-2014
            :type time: :class:`str`
            :param time: time, format: 18:18:32
            :type timezone: :class:`str`
            :param timezone: timezone name
            """
            self.seconds_since_epoch = seconds_since_epoch
            self.date = date
            self.time = time
            self.timezone = timezone
            VapiStruct.__init__(self)
    # Register the wire-level binding type for SystemTimeStruct.
    SystemTimeStruct._set_binding_type(type.StructType(
        'com.vmware.appliance.system.time.system_time_struct', {
            'seconds_since_epoch': type.DoubleType(),
            'date': type.StringType(),
            'time': type.StringType(),
            'timezone': type.StringType(),
        },
        SystemTimeStruct,
        False,
        None))
    def get(self):
        """
        Get system time.
        :rtype: :class:`Time.SystemTimeStruct`
        :return: System time
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        return self._invoke('get', None)
class Version(VapiInterface):
    """
    ``Version`` class provides methods to get the appliance version.
    """
    # Canonical vAPI identifier of this service.
    _VAPI_SERVICE_ID = 'com.vmware.appliance.system.version'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _VersionStub)
        self._VAPI_OPERATION_IDS = {}
    class VersionStruct(VapiStruct):
        """
        ``Version.VersionStruct`` class: structure representing appliance
        version information.
        .. tip::
            The arguments are used to initialize data attributes with the same
            names.
        """
        # NOTE: the 'type' parameter name shadows the builtin (and the
        # 'type' bindings module) inside __init__ but is part of the
        # generated wire contract and must not be renamed.
        def __init__(self,
                     version=None,
                     product=None,
                     build=None,
                     type=None,
                     summary=None,
                     releasedate=None,
                     install_time=None,
                    ):
            """
            :type version: :class:`str`
            :param version: Appliance version.
            :type product: :class:`str`
            :param product: Appliance name.
            :type build: :class:`str`
            :param build: Appliance build number.
            :type type: :class:`str`
            :param type: Type of product. Same product can have different deployment
                options, which is represented by type.
            :type summary: :class:`str`
            :param summary: Summary of patch (empty string, if the appliance has not been
                patched)
            :type releasedate: :class:`str`
            :param releasedate: Release date of patch (empty string, if the appliance has not been
                patched)
            :type install_time: :class:`str`
            :param install_time: Display the date and time when this system was first installed.
                Value will not change on subsequent updates.
            """
            self.version = version
            self.product = product
            self.build = build
            self.type = type
            self.summary = summary
            self.releasedate = releasedate
            self.install_time = install_time
            VapiStruct.__init__(self)
    # Register the wire-level binding type for VersionStruct.
    VersionStruct._set_binding_type(type.StructType(
        'com.vmware.appliance.system.version.version_struct', {
            'version': type.StringType(),
            'product': type.StringType(),
            'build': type.StringType(),
            'type': type.StringType(),
            'summary': type.StringType(),
            'releasedate': type.StringType(),
            'install_time': type.StringType(),
        },
        VersionStruct,
        False,
        None))
    def get(self):
        """
        Get the version.
        :rtype: :class:`Version.VersionStruct`
        :return: version information about the appliance
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        return self._invoke('get', None)
class _StorageStub(ApiInterfaceStub):
    """
    Generated stub for the ``com.vmware.appliance.system.storage`` service.
    Registers input/output binding types, error maps and REST metadata for
    the ``list``, ``resize`` and ``resize_ex`` operations.
    """
    def __init__(self, config):
        # properties for list operation (no input parameters)
        list_input_type = type.StructType('operation-input', {})
        list_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        list_input_value_validator_list = [
        ]
        list_output_validator_list = [
        ]
        list_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/appliance/system/storage',
            path_variables={
            },
            query_parameters={
            }
        )
        # properties for resize operation (no input parameters)
        resize_input_type = type.StructType('operation-input', {})
        resize_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        resize_input_value_validator_list = [
        ]
        resize_output_validator_list = [
        ]
        resize_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/appliance/system/storage/resize',
            path_variables={
            },
            query_parameters={
            }
        )
        # properties for resize_ex operation (no input parameters)
        resize_ex_input_type = type.StructType('operation-input', {})
        resize_ex_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        resize_ex_input_value_validator_list = [
        ]
        resize_ex_output_validator_list = [
        ]
        resize_ex_rest_metadata = OperationRestMetadata(
            http_method='POST',
            url_template='/appliance/system/storage?action=resize-ex',
            path_variables={
            },
            query_parameters={
            }
        )
        # Map each operation name to its binding metadata; ReferenceType
        # defers resolution of the structs defined earlier in this module.
        operations = {
            'list': {
                'input_type': list_input_type,
                'output_type': type.ListType(type.ReferenceType(__name__, 'Storage.StorageMapping')),
                'errors': list_error_dict,
                'input_value_validator_list': list_input_value_validator_list,
                'output_validator_list': list_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'resize': {
                'input_type': resize_input_type,
                'output_type': type.VoidType(),
                'errors': resize_error_dict,
                'input_value_validator_list': resize_input_value_validator_list,
                'output_validator_list': resize_output_validator_list,
                'task_type': TaskType.NONE,
            },
            'resize_ex': {
                'input_type': resize_ex_input_type,
                'output_type': type.MapType(type.StringType(), type.ReferenceType(__name__, 'Storage.StorageChange')),
                'errors': resize_ex_error_dict,
                'input_value_validator_list': resize_ex_input_value_validator_list,
                'output_validator_list': resize_ex_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {
            'list': list_rest_metadata,
            'resize': resize_rest_metadata,
            'resize_ex': resize_ex_rest_metadata,
        }
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.appliance.system.storage',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=True)
class _UptimeStub(ApiInterfaceStub):
    """
    Generated stub for the ``com.vmware.appliance.system.uptime`` service.
    Wires up the single ``get`` operation (no input, double output) and its
    REST metadata.
    """
    def __init__(self, config):
        # The 'get' operation takes no input parameters.
        get_input_type = type.StructType('operation-input', {})
        get_error_dict = {
            'com.vmware.vapi.std.errors.error':
                type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
        }
        get_input_value_validator_list = []
        get_output_validator_list = []
        # Plain GET against the uptime endpoint; no path or query params.
        get_rest_metadata = OperationRestMetadata(
            http_method='GET',
            url_template='/appliance/system/uptime',
            path_variables={},
            query_parameters={}
        )
        operations = {
            'get': {
                'input_type': get_input_type,
                'output_type': type.DoubleType(),
                'errors': get_error_dict,
                'input_value_validator_list': get_input_value_validator_list,
                'output_validator_list': get_output_validator_list,
                'task_type': TaskType.NONE,
            },
        }
        rest_metadata = {'get': get_rest_metadata}
        ApiInterfaceStub.__init__(
            self, iface_name='com.vmware.appliance.system.uptime',
            config=config, operations=operations, rest_metadata=rest_metadata,
            is_vapi_rest=True)
class _TimeStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/appliance/system/time',
path_variables={
},
query_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType(__name__, 'Time.SystemTimeStruct'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = | |
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import functools
import os
import pytest
import llnl.util.filesystem as fs
import spack.config
import spack.database
import spack.environment as ev
import spack.main
import spack.spec
import spack.store
import spack.util.spack_yaml as syaml
# Command wrappers used throughout these tests to invoke `spack config`
# and `spack env` as a user would.
config = spack.main.SpackCommand('config')
env = spack.main.SpackCommand('env')
def _create_config(scope=None, data=None, section='packages'):
    """Write *data* as YAML to the configuration file for *section*.

    :param scope: configuration scope to write to; defaults to
        ``spack.config.default_modify_scope()``
    :param data: configuration dict to dump; defaults to an empty dict
    :param section: configuration section name (e.g. ``'packages'``)
    :return: path of the configuration file that was written
    """
    # A ``data={}`` default would be a shared mutable default argument;
    # use None as sentinel and create a fresh dict per call instead.
    data = {} if data is None else data
    scope = scope or spack.config.default_modify_scope()
    cfg_file = spack.config.config.get_config_filename(scope, section)
    with open(cfg_file, 'w') as f:
        syaml.dump(data, stream=f)
    return cfg_file
@pytest.fixture()
def packages_yaml_v015(mutable_config):
    """Create a packages.yaml in the old format"""
    legacy_packages = {
        'packages': {
            'cmake': {'paths': {'cmake@3.14.0': '/usr'}},
            'gcc': {'modules': {'gcc@8.3.0': 'gcc-8'}},
        }
    }
    # Defer writing the file until the test calls the partial, so tests
    # can choose the scope to write into.
    return functools.partial(
        _create_config, data=legacy_packages, section='packages')
@pytest.fixture()
def config_yaml_v015(mutable_config):
    """Create a config.yaml in the old format"""
    old_data = {
        'config': {
            'install_tree': '/fake/path',
            'install_path_scheme': '{name}-{version}',
        }
    }
    return functools.partial(_create_config, data=old_data, section='config')
def test_get_config_scope(mock_low_high_config):
    """An empty section prints as an empty YAML mapping."""
    assert config('get', 'compilers').strip() == 'compilers: {}'
def test_get_config_scope_merged(mock_low_high_config):
    """`config get` merges scopes, with higher-precedence entries first."""
    low_path = mock_low_high_config.scopes['low'].path
    high_path = mock_low_high_config.scopes['high'].path
    fs.mkdirp(low_path)
    fs.mkdirp(high_path)
    with open(os.path.join(low_path, 'repos.yaml'), 'w') as f:
        f.write('''\
repos:
- repo3
''')
    with open(os.path.join(high_path, 'repos.yaml'), 'w') as f:
        f.write('''\
repos:
- repo1
- repo2
''')
    assert config('get', 'repos').strip() == '''repos:
- repo1
- repo2
- repo3'''
def test_config_edit():
    """Ensure `spack config edit` edits the right paths."""
    # compilers resolve to the default modify scope; repos to the user scope
    dms = spack.config.default_modify_scope('compilers')
    dms_path = spack.config.config.scopes[dms].path
    user_path = spack.config.config.scopes['user'].path
    comp_path = os.path.join(dms_path, 'compilers.yaml')
    repos_path = os.path.join(user_path, 'repos.yaml')
    assert config('edit', '--print-file', 'compilers').strip() == comp_path
    assert config('edit', '--print-file', 'repos').strip() == repos_path
def test_config_get_gets_spack_yaml(mutable_mock_env_path):
    """With an active environment, `config get` reads the env's spack.yaml."""
    env = ev.create('test')
    # without a section argument and no active env, the command fails
    config('get', fail_on_error=False)
    assert config.returncode == 1
    with env:
        config('get', fail_on_error=False)
        assert config.returncode == 1
        env.write()
        assert 'mpileaks' not in config('get')
        env.add('mpileaks')
        env.write()
        assert 'mpileaks' in config('get')
def test_config_edit_edits_spack_yaml(mutable_mock_env_path):
    """With an active environment, `config edit` targets its manifest file."""
    env = ev.create('test')
    with env:
        assert config('edit', '--print-file').strip() == env.manifest_path
def test_config_edit_fails_correctly_with_no_env(mutable_mock_env_path):
    """`config edit` without a section or active env reports a clear error."""
    output = config('edit', '--print-file', fail_on_error=False)
    assert "requires a section argument or an active environment" in output
def test_config_get_fails_correctly_with_no_env(mutable_mock_env_path):
    """`config get` without a section or active env reports a clear error."""
    output = config('get', fail_on_error=False)
    assert "requires a section argument or an active environment" in output
def test_config_list():
    """`config list` enumerates the known configuration sections."""
    output = config('list')
    assert 'compilers' in output
    assert 'packages' in output
def test_config_add(mutable_empty_config):
    """`config add` sets a simple scalar value."""
    config('add', 'config:dirty:true')
    output = config('get', 'config')
    assert output == """config:
  dirty: true
"""
def test_config_add_list(mutable_empty_config):
    """Successive `config add` calls prepend to a list-valued setting."""
    config('add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    config('add', 'config:template_dirs:test3')
    output = config('get', 'config')
    assert output == """config:
  template_dirs:
  - test3
  - test2
  - test1
"""
def test_config_add_override(mutable_empty_config):
    """The `section::` override syntax replaces lower-precedence values."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    output = config('get', 'config')
    assert output == """config:
  template_dirs:
  - test2
  - test1
"""
    # '::' overrides the whole section instead of merging with site scope
    config('add', 'config::template_dirs:[test2]')
    output = config('get', 'config')
    assert output == """config:
  template_dirs:
  - test2
"""
def test_config_add_override_leaf(mutable_empty_config):
    """The `key::` override syntax replaces a single leaf value."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    output = config('get', 'config')
    assert output == """config:
  template_dirs:
  - test2
  - test1
"""
    # leaf override: the key is stored with a trailing ':' marker
    config('add', 'config:template_dirs::[test2]')
    output = config('get', 'config')
    assert output == """config:
  'template_dirs:':
  - test2
"""
def test_config_add_update_dict(mutable_empty_config):
    """`config add` can create nested dict entries from a colon path."""
    config('add', 'packages:all:version:[1.0.0]')
    output = config('get', 'packages')
    expected = 'packages:\n  all:\n    version: [1.0.0]\n'
    assert output == expected
def test_config_with_c_argument(mutable_empty_config):
    """Config values passed via `spack -c` land in the command_line scope."""
    # I don't know how to add a spack argument to a Spack Command, so we test this way
    config_file = 'config:install_root:root:/path/to/config.yaml'
    parser = spack.main.make_argument_parser()
    args = parser.parse_args(['-c', config_file])
    assert config_file in args.config_vars
    # Add the path to the config
    config("add", args.config_vars[0], scope='command_line')
    output = config("get", 'config')
    assert "config:\n  install_root:\n    root: /path/to/config.yaml" in output
def test_config_add_ordered_dict(mutable_empty_config):
    """Dict entries keep their insertion order in the emitted YAML."""
    config('add', 'mirrors:first:/path/to/first')
    config('add', 'mirrors:second:/path/to/second')
    output = config('get', 'mirrors')
    assert output == """mirrors:
  first: /path/to/first
  second: /path/to/second
"""
def test_config_add_invalid_fails(mutable_empty_config):
    """Adding schema-invalid data raises instead of corrupting the config."""
    config('add', 'packages:all:variants:+debug')
    with pytest.raises(
            (spack.config.ConfigFormatError, AttributeError)
    ):
        config('add', 'packages:all:True')
def test_config_add_from_file(mutable_empty_config, tmpdir):
    """`config add -f` merges settings from a spack.yaml file."""
    contents = """spack:
  config:
    dirty: true
"""
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')
    assert output == """config:
  dirty: true
"""
def test_config_add_from_file_multiple(mutable_empty_config, tmpdir):
    """`config add -f` merges multiple settings from one file."""
    contents = """spack:
  config:
    dirty: true
    template_dirs: [test1]
"""
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')
    assert output == """config:
  dirty: true
  template_dirs: [test1]
"""
def test_config_add_override_from_file(mutable_empty_config, tmpdir):
    """A `section::` override inside a file replaces lower-scope values."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    contents = """spack:
  config::
    template_dirs: [test2]
"""
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')
    assert output == """config:
  template_dirs: [test2]
"""
def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
    """A `key::` leaf override inside a file replaces that one value."""
    config('--scope', 'site', 'add', 'config:template_dirs:test1')
    contents = """spack:
  config:
    template_dirs:: [test2]
"""
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    output = config('get', 'config')
    assert output == """config:
  'template_dirs:': [test2]
"""
def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
    """Settings added from a file merge ahead of pre-existing entries."""
    config('add', 'packages:all:compiler:[gcc]')
    # contents to add to file
    contents = """spack:
  packages:
    all:
      version:
      - 1.0.0
"""
    # create temp file and add it to config
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    config('add', '-f', file)
    # get results
    output = config('get', 'packages')
    # added config comes before prior config
    expected = """packages:
  all:
    version:
    - 1.0.0
    compiler: [gcc]
"""
    assert expected == output
def test_config_add_invalid_file_fails(tmpdir):
    """Adding a file with schema-invalid data raises ConfigFormatError."""
    # contents to add to file
    # invalid because version requires a list
    contents = """spack:
  packages:
    all:
      version: 1.0.0
"""
    # create temp file and add it to config
    file = str(tmpdir.join('spack.yaml'))
    with open(file, 'w') as f:
        f.write(contents)
    with pytest.raises(
            (spack.config.ConfigFormatError)
    ):
        config('add', '-f', file)
def test_config_remove_value(mutable_empty_config):
    """`config remove` deletes a specific key:value pair."""
    config('add', 'config:dirty:true')
    config('remove', 'config:dirty:true')
    output = config('get', 'config')
    assert output == """config: {}
"""
def test_config_remove_alias_rm(mutable_empty_config):
    """`config rm` is an alias for `config remove`."""
    config('add', 'config:dirty:true')
    config('rm', 'config:dirty:true')
    output = config('get', 'config')
    assert output == """config: {}
"""
def test_config_remove_dict(mutable_empty_config):
    """Removing by key (no value) deletes the whole entry."""
    config('add', 'config:dirty:true')
    config('rm', 'config:dirty')
    output = config('get', 'config')
    assert output == """config: {}
"""
def test_remove_from_list(mutable_empty_config):
    """Removing a single element leaves the rest of the list intact."""
    config('add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    config('add', 'config:template_dirs:test3')
    config('remove', 'config:template_dirs:test2')
    output = config('get', 'config')
    assert output == """config:
  template_dirs:
  - test3
  - test1
"""
def test_remove_list(mutable_empty_config):
    """A list-literal argument to remove deletes the matching element."""
    config('add', 'config:template_dirs:test1')
    config('add', 'config:template_dirs:[test2]')
    config('add', 'config:template_dirs:test3')
    config('remove', 'config:template_dirs:[test2]')
    output = config('get', 'config')
    assert output == """config:
  template_dirs:
  - test3
  - test1
"""
def test_config_add_to_env(mutable_empty_config, mutable_mock_env_path):
    """With an active environment, `config add` writes into its spack.yaml."""
    env('create', 'test')
    with ev.read('test'):
        config('add', 'config:dirty:true')
        output = config('get')
    expected = """  config:
    dirty: true
"""
    assert expected in output
def test_config_add_to_env_preserve_comments(mutable_empty_config,
                                             mutable_mock_env_path,
                                             tmpdir):
    """Adding config to an environment keeps existing YAML comments."""
    filepath = str(tmpdir.join('spack.yaml'))
    manifest = """# comment
spack:  # comment
  # comment
  specs:  # comment
  - foo  # comment
  # comment
  view: true  # comment
  packages:  # comment
    # comment
    all:  # comment
      # comment
      compiler: [gcc]  # comment
"""
    with open(filepath, 'w') as f:
        f.write(manifest)
    env = ev.Environment(str(tmpdir))
    with env:
        config('add', 'config:dirty:true')
        output = config('get')
    # the new section is appended; everything else is unchanged
    expected = manifest
    expected += """  config:
    dirty: true
"""
    assert output == expected
def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
    """With an active environment, `config rm` edits its spack.yaml."""
    env('create', 'test')
    with ev.read('test'):
        config('add', 'config:dirty:true')
    with ev.read('test'):
        config('rm', 'config:dirty')
        output = config('get')
    expected = ev.default_manifest_yaml
    expected += """  config: {}
"""
    assert output == expected
def test_config_update_packages(packages_yaml_v015):
    """Test Spack updating old packages.yaml format for externals
    to new format. Ensure that data is preserved and converted
    properly.
    """
    packages_yaml_v015()
    config('update', '-y', 'packages')
    # Check the entries have been transformed
    data = spack.config.get('packages')
    check_packages_updated(data)
def test_config_update_config(config_yaml_v015):
    """`config update` migrates an old-format config.yaml in place."""
    config_yaml_v015()
    config('update', '-y', 'config')
    # Check the entries have been transformed
    data = spack.config.get('config')
    check_config_updated(data)
def test_config_update_not_needed(mutable_config):
    """`config update` on an already-current section is a no-op."""
    data_before = spack.config.get('repos')
    config('update', '-y', 'repos')
    data_after = spack.config.get('repos')
    assert data_before == data_after
def test_config_update_fail_on_permission_issue(
        packages_yaml_v015, monkeypatch
):
    """`config update` errors out when the config file is not writable."""
    # The first time it will update and create the backup file
    packages_yaml_v015()
    # Mock a global scope where we cannot write
    monkeypatch.setattr(
        spack.cmd.config, '_can_update_config_file', lambda x, y: False
    )
    with pytest.raises(spack.main.SpackCommandError):
        config('update', '-y', 'packages')
def test_config_revert(packages_yaml_v015):
    """`config revert` restores the pre-update file from its .bkp copy."""
    cfg_file = packages_yaml_v015()
    bkp_file = cfg_file + '.bkp'
    config('update', '-y', 'packages')
    # Check that the backup file exists, compute its md5 sum
    assert os.path.exists(bkp_file)
    md5bkp = fs.md5sum(bkp_file)
    config('revert', '-y', 'packages')
    # Check that the backup file does not exist anymore and
    # that the md5 sum of the configuration file is the same
    # as that of the old backup file
    assert not os.path.exists(bkp_file)
    assert md5bkp == fs.md5sum(cfg_file)
def test_config_revert_raise_if_cant_write(packages_yaml_v015, monkeypatch):
    """`config revert` errors out when the backup cannot be restored."""
    packages_yaml_v015()
    config('update', '-y', 'packages')
    # Mock a global scope where we cannot write
    monkeypatch.setattr(
        spack.cmd.config, '_can_revert_update', lambda x, y, z: False
    )
    # The command raises with a helpful error if a configuration
    # file is to be deleted and we don't have sufficient permissions
    with pytest.raises(spack.main.SpackCommandError):
        config('revert', '-y', 'packages')
def test_updating_config_implicitly_raises(packages_yaml_v015):
    """Writing to a scope with an old-format file must fail loudly."""
    # Trying to write implicitly to a scope with a configuration file
    # in the old format raises an exception
    packages_yaml_v015()
    with pytest.raises(RuntimeError):
        config('add', 'packages:cmake:buildable:false')
def test_updating_multiple_scopes_at_once(packages_yaml_v015):
    """A single `config update` migrates every scope with old-format data."""
    # Create 2 config files in the old format
    packages_yaml_v015(scope='user')
    packages_yaml_v015(scope='site')
    # Update both of them at once
    config('update', '-y', 'packages')
    for scope in ('user', 'site'):
        data = spack.config.get('packages', scope=scope)
        check_packages_updated(data)
@pytest.mark.regression('18031')
def test_config_update_can_handle_comments(mutable_config):
# Create an outdated config file with comments
scope = spack.config.default_modify_scope()
cfg_file = spack.config.config.get_config_filename(scope, 'packages')
with open(cfg_file, mode='w') as f:
f.write("""
packages:
# system cmake in /usr
cmake:
paths:
cmake@3.14.0: /usr
# Another comment after the outdated section
buildable: False
""")
# Try to update it, it should not raise errors
config('update', '-y', 'packages')
# Check data
data = spack.config.get('packages', scope=scope)
assert 'paths' not in | |
charts:
subprocess.check_call(['helm-upload', helm_repo, chart], # pylint: disable=not-callable
env=env, stdout=fnull, stderr=fnull)
LOG.info("Helm chart %s uploaded" % os.path.basename(chart))
# Make sure any helm repo changes are reflected for the users
helm_utils.refresh_helm_repo_information()
except Exception as e:
raise exception.KubeAppUploadFailure(
name=app.name, version=app.version, reason=str(e))
finally:
os.chown(constants.APP_INSTALL_ROOT_PATH, orig_uid, orig_gid)
# For system applications with plugin support, establish user override
# entries and disable charts based on application metadata.
self._plugins.activate_plugins(app)
db_app = self._dbapi.kube_app_get(app.name)
app_ns = self._helm.get_helm_application_namespaces(db_app.name)
for chart, namespaces in six.iteritems(app_ns):
for namespace in namespaces:
try:
db_chart = self._dbapi.helm_override_get(
db_app.id, chart, namespace)
except exception.HelmOverrideNotFound:
# Create it
try:
db_chart = self._dbapi.helm_override_create(
{'app_id': db_app.id, 'name': chart,
'namespace': namespace})
except Exception as e:
LOG.exception(e)
# Since we are uploading a fresh application. Ensure that
# charts are disabled based on metadata
system_overrides = db_chart.system_overrides
system_overrides.update({common.HELM_CHART_ATTR_ENABLED:
chart not in disabled_charts})
try:
self._dbapi.helm_override_update(
db_app.id, chart, namespace, {'system_overrides':
system_overrides})
except exception.HelmOverrideNotFound:
LOG.exception("Helm Override Not Found")
self._plugins.deactivate_plugins(app)
def _validate_labels(self, labels):
expr = re.compile(r'[a-z0-9]([-a-z0-9]*[a-z0-9])')
for label in labels:
if not expr.match(label):
return False
return True
def _update_kubernetes_labels(self, hostname, label_dict):
body = {
'metadata': {
'labels': {}
}
}
body['metadata']['labels'].update(label_dict)
try:
self._kube.kube_patch_node(hostname, body)
except exception.KubeNodeNotFound:
pass
def _assign_host_labels(self, hosts, labels):
for host in hosts:
if host.administrative != constants.ADMIN_LOCKED:
continue
for label_str in labels:
k, v = label_str.split('=')
try:
self._dbapi.label_create(
host.id, {'host_id': host.id,
'label_key': k,
'label_value': v})
except exception.HostLabelAlreadyExists:
pass
label_dict = {k: v for k, v in (i.split('=') for i in labels)}
self._update_kubernetes_labels(host.hostname, label_dict)
def _find_label(self, host_uuid, label_str):
host_labels = self._dbapi.label_get_by_host(host_uuid)
for label_obj in host_labels:
if label_str == label_obj.label_key + '=' + label_obj.label_value:
return label_obj
return None
def _remove_host_labels(self, hosts, labels):
for host in hosts:
if host.administrative != constants.ADMIN_LOCKED:
continue
null_labels = {}
for label_str in labels:
lbl_obj = self._find_label(host.uuid, label_str)
if lbl_obj:
self._dbapi.label_destroy(lbl_obj.uuid)
key = lbl_obj.label_key
null_labels[key] = None
if null_labels:
self._update_kubernetes_labels(host.hostname, null_labels)
    def _create_rbd_provisioner_secrets(self, app_name):
        """ Provide access to the system persistent RBD provisioner.
        The rbd-provisioner is installed as part of system provisioning and has
        created secrets for all common default namespaces. Copy the secret to
        this application's namespace(s) to provide resolution for PVCs
        :param app_name: Name of the application
        """
        # Only set up a secret for the default storage pool (i.e. ignore
        # additional storage tiers)
        pool_secret = K8RbdProvisioner.get_user_secret_name({
            'name': constants.SB_DEFAULT_NAMES[constants.SB_TYPE_CEPH]})
        app_ns = self._helm.get_helm_application_namespaces(app_name)
        # Flatten the per-chart namespace lists and de-duplicate.
        namespaces = \
            list(set([ns for ns_list in app_ns.values() for ns in ns_list]))
        for ns in namespaces:
            # Skip the provisioner's own namespaces and any namespace that
            # already has a copy of the secret.
            if (ns in [common.HELM_NS_HELM_TOOLKIT,
                       common.HELM_NS_RBD_PROVISIONER] or
                    self._kube.kube_get_secret(pool_secret, ns) is not None):
                # Secret already exists
                continue
            try:
                # The namespace must exist before the secret can be copied
                # into it.
                if not self._kube.kube_get_namespace(ns):
                    self._kube.kube_create_namespace(ns)
                self._kube.kube_copy_secret(
                    pool_secret, common.HELM_NS_RBD_PROVISIONER, ns)
            except Exception as e:
                LOG.error(e)
                raise
def _delete_rbd_provisioner_secrets(self, app_name):
""" Remove access to the system persistent RBD provisioner.
As part of launching a supported application, secrets were created to
allow access to the provisioner from the application namespaces. This
will remove those created secrets.
:param app_name: Name of the application
"""
# Only set up a secret for the default storage pool (i.e. ignore
# additional storage tiers)
pool_secret = K8RbdProvisioner.get_user_secret_name({
'name': constants.SB_DEFAULT_NAMES[constants.SB_TYPE_CEPH]})
app_ns = self._helm.get_helm_application_namespaces(app_name)
namespaces = \
list(set([ns for ns_list in app_ns.values() for ns in ns_list]))
for ns in namespaces:
if (ns == common.HELM_NS_HELM_TOOLKIT or
ns == common.HELM_NS_RBD_PROVISIONER):
continue
try:
LOG.info("Deleting Secret %s under Namespace "
"%s ..." % (pool_secret, ns))
self._kube.kube_delete_secret(
pool_secret, ns, grace_period_seconds=0)
LOG.info("Secret %s under Namespace %s delete "
"completed." % (pool_secret, ns))
except Exception as e:
LOG.error(e)
raise
    def _create_local_registry_secrets(self, app_name):
        """Create the default docker-registry secret in each app namespace.

        :param app_name: Name of the application
        """
        # Temporary function to create default registry secret
        # which would be used by kubernetes to pull images from
        # local registry.
        # This should be removed after OSH supports the deployment
        # with registry has authentication turned on.
        # https://blueprints.launchpad.net/openstack-helm/+spec/
        # support-docker-registry-with-authentication-turned-on
        # NOTE: this single dict is reused (mutated) for every namespace in
        # the loop below; 'data' is filled once and 'metadata' is updated
        # per namespace.
        body = {
            'type': 'kubernetes.io/dockerconfigjson',
            'metadata': {},
            'data': {}
        }
        app_ns = self._helm.get_helm_application_namespaces(app_name)
        # Flatten and de-duplicate the per-chart namespace lists.
        namespaces = \
            list(set([ns for ns_list in app_ns.values() for ns in ns_list]))
        # Fetched once, outside the loop: the existing kube-system secret (if
        # any) is the source of truth for the registry token.
        sysinv_registry_secret = self._kube.kube_get_secret(DOCKER_REGISTRY_SECRET,
                                                            common.HELM_NS_KUBE_SYSTEM)
        for ns in namespaces:
            if (ns == common.HELM_NS_HELM_TOOLKIT or
                    self._kube.kube_get_secret(DOCKER_REGISTRY_SECRET, ns) is not None):
                # Secret already exist
                continue
            try:
                if sysinv_registry_secret is not None:
                    # Use the sysinv token in default_registry_key secret in
                    # kube-system namespace to create secret in another namespace.
                    sysinv_registry_token = sysinv_registry_secret.data['.dockerconfigjson']
                    body['data'].update({'.dockerconfigjson': sysinv_registry_token})
                else:
                    # This must be the first platform app in the kube-system
                    # namespace (i.e. nginx-ingress-controller app)
                    local_registry_auth = cutils.get_local_docker_registry_auth()
                    auth = '{0}:{1}'.format(local_registry_auth['username'],
                                            local_registry_auth['password'])
                    # Build a .dockerconfigjson payload by hand.
                    # NOTE(review): base64.b64encode() on a str fails under
                    # Python 3 (requires bytes) -- confirm target interpreter.
                    token = '{{\"auths\": {{\"{0}\": {{\"auth\": \"{1}\"}}}}}}'.format(
                        constants.DOCKER_REGISTRY_SERVER, base64.b64encode(auth))
                    body['data'].update({'.dockerconfigjson': base64.b64encode(token)})
                body['metadata'].update({'name': DOCKER_REGISTRY_SECRET,
                                         'namespace': ns})
                # The namespace may not exist yet when the app is first applied.
                if not self._kube.kube_get_namespace(ns):
                    self._kube.kube_create_namespace(ns)
                self._kube.kube_create_secret(ns, body)
                LOG.info("Secret %s created under Namespace %s." % (DOCKER_REGISTRY_SECRET, ns))
            except Exception as e:
                LOG.error(e)
                raise
def _delete_local_registry_secrets(self, app_name):
app_ns = self._helm.get_helm_application_namespaces(app_name)
namespaces = \
list(set([ns for ns_list in app_ns.values() for ns in ns_list]))
for ns in namespaces:
if ns in [common.HELM_NS_HELM_TOOLKIT, common.HELM_NS_KUBE_SYSTEM]:
continue
try:
LOG.info("Deleting Secret %s under Namespace "
"%s ..." % (DOCKER_REGISTRY_SECRET, ns))
self._kube.kube_delete_secret(
DOCKER_REGISTRY_SECRET, ns, grace_period_seconds=0)
LOG.info("Secret %s under Namespace %s delete "
"completed." % (DOCKER_REGISTRY_SECRET, ns))
except Exception as e:
LOG.error(e)
raise
    def audit_local_registry_secrets(self):
        """
        local registry uses admin's username&password for authentication.
        K8s stores the authentication info in secrets in order to access
        local registry, while admin's password is saved in keyring.
        Admin's password could be changed by openstack client cmd outside of
        sysinv and K8s. It will cause info mismatch between keyring and
        k8s's secrets, and leads to authentication failure.
        There are two ways to keep k8s's secrets updated with data in keyring:
        1. Polling. Use a periodic task to sync info from keyring to secrets.
        2. Notification. Keystone send out notification when there is password
           update, and notification receiver to do the data sync.
        To ensure k8s's secrets are timely and always synced with keyring, both
        methods are used here. And this function will be called in both cases
        to audit password info between keyring and registry-local-secret, and
        update keyring's password to all local registry secrets if need.
        """
        # Use lock to synchronize call from timer and notification
        lock_name = "AUDIT_LOCAL_REGISTRY_SECRETS"
        # The inner function exists so the runtime lock name can be applied
        # via the @synchronized decorator; it is invoked once at the bottom.
        @cutils.synchronized(lock_name, external=False)
        def _sync_audit_local_registry_secrets(self):
            try:
                secret = self._kube.kube_get_secret("registry-local-secret", kubernetes.NAMESPACE_KUBE_SYSTEM)
                if secret is None:
                    return
                # Pull the base64 auth token out of the .dockerconfigjson
                # payload by plain string splitting (no JSON parsing).
                secret_auth_body = base64.b64decode(secret.data['.dockerconfigjson'])
                secret_auth_info = (secret_auth_body.split('auth":')[1]).split('"')[1]
                registry_auth = cutils.get_local_docker_registry_auth()
                registry_auth_info = '{0}:{1}'.format(registry_auth['username'],
                                                      registry_auth['password'])
                if secret_auth_info == base64.b64encode(registry_auth_info):
                    LOG.debug("Auth info is the same, no update is needed for k8s secret.")
                    return
            except Exception as e:
                LOG.error(e)
                return
            try:
                # update secret with new auth info
                # NOTE: `token` is also reused below when patching the
                # per-namespace default-registry-key secrets.
                token = '{{\"auths\": {{\"{0}\": {{\"auth\": \"{1}\"}}}}}}'.format(
                    constants.DOCKER_REGISTRY_SERVER, base64.b64encode(registry_auth_info))
                secret.data['.dockerconfigjson'] = base64.b64encode(token)
                self._kube.kube_patch_secret("registry-local-secret", kubernetes.NAMESPACE_KUBE_SYSTEM, secret)
                LOG.info("Secret registry-local-secret under Namespace kube-system is updated")
            except Exception as e:
                LOG.error("Failed to update Secret %s under Namespace kube-system: %s"
                          % ("registry-local-secret", e))
                return
            # update "default-registry-key" secret info under all namespaces
            try:
                ns_list = self._kube.kube_get_namespace_name_list()
                for ns in ns_list:
                    secret = self._kube.kube_get_secret(DOCKER_REGISTRY_SECRET, ns)
                    if secret is None:
                        continue
                    try:
                        secret_auth_body = base64.b64decode(secret.data['.dockerconfigjson'])
                        # Only patch secrets that point at the local registry.
                        if constants.DOCKER_REGISTRY_SERVER in secret_auth_body:
                            secret.data['.dockerconfigjson'] = base64.b64encode(token)
                            self._kube.kube_patch_secret(DOCKER_REGISTRY_SECRET, ns, secret)
                            LOG.info("Secret %s under Namespace %s is updated"
                                     % (DOCKER_REGISTRY_SECRET, ns))
                    except Exception as e:
                        LOG.error("Failed to update Secret %s under Namespace %s: %s"
                                  % (DOCKER_REGISTRY_SECRET, ns, e))
                        continue
            except Exception as e:
                LOG.error(e)
                return
        _sync_audit_local_registry_secrets(self)
def _delete_namespace(self, namespace):
loop_timeout = 1
timeout = 300
try:
LOG.info("Deleting Namespace %s ..." % namespace)
self._kube.kube_delete_namespace(namespace,
grace_periods_seconds=0)
# Namespace termination timeout 5mins
while(loop_timeout <= timeout):
if not self._kube.kube_get_namespace(namespace):
# Namepace has been terminated
break
loop_timeout += 1
time.sleep(1)
if loop_timeout > timeout:
raise exception.KubeNamespaceDeleteTimeout(name=namespace)
LOG.info("Namespace %s delete completed." % namespace)
except Exception as e:
LOG.error(e)
raise
def _wait_for_pod_termination(self, namespace):
loop_timeout = 0
loop_check_interval = 10
timeout = 300
try:
LOG.info("Waiting for pod termination in namespace %s ..." % namespace)
# Pod termination timeout 5mins
while(loop_timeout <= timeout):
if not self._kube.kube_namespaced_pods_exist(namespace):
# Pods have terminated
break
loop_timeout += loop_check_interval
time.sleep(loop_check_interval)
if loop_timeout > timeout:
raise exception.KubePodTerminateTimeout(name=namespace)
LOG.info("Pod termination in Namespace %s completed." % namespace)
except Exception as e:
LOG.error(e)
raise
def _delete_persistent_volume_claim(self, namespace):
try:
LOG.info("Deleting Persistent Volume Claim "
"under Namespace %s ..." % namespace)
self._kube.kube_delete_persistent_volume_claim(namespace,
timeout_seconds=10)
LOG.info("Persistent Volume Claim delete completed.")
except Exception as e:
LOG.error(e)
raise
def _get_list_of_charts(self, manifest_file):
"""Get the charts information from the manifest file
The following chart data for each chart in the | |
not in attributes:
columns.append(a_name)
attributes[a_name] = {}
attributes[a_name][r_id] = a_value
values_data = []
for m_id in ordered_ids:
values_row = []
for col in columns:
if col == 'checkbox':
values_row.append({'checkbox': m_id})
continue
val = '-'
href = None
color = None
if col in attributes:
val = attributes[col].get(m_id, val)
elif col == 'number':
val = cnt
href = reverse('marks:mark', args=[self.type, m_id])
elif col == 'num_of_links':
val = marks[m_id]['num_of_links']
broken = marks[m_id].get('broken_links', 0)
if broken > 0:
val = ungettext_lazy(
'%(all)s (%(broken)s is broken)', '%(all)s (%(broken)s are broken)', broken
) % {'all': marks[m_id]['num_of_links'], 'broken': broken}
elif col == 'verdict':
if self.type == 'safe':
val = MarkSafe(verdict=marks[m_id]['verdict']).get_verdict_display()
color = SAFE_COLOR[marks[m_id]['verdict']]
elif self.type == 'unsafe':
val = MarkUnsafe(verdict=marks[m_id]['verdict']).get_verdict_display()
color = UNSAFE_COLOR[marks[m_id]['verdict']]
elif col == 'tags':
if 'tags' in marks[m_id] and marks[m_id]['tags']:
val = ', '.join(sorted(marks[m_id]['tags']))
elif col == 'status':
val = model_map[self.type](status=marks[m_id]['status']).get_status_display()
color = STATUS_COLOR[marks[m_id]['status']]
elif col == 'author':
if marks[m_id].get('author_id'):
val = '%s %s' % (marks[m_id]['first_name'], marks[m_id]['last_name'])
href = reverse('users:show_profile', args=[int(marks[m_id]['author_id'])])
elif col == 'change_date':
val = marks[m_id]['change_date']
if self.user.extended.data_format == 'hum':
val = get_templated_text('{% load humanize %}{{ date|naturaltime }}', date=val)
elif col == 'source':
val = model_map[self.type](type=marks[m_id]['source']).get_type_display()
elif col == 'total_similarity':
val = '%d%%' % (marks[m_id][col] * 100)
elif col in {'format', 'component', 'pattern', 'identifier'}:
val = marks[m_id][col]
values_row.append({'color': color, 'value': val, 'href': href})
values_data.append(values_row)
cnt += 1
return columns, values_data
class MarkData:
    """Data needed to render a mark's create/edit form.

    Pre-computes verdict and status choices, comparison functions (for
    unsafe marks), attributes, tags, description, error trace and the
    original author, based on either an existing mark version (editing)
    or a report (creating a new mark).
    """
    def __init__(self, mark_type, mark_version=None, report=None):
        # mark_type: one of 'safe', 'unsafe', 'unknown'.
        self.type = mark_type
        self.mark_version = mark_version
        self.verdicts = self.__verdict_info()
        self.statuses = self.__status_info()
        # NOTE(review): 'comparison' and 'selected_func' are only set for
        # unsafe marks/reports; other types leave these attributes undefined.
        if isinstance(self.mark_version, MarkUnsafeHistory) or isinstance(report, ReportUnsafe):
            self.comparison, self.selected_func = self.__functions()
        self.unknown_data = self.__unknown_info()
        self.attributes = self.__get_attributes(report)
        self.description = ''
        if isinstance(self.mark_version, (MarkUnsafeHistory, MarkSafeHistory, MarkUnknownHistory)):
            self.description = self.mark_version.description
        self.tags = None
        # Tags exist only for safe/unsafe marks; a fresh mark starts empty.
        if isinstance(self.mark_version, (MarkUnsafeHistory, MarkSafeHistory)):
            self.tags = TagsInfo(self.type, list(tag.tag.pk for tag in self.mark_version.tags.all()))
        elif isinstance(report, (ReportUnsafe, ReportSafe)):
            self.tags = TagsInfo(self.type, [])
        self.error_trace = None
        if isinstance(self.mark_version, MarkUnsafeHistory):
            with ConvertedTraces.objects.get(id=self.mark_version.error_trace_id).file.file as fp:
                self.error_trace = fp.read().decode('utf8')
        self.author = None
        # The author of version 1 is the mark's original creator.
        if isinstance(self.mark_version, (MarkUnsafeHistory, MarkSafeHistory, MarkUnknownHistory)):
            self.author = type(self.mark_version).objects.get(mark=self.mark_version.mark, version=1).author
    def __get_attributes(self, report):
        """Return [(name, value, compare_flag)] triples, or None.

        The third element comes from 'is_compare' when taken from a mark
        version, and from 'associate' when taken from a report.
        """
        if isinstance(self.mark_version, (MarkUnsafeHistory, MarkSafeHistory, MarkUnknownHistory)):
            return list(
                self.mark_version.attrs.order_by('id').values_list('attr__name__name', 'attr__value', 'is_compare')
            )
        elif isinstance(report, (ReportUnsafe, ReportSafe, ReportUnknown)):
            return list(report.attrs.order_by('id').values_list('attr__name__name', 'attr__value', 'associate'))
        return None
    def __unknown_info(self):
        """Return the unknown-mark fields, or [] for other mark types."""
        if not isinstance(self.mark_version, MarkUnknownHistory):
            return []
        return [
            self.mark_version.function, self.mark_version.problem_pattern,
            self.mark_version.link, self.mark_version.is_regexp
        ]
    def __verdict_info(self):
        """Build the verdict choices; the current (or default '0') one is checked."""
        verdicts = []
        if self.type == 'unsafe':
            for verdict in MARK_UNSAFE:
                verdict_data = {
                    'title': verdict[1],
                    'value': verdict[0],
                    'checked': False,
                    'color': UNSAFE_COLOR[verdict[0]]
                }
                if (isinstance(self.mark_version, MarkUnsafeHistory) and
                        verdict_data['value'] == self.mark_version.verdict) or \
                        (not isinstance(self.mark_version, MarkUnsafeHistory) and verdict_data['value'] == '0'):
                    verdict_data['checked'] = True
                verdicts.append(verdict_data)
        elif self.type == 'safe':
            for verdict in MARK_SAFE:
                verdict_data = {
                    'title': verdict[1],
                    'value': verdict[0],
                    'checked': False,
                    'color': SAFE_COLOR[verdict[0]]
                }
                if (isinstance(self.mark_version, MarkSafeHistory) and
                        verdict_data['value'] == self.mark_version.verdict) or \
                        (not isinstance(self.mark_version, MarkSafeHistory) and verdict_data['value'] == '0'):
                    verdict_data['checked'] = True
                verdicts.append(verdict_data)
        return verdicts
    def __status_info(self):
        """Build the status choices; the current (or first) status is checked."""
        statuses = []
        # NOTE: the loop variable is named 'verdict' but iterates statuses.
        for verdict in MARK_STATUS:
            status_data = {
                'title': verdict[1],
                'value': verdict[0],
                'checked': False,
                'color': STATUS_COLOR[verdict[0]]
            }
            if (isinstance(self.mark_version, (MarkUnsafeHistory, MarkSafeHistory, MarkUnknownHistory)) and
                    verdict[0] == self.mark_version.status) or \
                    (self.mark_version is None and verdict[0] == MARK_STATUS[0][0]):
                status_data['checked'] = True
            statuses.append(status_data)
        return statuses
    def __functions(self):
        """Return (comparison function list, selected function) for unsafe marks.

        When editing, the mark version's function is selected; otherwise the
        function named DEFAULT_COMPARE is. Non-unsafe types get ([], None).
        """
        functions = []
        selected_func = None
        if self.type == 'unsafe':
            for f in MarkUnsafeCompare.objects.order_by('name'):
                func_data = {'id': f.id, 'name': f.name}
                if isinstance(self.mark_version, MarkUnsafeHistory):
                    if self.mark_version.function == f:
                        func_data['selected'] = True
                        selected_func = f
                elif f.name == DEFAULT_COMPARE:
                    func_data['selected'] = True
                    selected_func = f
                functions.append(func_data)
        return functions, selected_func
# Table data for showing links between the specified mark and reports
class MarkReportsTable:
    """Paginated table of the reports associated with a single mark."""
    def __init__(self, user, mark, view, page=1):
        self.user = user
        self.mark = mark
        self.view = view
        # Derive the table type from the mark's model class.
        if isinstance(self.mark, MarkUnsafe):
            self.type = 'unsafe'
        elif isinstance(self.mark, MarkSafe):
            self.type = 'safe'
        elif isinstance(self.mark, MarkUnknown):
            self.type = 'unknown'
        else:
            # NOTE(review): unknown mark class leaves the instance
            # half-initialized (no self.type/columns/values).
            return
        self.selected_columns = self.__selected()
        self.available_columns = self.__available()
        self.columns = self.__get_columns()
        self.header = Header(self.columns, MARK_TITLES).struct
        self.values = self.__get_page(page, self.__get_values())
    def __selected(self):
        """Columns chosen by the view; [] if any of them is unsupported."""
        columns = []
        for col in self.view['columns']:
            if col not in self.__supported_columns():
                return []
            col_title = col
            if col_title in MARK_TITLES:
                col_title = MARK_TITLES[col_title]
            columns.append({'value': col, 'title': col_title})
        return columns
    def __available(self):
        """All supported columns with their display titles."""
        columns = []
        for col in self.__supported_columns():
            col_title = col
            if col_title in MARK_TITLES:
                col_title = MARK_TITLES[col_title]
            columns.append({'value': col, 'title': col_title})
        return columns
    def __supported_columns(self):
        """Supported column ids; 'similarity' applies to unsafe marks only."""
        if self.type == 'unsafe':
            return ['job', 'similarity', 'ass_type', 'ass_author', 'likes']
        return ['job', 'ass_type', 'ass_author', 'likes']
    def __get_columns(self):
        """Final column order: 'report' first, then the view's columns."""
        columns = ['report']
        columns.extend(self.view['columns'])
        return columns
    def __get_values(self):
        """Build one row per associated report, honoring the view filters."""
        likes = {}
        dislikes = {}
        if 'likes' in self.columns:
            # Aggregate like/dislike counts per association in one query.
            likes_model = {
                'safe': SafeAssociationLike, 'unsafe': UnsafeAssociationLike, 'unknown': UnknownAssociationLike
            }
            for ass_id, l_num, dl_num in likes_model[self.type].objects.values('association_id')\
                    .annotate(dislikes=Count(Case(When(dislike=True, then=1))),
                              likes=Count(Case(When(dislike=False, then=1))))\
                    .values_list('association_id', 'likes', 'dislikes'):
                likes[ass_id] = l_num
                dislikes[ass_id] = dl_num
        values = []
        cnt = 0
        for mark_report in self.mark.markreport_set.select_related('report', 'report__root__job').order_by('id'):
            # Similarity filter buckets: exactly 0, exactly 1, or in between.
            if 'similarity' in self.view:
                if '0' not in self.view['similarity'] and mark_report.result == 0:
                    continue
                if '100' not in self.view['similarity'] and mark_report.result == 1:
                    continue
                if '50' not in self.view['similarity'] and 0 < mark_report.result < 1:
                    continue
            if 'ass_type' in self.view and mark_report.type not in self.view['ass_type']:
                continue
            report = mark_report.report
            cnt += 1
            values_str = []
            for col in self.columns:
                val = '-'
                color = None
                href = None
                if col == 'report':
                    val = cnt
                    # Link only when the user may view the report's job.
                    if JobAccess(self.user, report.root.job).can_view():
                        if self.type == 'unsafe':
                            href = reverse('reports:unsafe', args=[report.trace_id])
                        else:
                            href = reverse('reports:%s' % self.type, args=[report.id])
                elif col == 'similarity':
                    if mark_report.error is not None:
                        val = mark_report.error
                        color = result_color(0)
                    else:
                        val = "{:.0%}".format(mark_report.result)
                        color = result_color(mark_report.result)
                elif col == 'job':
                    val = report.root.job.name
                    if JobAccess(self.user, report.root.job).can_view():
                        href = reverse('jobs:job', args=[report.root.job_id])
                elif col == 'ass_type':
                    val = mark_report.get_type_display()
                elif col == 'ass_author':
                    if mark_report.author:
                        val = mark_report.author.get_full_name()
                        href = reverse('users:show_profile', args=[mark_report.author_id])
                elif col == 'likes':
                    # mark_report.id is the association id used as the key above.
                    val = '%s/%s' % (likes.get(mark_report.id, 0), dislikes.get(mark_report.id, 0))
                values_str.append({'value': val, 'href': href, 'color': color})
            values.append(values_str)
        return values
    def __get_page(self, page, values):
        """Paginate rows; invalid page numbers clamp to first/last page."""
        num_per_page = DEF_NUMBER_OF_ELEMENTS
        if 'elements' in self.view:
            num_per_page = int(self.view['elements'][0])
        self.paginator = Paginator(values, num_per_page)
        try:
            values = self.paginator.page(page)
        except PageNotAnInteger:
            values = self.paginator.page(1)
        except EmptyPage:
            values = self.paginator.page(self.paginator.num_pages)
        return values
class AssociationChangesTable:
    def __init__(self, obj, view):
        """Build the association-changes table from cached JSON.

        ``obj.table_data`` is a JSON document produced when the changes were
        recorded; ``view`` configures which columns are shown.
        """
        self.view = view
        self._data = json.loads(obj.table_data)
        self._problems_names = {}
        self.href = self._data['href']
        # NOTE(review): self.verdicts is only set for the safe/unsafe view
        # types; other view types leave the attribute undefined.
        if self.view['type'] == VIEW_TYPES[16][0]:
            self.verdicts = SAFE_VERDICTS
        elif self.view['type'] == VIEW_TYPES[17][0]:
            self.verdicts = UNSAFE_VERDICTS
        self.selected_columns = self.__selected()
        self.available_columns = self.__available()
        self.columns = self.__get_columns()
        self.header = Header(self.columns, MARK_TITLES).struct
        self.values = self.__get_values()
def __selected(self):
columns = []
for col in self.view['columns']:
if col not in self.__supported_columns():
return []
col_title = col
if col_title in MARK_TITLES:
col_title = MARK_TITLES[col_title]
columns.append({'value': col, 'title': col_title})
return columns
def __available(self):
columns = []
for col in self.__supported_columns():
col_title = col
if col_title in MARK_TITLES:
col_title = MARK_TITLES[col_title]
columns.append({'value': col, 'title': col_title})
return columns
def __supported_columns(self):
supported_columns = ['change_kind', 'job', 'format', 'problems']
if self.view['type'] in {VIEW_TYPES[16][0], VIEW_TYPES[17][0]}:
supported_columns.append('sum_verdict')
supported_columns.append('tags')
return supported_columns
def __verdict_change(self, report_id, mark_type):
vtmpl = '<span style="color:{0}">{1}</span>'
if mark_type == 'unsafe':
colors = UNSAFE_COLOR
tmp_leaf = ReportUnsafe()
elif mark_type == 'safe':
colors = SAFE_COLOR
tmp_leaf = ReportSafe()
else:
return '-'
tmp_leaf.verdict = self._data['values'][report_id]['old_verdict']
val1 = tmp_leaf.get_verdict_display()
if self._data['values'][report_id]['old_verdict'] == self._data['values'][report_id]['new_verdict']:
return vtmpl.format(colors[self._data['values'][report_id]['old_verdict']], val1)
tmp_leaf.verdict = self._data['values'][report_id]['new_verdict']
val2 = tmp_leaf.get_verdict_display()
return '<i class="ui long arrow right icon"></i>'.join([
vtmpl.format(colors[self._data['values'][report_id]['old_verdict']], val1),
vtmpl.format(colors[self._data['values'][report_id]['new_verdict']], val2)
])
def __get_columns(self):
columns = ['report']
columns.extend(self.view['columns'])
columns.extend(self._data.get('attrs', []))
return columns
def __get_values(self):
values = []
if self.view['type'] == VIEW_TYPES[16][0]:
mark_type = 'safe'
elif self.view['type'] == VIEW_TYPES[17][0]:
mark_type = 'unsafe'
elif self.view['type'] == VIEW_TYPES[18][0]:
mark_type = 'unknown'
problems_ids = []
for r_id in self._data['values']:
if 'problems' in self._data['values'][r_id]:
for p_id in self._data['values'][r_id]['problems']:
problems_ids.append(int(p_id))
for problem in UnknownProblem.objects.filter(id__in=problems_ids):
self._problems_names[problem.id] = problem.name
else:
return []
cnt = 0
for report_id in self._data['values']:
cnt += 1
values_str = []
for col in self.columns:
val = '-'
color = None
href = None
if not self.__filter_row(report_id):
cnt -= 1
break
if col == 'report':
val = cnt
if mark_type == 'unsafe':
href = reverse('reports:unsafe', args=[self._data['values'][report_id]['trace_id']])
else:
href = reverse('reports:%s' % mark_type, args=[report_id])
elif col == 'sum_verdict':
val = self.__verdict_change(report_id, mark_type)
elif col == 'change_kind':
if self._data['values'][report_id]['change_kind'] in CHANGE_DATA:
val = CHANGE_DATA[self._data['values'][report_id]['change_kind']][0]
color = CHANGE_DATA[self._data['values'][report_id]['change_kind']][1]
elif col == 'job':
val = self._data['values'][report_id]['job'][1]
href = reverse('jobs:job', args=[self._data['values'][report_id]['job'][0]])
elif col == 'format':
val = self._data['values'][report_id]['format']
elif col == 'tags':
val | |
0x000000FF) + ":" \
+ str(( ds_trace[15] >> 8) & 0x00FF| (ds_trace[15] << 8) & 0xFF00)
print(vidToIP)
print(vidToIp_map[str(vidToIP)])
fix_df_dic = {}
pd.set_option("display.precision", 8)
fix_df = pd.DataFrame([], columns=trace_field)
seq_map_cnt = 0
for vid_seq in vid_seq_map:
if len(vid_seq_map[vid_seq]) == 6:
_df = pd.DataFrame(vid_seq_map[vid_seq],columns=trace_field)
fix_topic = 0
rebase_span_timeline = 0
SIP2DIP = 0
for ds_trace in vid_seq_map[vid_seq]:
if str(ds_trace[7]) !='nan' and not fix_topic:
topic_name = str(ds_trace[7])
_df['topic_name'] = topic_name
fix_topic = 1
if not rebase_span_timeline and ds_trace[6] == 1 :
baseStime4eachtrace = ds_trace[1]
_df['start_ts'] = _df['start_ts'].apply(lambda x: x - baseStime4eachtrace)
_df[ 'end_ts'] = _df[ 'end_ts'].apply(lambda x: x - baseStime4eachtrace)
rebase_span_timeline = 1
if not SIP2DIP and ds_trace[6] == 20 :
SIP = str( ds_trace[13] & 0x000000FF) + "." \
+ str(( ds_trace[13] >> 8) & 0x000000FF) + "." \
+ str(( ds_trace[13] >> 16) & 0x000000FF) + "." \
+ str(( ds_trace[13] >> 24) & 0x000000FF) + ":" \
+ str(( ds_trace[15] >> 8) & 0x00FF| (ds_trace[15] << 8) & 0xFF00)
DIP = str( ds_trace[14] & 0x000000FF) + "." \
+ str(( ds_trace[14] >> 8) & 0x000000FF) + "." \
+ str(( ds_trace[14] >> 16) & 0x000000FF) + "." \
+ str(( ds_trace[14] >> 24) & 0x000000FF) + ":" \
+ str(( ds_trace[16] >> 8) & 0x00FF| (ds_trace[16] << 8) & 0xFF00)
SIP2DIP = SIP + " to " + DIP
here_not_classify = """
if rebase_span_timeline and fix_topic and SIP2DIP:
fix_df = pd.concat([fix_df, _df], ignore_index=True, sort=False)
break
"""
if rebase_span_timeline and fix_topic and SIP2DIP:
if SIP2DIP not in fix_df_dic:
fix_df_dic[SIP2DIP] = pd.DataFrame([], columns=trace_field)
fix_df_dic[SIP2DIP] = pd.concat([fix_df_dic[SIP2DIP], _df], ignore_index=True, sort=False)
else:
fix_df_dic[SIP2DIP] = pd.concat([fix_df_dic[SIP2DIP], _df], ignore_index=True, sort=False)
break
vid_seq_map[vid_seq] = _df.values.tolist() # Write back fix trace in map
traces = []
span_cnt = 0
for fix_df in fix_df_dic:
span_cnt += 1
span_list = fix_df_dic[fix_df].values.tolist()
span4SOFA = []
for ds_trace in span_list:
x = ds_trace[0]
y1 = ds_trace[1]
y2 = ds_trace[2]
execution_time = y2 - y1
funName = funID2funName(ds_trace[6])
topicInfo = ' <' + str(ds_trace[7]) + ':' + str(ds_trace[9]) + '>'
y1_info = funName + topicInfo + '<br> Start time: ' + str(format(ds_trace[0] + ds_trace[1], '.6f')) + 's' + "<br> Execution time: " + str(format(execution_time*1000, '.3f')) + "ms"
y2_info = funName + topicInfo + '<br> End time: ' + str(format(ds_trace[0] + ds_trace[2], '.6f')) + 's' + "<br> Execution time: " + str(format(execution_time*1000, '.3f')) + "ms"
span4SOFA.append([x,y1,y1_info])
span4SOFA.append([x,y2,y2_info])
span4SOFA.append([None,None,''])
span_trace = pd.DataFrame(span4SOFA, columns = ['x','y','name'])
sofatrace = SOFATrace()
sofatrace.name = 'DDS_span_view' + str(span_cnt)
sofatrace.title = fix_df
sofatrace.color = 'rgba(%s,%s,%s,0.8)' %(random.randint(0,255),random.randint(0,255),random.randint(0,255))
sofatrace.x_field = 'x'
sofatrace.y_field = 'y'
sofatrace.data = span_trace
traces.append(sofatrace)
traces_to_json(traces, 'span_view.js', cfg, '_span')
# Not used
def ds_find_sender(recv_iter, all_send_index_list, send_find, send_canidate,
                   latency, negative, total_latency):
    """Match one receive record against the candidate send records.

    A send record matches when its feature pattern (fields 7..11 stringified
    and concatenated) equals the receiver's and the receive/send timestamp
    delta falls inside the accepted latency window:
    (0, latency) normally, (-latency, 0) when ``negative`` is set.

    :param recv_iter: sequence whose first element is the receive record
    :param all_send_index_list: list of [send_record, original_index] pairs
    :param send_find: per-original-index flags of already-matched senders
    :param send_canidate: per-position flags of eligible candidates
    :param latency: window width in the same units as record timestamps
    :param negative: accept senders timestamped after the receiver
    :param total_latency: running latency accumulator
    :returns: (updated total_latency, candidate position) on a match,
        otherwise (total_latency unchanged, False)
    """
    recv_rec = recv_iter[0]
    recv_pattern = ''.join(str(recv_rec[i]) for i in range(7, 12))
    # Compute the accept window once.  (Bug fix: the original executed
    # ``latency = 0 - latency`` inside the loop, so the window's sign
    # flipped on every matching candidate.)
    if negative:
        low, high = -latency, 0
    else:
        low, high = 0, latency
    for send_cnt, (send_rec, send_select) in enumerate(all_send_index_list):
        if not send_canidate[send_cnt]:
            continue
        send_pattern = ''.join(str(send_rec[i]) for i in range(7, 12))
        if send_pattern != recv_pattern:
            continue
        delta = recv_rec[0] - send_rec[0]
        # Skip senders already claimed by another receiver.
        if low < delta < high and not send_find[send_select]:
            total_latency += delta
            return total_latency, send_cnt
    return total_latency, False
### Add single point information in Highchart's line chart data format
def create_cnct_trace(cnct_list, is_sender, pid_yPos_dic):
    """Build one Highcharts point ``[name, x, y]`` for a connection record.

    :param cnct_list: one trace record (indexable; uses fields 1, 4,
        13..16 and 18 of the trace-field layout)
    :param is_sender: True -> label with source ip:port (fields 13/15),
        False -> destination ip:port (fields 14/16); both include the
        checksum from field 18
    :param pid_yPos_dic: maps str(field 4) to the y position assigned to
        that node (keys presumably node/tgid ids -- confirm with caller)
    :returns: ``[name, x, y]`` where x is the record's start timestamp
    """
    record = list(cnct_list)
    x = record[1]
    y = pid_yPos_dic[str(record[4])]
    if is_sender:
        name = str(record[13]) + ':' + str(record[15]) + ' | checksum = ' + str(record[18])
    else:
        name = str(record[14]) + ':' + str(record[16]) + ' | checksum = ' + str(record[18])
    # Bug fix: the original called ds_cnct_trace_init() and immediately
    # discarded its result; the dead call has been removed.
    return [name, x, y]
def ds_connect_preprocess(cfg):
# DS/DDS trace field name
# 0: Timestamp # 3: record_type # 6: fun_ID # 9: seq # 12: gid_seria # 20: ret
# 1: start_TS # 4: tgid # 7: topic_name # 10: gid_sys # 13 ~ 18: arg1 ~ arg6
# 2: end_TS # 5: tid # 8: comm # 11: gid_local # 19: link
logdir = cfg.logdir
ds_trace_field = ['timestamp', 'start_ts', 'end_ts', 'record_type', 'tgid', 'tid', 'fun_ID', 'topic_name', 'comm', 'seq',
'gid_sys', 'gid_local', 'gid_seria', 'arg1', 'arg2', 'arg3', 'arg4', 'arg5', 'arg6', 'link', 'ret']
all_ds_df = pd.DataFrame([], columns=ds_trace_field)
pid_yPos_dic = {}
yPos_cnt = 0
pid_ip_dic = {}
adjust_list = []
en_adjust = 1
second_1 = 1
adjust_file_exist = 0
if (os.path.exists('adjust_offset.txt')):
adjust_file_exist = 1
f = open('adjust_offset.txt')
adjust_list = f.readline().split(',')
second_1 = float(adjust_list[2])
### Read in all nodes network activities information
nodes_dir = glob.glob('[0-9]*')
command_dic = {}
for nd_dir_iter in nodes_dir:
f = open ('%s/pid2ip.txt'%nd_dir_iter)
pid2ip = f.readline().split()
f.close()
f = open ('%s/command.txt'%nd_dir_iter)
command = f.readline().split()
f.close()
command_dic[command[0]] = 1
pid_ip_dic[pid2ip[0]] = pid2ip[1]
pid_yPos_dic[nd_dir_iter] = yPos_cnt
ds_df = pd.read_csv('%s/socket_trace_%s'%(nd_dir_iter, nd_dir_iter), sep=',\s+', delimiter=',', encoding="utf-8",
skipinitialspace=True, header=0, float_precision='round_trip')
if en_adjust and adjust_file_exist and (nd_dir_iter == adjust_list[0]):
ds_df['start_ts'] = ds_df['start_ts'].apply( lambda x: x - float(adjust_list[1]) )
all_ds_df = pd.concat([ds_df, all_ds_df], ignore_index=True, sort=False)
yPos_cnt += 1
all_ds_df.sort_values(by='start_ts', inplace=True)
all_ds_df.to_csv('processed_ds_record', mode='w', index=False, float_format='%.9f')
print('\nIn kernel ds data preprocess done')
y = [0,0,0,0,0,0,0,0,0,0,0,0,0]
ds_df_no_multicast = pd.DataFrame([], columns=ds_trace_field)
ds_df_no_multicast = all_ds_df.apply( lambda x: x if (int(x['arg2'].split('.')[0]) & 0xf0 != 0xe0) else 0
, result_type='broadcast', axis=1)
#print(ds_df_no_multicast)
#ds_df_no_multicast = ds_df_no_multicast.dropna()
#ds_df_no_multicast = all_ds_df
### Not really important, just nickname for sender and receiver records.
filter = ds_df_no_multicast['fun_ID'] == 20
all_send_df = ds_df_no_multicast[filter]
all_send_df = all_send_df.dropna()
all_send_list = all_send_df.values.tolist()
filter = ds_df_no_multicast['fun_ID'] == 30
all_recv_df = ds_df_no_multicast[filter]
all_recv_list = all_recv_df.values.tolist()
#print(all_recv_df)
### Create list to accelerate preprocess when finding network connection which is accomplished by remove redundant calculation.
all_send_index_list = []
all_recv_index_list = []
for index in range(len(all_send_list)):
all_send_index_list.append([all_send_list[index], index])
for index in range(len(all_recv_list)):
all_recv_index_list.append([all_recv_list[index], index])
### Choose those data whose feature pattern is unique in the whole
send_canidate = [False] * len(all_send_list)
feature_send_dic = {}
for send_cnt in range(len(all_send_index_list)):
send_tmp = all_send_index_list[send_cnt][0]
send_feature_pattern = \
str(send_tmp[13]) + str(send_tmp[15]) + str(send_tmp[14]) + \
str(send_tmp[16]) + str(send_tmp[18])
send_feature_pattern = str(send_tmp[10]) + str(send_tmp[11]) + str(send_tmp[12]) + str(send_tmp[9])
if send_feature_pattern not in feature_send_dic:
feature_send_dic[send_feature_pattern] = [1, send_cnt]
send_canidate[send_cnt] = True
else:
feature_send_dic[send_feature_pattern][0] += 1
# send_canidate[feature_send_dic[send_feature_pattern][1]] = False
send_canidate[send_cnt] = True
recv_canidate = [False] * len(all_recv_list)
feature_recv_dic = {}
for recv_cnt in range(len(all_recv_index_list)):
recv_tmp = all_recv_index_list[recv_cnt][0]
recv_feature_pattern = \
str(recv_tmp[13]) + str(recv_tmp[15]) + str(recv_tmp[14]) + \
str(recv_tmp[16]) + str(recv_tmp[18])
recv_feature_pattern = str(recv_tmp[10]) + str(recv_tmp[11]) + str(recv_tmp[12]) + str(recv_tmp[9])
if recv_feature_pattern not in feature_recv_dic:
feature_recv_dic[recv_feature_pattern] = [1, recv_cnt]
recv_canidate[recv_cnt] = True
else:
feature_recv_dic[recv_feature_pattern][0] += 1
# recv_canidate[feature_recv_dic[recv_feature_pattern][1]] = False
recv_canidate[recv_cnt] = True
### Create connection view by add highchart line data
# Used to avoid miss selection of same data if there exist multiple same feature pattern in the data.
send_find = [False] * len(all_send_list)
recv_find = [False] * len(all_recv_list)
# Create node to node connection view traces
cnct_trace = []
cnct_traces =[]
trace_index = 0
node2node_traceIndex_dic = {}
# Because searching list is ordered and none matched received data should not
# search again (not found in previous searing), skip previous searched data.
recv_cnt_skip = 0
# Accounting
pre_sent_count, pre_recv_count, positive_min, negative_max, total_latency = 0, 0, 16, 0, 0
who = 0
match_cnt, neg_count, pos_count, total_neg, total_pos= 0, 0, 0, 0, 0
# Loop control paremeters
latency, retry, negative = 1, True, False
neg_who_dic = {} # []
accounting = {}
while retry:
retry = False
for recv_cnt in range(len(all_recv_index_list)):
if not recv_canidate[all_recv_index_list[recv_cnt][1]]:
#if recv_find[all_recv_index_list[recv_cnt][1]]:
continue
recv_tmp = all_recv_index_list[recv_cnt][0]
recv_feature_pattern = str(recv_tmp[13]) + str(recv_tmp[15]) + str(recv_tmp[14]) + \
str(recv_tmp[16]) + str(recv_tmp[18])
recv_feature_pattern = str(recv_tmp[10]) + str(recv_tmp[11]) + str(recv_tmp[12]) + str(recv_tmp[9])
#print(recv_feature_pattern)
sfind = False
for send_cnt in range(len(all_send_index_list)):
if not send_canidate[all_send_index_list[send_cnt][1]]:
#if send_find[all_send_index_list[send_cnt][1]]:
continue
send_tmp = list(all_send_index_list[send_cnt][0])
if recv_tmp[0] - send_tmp[0] < 0:
pass #break
send_feature_pattern = str(send_tmp[13]) + str(send_tmp[15]) + str(send_tmp[14]) + \
str(send_tmp[16]) + str(send_tmp[18])
send_feature_pattern = str(send_tmp[10]) + str(send_tmp[11]) + str(send_tmp[12]) + str(send_tmp[9])
if (recv_feature_pattern == send_feature_pattern):
sfind = send_cnt
match_cnt += 1
acc_id = str(send_tmp[13]) + ':' + str(send_tmp[15]) + " | |
<reponame>JoonyoungYi/project-seesae
#-*- coding: utf-8 -*-
from flask import Flask, g, render_template, redirect, request, session, url_for
from db import db_connect, db_insert_favorite, db_insert_hate, db_insert_comment
from config import *
from models import *
import datetime, math, itertools
import sys, random
# -----------------------------------------------------------------------------
# FOR ENCODING
# -----------------------------------------------------------------------------
# Python 2-only hack: reload sys to restore setdefaultencoding (deleted by
# site.py) so implicit str/unicode coercions use UTF-8 application-wide.
reload(sys)
sys.setdefaultencoding('utf-8')
# -----------------------------------------------------------------------------
# BASE AND MAIN
# -----------------------------------------------------------------------------
app = Flask(__name__)
# NOTE(review): from_object(__name__) only picks up UPPERCASE attributes of
# this module (including those star-imported from config) -- confirm intended.
app.config.from_object(__name__)
"""
BASE REQUEST
"""
@app.before_request
def before_request():
    # Open a fresh DB connection per request; closed in teardown_request.
    g.db = db_connect()
@app.teardown_request
def teardown_request(exception):
    """Close the per-request DB connection opened in before_request, if any."""
    connection = getattr(g, 'db', None)
    if connection is None:
        return
    connection.close()
# -----------------------------------------------------------------------------
# SECTION FOR MAIN PAGE
# -----------------------------------------------------------------------------
@app.route('/')
def show_main():
    """Main page showing every product type."""
    return make_main('all')
@app.route('/agricultural')
def show_main_agiricultural():
    """Main page filtered to agricultural products."""
    return make_main('agricultural')
@app.route('/fishery')
def show_main_fishery():
    """Main page filtered to fishery products."""
    return make_main('fishery')
@app.route('/livestock')
def show_main_livestock():
    """Main page filtered to livestock products."""
    return make_main('livestock')
def make_main(product_type):
    """Render the main product-listing page.

    product_type is 'agricultural', 'fishery' or 'livestock'; any other
    value (e.g. 'all') falls through to the unfiltered query. Products the
    current user marked as "hate" are removed from the listing.
    """
    if not is_logged_in():
        return redirect(url_for('login'))
    # Current user id from the session (is_logged_in() guards access).
    if 'user_id' in session:
        user_id = session['user_id']
    cur = g.db.cursor()
    # Pick the product query and the page decoration for this type.
    # NOTE(review): SQL is built with %-interpolation; the interpolated
    # values here are config identifiers and ints, but parameterized
    # queries would still be safer.
    for_display = None
    if product_type == 'agricultural':
        cur.execute('SELECT * FROM %s WHERE %s=1' % (table_product, column_product_type))
        for_display = { "wp" : "wallpaper-agriculture", "title":"AGRICULTURAL"}
    elif product_type == 'fishery':
        cur.execute('SELECT * FROM %s WHERE %s=2' % (table_product, column_product_type))
        for_display = { "wp" : "wallpaper-fishery", "title":"FISHERY" }
    elif product_type == 'livestock':
        cur.execute('SELECT * FROM %s WHERE %s=3' % (table_product, column_product_type))
        for_display = { "wp" : "wallpaper-livestock", "title":"LIVESTOCK" }
    else:
        cur.execute('SELECT * FROM %s' % (table_product))
        for_display = { "wp" : "wallpaper-agriculture", "title":"SEE-SAE" }
    # Build product models; rows without season info are skipped.
    products = []
    for row in cur.fetchall():
        product = ProductModel(row[0], row[1], row[2], row[3])
        if row[4] is None or row[5] is None:
            continue
        product.setSeason(row[4], row[5])
        products.append(product)
    random.shuffle(products)
    # Filter out products this user marked as "hate".
    # BUGFIX: the query used to run twice, and the loop appended rows[3]
    # (the same fixed row every iteration) instead of row[3].
    cur.execute('SELECT * FROM %s WHERE %s=%d' % (table_hate, column_user_id, user_id))
    id_hates = []
    for row in cur.fetchall():
        id_hates.append(row[3])
    products = [product for product in products if product.id not in id_hates]
    return render_template('main.html', product_type=product_type, products=products, for_display=for_display)
"""
SECTION FOR DETAIL
"""
@app.route('/<int:product_id>/')
def show_detail(product_id):
    """Render the product detail page.

    Gathers everything the template needs: the product and its season,
    daily / weekly / monthly average-price charts per product class,
    similar products, the current user's like/hate state, up to four
    stores, and comments (newest first).
    """
    if not is_logged_in():
        return redirect(url_for('login'))
    cur = g.db.cursor()
    # Current user id from the session (is_logged_in() guards access).
    if 'user_id' in session:
        user_id = session['user_id']
    # PRODUCT and SEASON. Products without season data have no detail page.
    cur.execute('SELECT * FROM %s WHERE %s=%d LIMIT 1' % (table_product, column_product_id, product_id))
    row = cur.fetchall()[0]
    product = ProductModel(row[0], row[1], row[2], row[3])
    if row[4] is None or row[5] is None:
        return redirect(url_for('show_main'))
    product.setSeason(row[4], row[5])
    # PRICE CHARTS: one chart per product class for each time resolution.
    cur.execute('SELECT * FROM %s WHERE %s=%d' % (table_product_class, column_product_id, product_id))
    price_charts_day = []
    price_charts_week = []
    price_charts_month = []
    for row in cur.fetchall():
        price_charts_day.append(PriceChartModel(row[0], row[1]))
        price_charts_week.append(PriceChartModel(row[0], row[1]))
        price_charts_month.append(PriceChartModel(row[0], row[1]))
    price_date_end = datetime.date.today()
    price_date_end_str = price_date_end.strftime("%Y-%m-%d")
    colors = ['#FF6F00', '#FF8F00', '#FFA000', '#FFB300', '#FFC107', '#FFCA28']
    def setLabelColors(price_charts):
        # Sort charts by average price (descending) and assign darker
        # colors to the more expensive ones.
        price_charts.sort(key=lambda c: (sum(c.price_values) / len(c.price_values)), reverse=True)
        for i, price_chart in enumerate(price_charts):
            price_chart.setLabel_color(colors[int(math.floor(len(colors) * i / len(price_charts)))])
        return price_charts
    def fillPriceValues(rows, expected_len):
        # Collapse (avg_price, period_index) rows into a dense value list:
        # insert 0 for skipped period indices, then right-pad with 0 up to
        # expected_len. Extracted from three near-identical loops.
        # NOTE(review): the gap fill assumes period indices increase by 1;
        # it silently skips filling across a year boundary (e.g. week
        # 52 -> 0) -- confirm that is acceptable.
        price_values = []
        old_value = None
        for r in rows:
            if old_value is not None and old_value + 1 != int(r[1]):
                for _ in range(1, int(r[1]) - old_value):
                    price_values.append(0)
            old_value = int(r[1])
            price_values.append(int(r[0]))
        for _ in range(expected_len - len(price_values)):
            price_values.append(0)
        return price_values
    # Daily averages over the last 7 days (8 data points).
    price_date_start_str = (price_date_end + datetime.timedelta(days=-7)).strftime("%Y-%m-%d")
    for price_chart in price_charts_day:
        cur.execute('SELECT ROUND(AVG(%s)), DAY(%s) FROM %s WHERE %s=%d and %s BETWEEN \'%s\' and \'%s\' \
                GROUP BY %s ORDER BY %s ASC' \
                % (column_price_value, column_price_date, table_price, \
                    column_product_class_id, price_chart.product_class_id, column_price_date, \
                    price_date_start_str, price_date_end_str, \
                    column_price_date, column_price_date))
        price_chart.price_values = fillPriceValues(cur.fetchall(), 8)
        assert len(price_chart.price_values) == 8
    price_charts_day = setLabelColors(price_charts_day)
    # Weekly averages over the last 12 weeks (13 data points).
    price_date_start_str = (price_date_end + datetime.timedelta(weeks=-12)).strftime("%Y-%m-%d")
    for price_chart in price_charts_week:
        cur.execute('SELECT ROUND(AVG(%s)), WEEK(%s) FROM %s WHERE %s=%d and %s > 0 and %s BETWEEN \'%s\' and \'%s\' \
                GROUP BY CONCAT(YEAR(%s), \'/\', WEEK(%s)) ORDER BY %s ASC' \
                % (column_price_value, column_price_date, table_price, \
                    column_product_class_id, price_chart.product_class_id, column_price_value, column_price_date, \
                    price_date_start_str, price_date_end_str,\
                    column_price_date, column_price_date, column_price_date))
        price_chart.price_values = fillPriceValues(cur.fetchall(), 13)
        assert len(price_chart.price_values) == 13
    # BUGFIX: the result used to be assigned to price_charts_day, leaving
    # the weekly charts uncolored/unsorted and clobbering the daily ones.
    price_charts_week = setLabelColors(price_charts_week)
    # Monthly averages over the last year (13 data points).
    price_date_start_str = (price_date_end + datetime.timedelta(days=-365)).strftime("%Y-%m-%d")
    for price_chart in price_charts_month:
        cur.execute('SELECT ROUND(AVG(%s)), MONTH(%s) FROM %s WHERE %s=%d and %s BETWEEN \'%s\' and \'%s\' \
                GROUP BY CONCAT(YEAR(%s), \'/\', MONTH(%s)) ORDER BY %s ASC' \
                % (column_price_value, column_price_date, table_price, \
                    column_product_class_id, price_chart.product_class_id, column_price_date, \
                    price_date_start_str, price_date_end_str,\
                    column_price_date, column_price_date, column_price_date))
        price_chart.price_values = fillPriceValues(cur.fetchall(), 13)
    # BUGFIX: same mis-assignment to price_charts_day as above.
    price_charts_month = setLabelColors(price_charts_month)
    # SIMILAR PRODUCTS.
    cur.execute('SELECT %s.%s, %s.%s, %s.%s, %s.%s FROM %s LEFT JOIN %s ON %s.%s=%s.%s WHERE %s.%s=%d' \
        % (table_product, column_product_id, \
            table_product, column_product_type, \
            table_product, column_product_name, \
            table_product, column_product_img_url, \
            table_similar_product_relation, table_product, \
            table_similar_product_relation, column_similar_product_id, table_product, column_product_id, \
            table_similar_product_relation, column_product_id, product_id))
    similar_products = []
    for row in cur.fetchall():
        similar_products.append(ProductModel(row[0], row[1], row[2], row[3]))
    # LIKE/HATE state for the current user (drives button CSS classes).
    dLike = {}
    cur.execute('SELECT * FROM %s WHERE %s=%d and %s=%d' \
        % ( table_favorite, column_product_id, product_id, column_user_id, user_id ))
    dLike['like'] = 'btn-success' if len(cur.fetchall()) == 1 else ''
    cur.execute('SELECT * FROM %s WHERE %s=%d and %s=%d' \
        % ( table_hate, column_product_id, product_id, column_user_id, user_id ))
    dLike['hate'] = 'btn-danger' if len(cur.fetchall()) == 1 else ''
    # STORES: up to four stores selling this product.
    cur.execute('SELECT %s.%s, %s.%s, %s.%s FROM %s LEFT JOIN %s ON %s.%s=%s.%s WHERE %s.%s=%d LIMIT 4' \
        % (table_store, column_store_name, \
            table_store, column_store_latitude, \
            table_store, column_store_longitude, \
            table_product_store_relation, table_store, \
            table_product_store_relation, column_store_id, table_store, column_store_id, \
            table_product_store_relation, column_product_id, product_id))
    stores = []
    for row in cur.fetchall():
        stores.append(StoreModel(row[0], row[1], row[2]))
    # COMMENTS, newest first (joined with user for the author's email).
    cur.execute('SELECT %s.%s, %s.%s, %s.%s, %s.%s FROM %s LEFT JOIN %s ON %s.%s=%s.%s WHERE %s=%d ORDER BY %s.%s DESC' \
        % (table_comment, column_comment_id, \
            table_comment, column_comment_content, \
            table_user, column_user_email, table_comment, column_timestamp, \
            table_comment, table_user, \
            table_comment, column_user_id, table_user, column_user_id, column_product_id, product_id, table_comment, column_timestamp))
    comments = []
    for row in cur.fetchall():
        comments.append(CommentModel(row[0], row[2], row[1], row[3]))
    return render_template('detail.html', \
        product=product, \
        price_charts_day=price_charts_day, price_charts_week=price_charts_week, price_charts_month=price_charts_month, \
        similar_products=similar_products, stores=stores, comments=comments, dLike=dLike)
# -----------------------------------------------------------------------------
# LIKE HATE BUTTON
# -----------------------------------------------------------------------------
@app.route('/toggle/like/<int:product_id>/')
def toggle_like(product_id):
    """Toggle the current user's 'favorite' flag on a product.

    A product cannot be both liked and hated, so toggling a like also
    deletes any existing hate record for the same product.
    """
    if not is_logged_in():
        return redirect(url_for('login'))
    cur = g.db.cursor()
    # user_id
    if 'user_id' in session :
        user_id = session['user_id']
    # Flip the favorite record: delete if present, insert otherwise.
    cur.execute('SELECT * FROM %s WHERE %s=%d and %s=%d' \
        % ( table_favorite, column_product_id, product_id, column_user_id, user_id ))
    favorite_rows = cur.fetchall()
    if len(favorite_rows) == 1:
        cur.execute('DELETE FROM %s WHERE %s=%d' \
            % ( table_favorite, column_favorite_id, favorite_rows[0][0]))
    else :
        db_insert_favorite(cur, user_id, product_id)
    # Remove any hate record on the same product.
    cur.execute('SELECT * FROM %s WHERE %s=%d and %s=%d' \
        % ( table_hate, column_product_id, product_id, column_user_id, user_id ))
    hate_rows = cur.fetchall()
    if len(hate_rows) == 1:
        cur.execute('DELETE FROM %s WHERE %s=%d' \
            % ( table_hate, column_hate_id, hate_rows[0][0]))
    g.db.commit()
    return redirect(url_for('show_detail', product_id=product_id))
@app.route('/toggle/hate/<int:product_id>/')
def toggle_hate(product_id):
    """Toggle the current user's 'hate' flag on a product.

    A product cannot be both hated and liked, so toggling a hate also
    deletes any existing favorite record for the same product.
    """
    if not is_logged_in():
        return redirect(url_for('login'))
    cur = g.db.cursor()
    # user_id
    if 'user_id' in session :
        user_id = session['user_id']
    # Flip the hate record: delete if present, insert otherwise.
    cur.execute('SELECT * FROM %s WHERE %s=%d and %s=%d' \
        % ( table_hate, column_product_id, product_id, column_user_id, user_id ))
    hate_rows = cur.fetchall()
    if len(hate_rows) == 1:
        cur.execute('DELETE FROM %s WHERE %s=%d' \
            % ( table_hate, column_hate_id, hate_rows[0][0]))
    else :
        db_insert_hate(cur, user_id, product_id)
    # Remove any favorite record on the same product.
    cur.execute('SELECT * FROM %s WHERE %s=%d and %s=%d' \
        % ( table_favorite, column_product_id, product_id, column_user_id, user_id ))
    favorite_rows = cur.fetchall()
    if len(favorite_rows) == 1:
        cur.execute('DELETE FROM %s WHERE %s=%d' \
            % ( table_favorite, column_favorite_id, favorite_rows[0][0]))
    g.db.commit()
    return redirect(url_for('show_detail', product_id=product_id))
# -----------------------------------------------------------------------------
# COMMENT ADD
# -----------------------------------------------------------------------------
@app.route('/add/comment/<int:product_id>', methods=['POST'])
def add_comment(product_id):
if request.method == 'POST':
# user_id
if 'user_id' in session :
user_id = session['user_id']
cur = g.db.cursor()
print (user_id, product_id, | |
from tqdm import tqdm
import network
import utils
import os
import random
import argparse
import numpy as np
from torch.utils import data
from datasets import VOCSegmentation, Cityscapes, Oilwell
from utils import ext_transforms as et
from metrics import StreamSegMetrics
import torch
import torch.nn as nn
from utils.visualizer import Visualizer
from PIL import Image
import matplotlib
import matplotlib.pyplot as plt
def get_argparser():
    """Build the command-line argument parser for training/evaluation.

    Returns:
        argparse.ArgumentParser: parser covering dataset, DeepLab model,
        training, oilwell label-update, PASCAL VOC and visdom options.

    Several help strings were corrected to match the actual defaults.
    """
    parser = argparse.ArgumentParser()
    # Datset Options
    parser.add_argument("--data_root", type=str, default='./datasets/data',
                        help="path to Dataset")
    parser.add_argument("--dataset", type=str, default='oilwell',
                        choices=['voc', 'cityscapes', 'oilwell'], help='Name of dataset')
    parser.add_argument("--num_classes", type=int, default=None,
                        help="num classes (default: None)")
    # Deeplab Options
    parser.add_argument("--model", type=str, default='deeplabv3plus_mobilenet',
                        choices=['deeplabv3_resnet50', 'deeplabv3plus_resnet50',
                                 'deeplabv3_resnet101', 'deeplabv3plus_resnet101',
                                 'deeplabv3_mobilenet', 'deeplabv3plus_mobilenet'], help='model name')
    parser.add_argument("--separable_conv", action='store_true', default=False,
                        help="apply separable conv to decoder and aspp")
    parser.add_argument("--output_stride", type=int, default=16, choices=[8, 16])
    # Train Options
    parser.add_argument("--test_only", action='store_true', default=False)
    parser.add_argument("--save_val_results", action='store_true', default=False,
                        help="save segmentation results to \"./results\"")
    # BUGFIX: default was the float 15e3 despite type=int (argparse only
    # applies type= to command-line strings, not to the default value).
    parser.add_argument("--total_itrs", type=int, default=15000,
                        help="total number of training iterations (default: 15000)")
    parser.add_argument("--lr", type=float, default=0.01,
                        help="learning rate (default: 0.01)")
    parser.add_argument("--lr_policy", type=str, default='poly', choices=['poly', 'step'],
                        help="learning rate scheduler policy")
    parser.add_argument("--step_size", type=int, default=10000)
    parser.add_argument("--crop_val", action='store_true', default=False,
                        help='crop validation (default: False)')
    parser.add_argument("--batch_size", type=int, default=32,
                        help='batch size (default: 32)')
    parser.add_argument("--val_batch_size", type=int, default=8,
                        help='batch size for validation (default: 8)')
    parser.add_argument("--crop_size", type=int, default=500)
    parser.add_argument("--ckpt", default=None, type=str,
                        help="restore from checkpoint")
    parser.add_argument("--continue_training", action='store_true', default=False)
    parser.add_argument("--loss_type", type=str, default='focal_loss',
                        choices=['cross_entropy', 'focal_loss'], help="loss type (default: focal_loss)")
    parser.add_argument("--gpu_id", type=str, default='0',
                        help="GPU ID")
    parser.add_argument("--weight_decay", type=float, default=1e-4,
                        help='weight decay (default: 1e-4)')
    parser.add_argument("--random_seed", type=int, default=1,
                        help="random seed (default: 1)")
    parser.add_argument("--print_interval", type=int, default=10,
                        help="print interval of loss (default: 10)")
    parser.add_argument("--val_interval", type=int, default=200,
                        help="iteration interval for eval (default: 200)")
    parser.add_argument("--download", action='store_true', default=False,
                        help="download datasets")
    # Oilwell
    parser.add_argument("--oilwell_type", type=str, default='RO', choices=['RO', 'R', 'O'],
                        help='Type of labels to use for oilwell dataset')
    parser.add_argument("--oilwell_splits", type=str, default='B', choices=['B', 'F', 'R'],
                        help='Whether to train using farm areas, rural areas, or both')
    parser.add_argument("--oilwell_color", type=str, default='RGB', choices=['RGB', 'IF'],
                        help='Whether to use RGB or infrared color scheme')
    # Paired flags sharing the dest 'update_labels'; default set below.
    parser.add_argument("--no_update", dest='update_labels', action='store_false',
                        help='No update to labels')
    parser.add_argument("--update", dest='update_labels', action='store_true',
                        help='Make updates to labels')
    parser.set_defaults(update_labels=False)
    parser.add_argument("--update_interval", type=int, default=200,
                        help="update interval for discovery (default: 200)")
    parser.add_argument("--update_min_interval", type=int, default=3000,
                        help="minimum update interval for discovery (default: 3000)")
    parser.add_argument("--oilwell_tests", type=str, default='a', choices=['a', 'b', 'c', 'd'],
                        help='Label-update test configuration (selects the exponents used by updateLabels)')
    # PASCAL VOC Options
    parser.add_argument("--year", type=str, default='2012',
                        choices=['2012_aug', '2012', '2011', '2009', '2008', '2007'], help='year of VOC')
    # Visdom options
    parser.add_argument("--enable_vis", action='store_true', default=False,
                        help="use visdom for visualization")
    parser.add_argument("--vis_port", type=str, default='13570',
                        help='port for visdom')
    parser.add_argument("--vis_env", type=str, default='main',
                        help='env for visdom')
    parser.add_argument("--vis_num_samples", type=int, default=8,
                        help='number of samples for visualization (default: 8)')
    return parser
def updateLabels(opts, model, loader, device, best):
    """Self-training label update pass.

    Runs the model over *loader* and, per batch, computes the IoU of class 1
    from the batch confusion matrix. Predicted foreground pixels that
    disagree with the stored target are then copied into the target mask
    with probability cutoff1**(a - b*cutoff1), and updated masks are saved
    back to their files (tarnames). Only additions are made; removals are
    disabled (commented out below).

    Args:
        opts: parsed options (num_classes, oilwell_tests used here).
        model: segmentation network in eval mode -- TODO confirm caller
            sets eval().
        loader: DataLoader yielding (images, labels, olabels, imgnames,
            tarnames).
        device: torch device for inference.
        best (bool): also save a "<tarname>_best.png" copy when True.
    """
    def _fast_hist(label_true, label_pred):
        # Confusion matrix accumulated over one (label, prediction) pair.
        mask = (label_true >= 0) & (label_true < opts.num_classes)
        hist = np.bincount(
            opts.num_classes * label_true[mask].astype(int) + label_pred[mask],
            minlength=opts.num_classes ** 2,
        ).reshape(opts.num_classes, opts.num_classes)
        return hist
    # Exponent schedule for the acceptance probability, selected by
    # --oilwell_tests; 'a' (default) keeps a=5, b=3.
    a = 5
    b = 3
    if opts.oilwell_tests == 'b':
        a = 7
        b = 5
    elif opts.oilwell_tests == 'c':
        a = 4
        b = 0
    elif opts.oilwell_tests == 'd':
        a = 5
        b = 0
    with torch.no_grad():
        for i, (images, labels, olabels, imgnames, tarnames) in tqdm(enumerate(loader)):
            images = images.to(device, dtype=torch.float32)
            labels = labels.to(device, dtype=torch.long)
            olabels = olabels.to(device, dtype=torch.long)
            outputs = model(images)
            preds = outputs.detach().max(dim=1)[1].cpu().numpy()
            targets = labels.cpu().numpy()
            # BUGFIX: was labels.cpu().numpy(), so the original labels
            # (olabels) were never actually used. Currently latent: otarVal
            # below is only read by the disabled "remove" branch.
            otargets = olabels.cpu().numpy()
            confusion_matrix = np.zeros((opts.num_classes, opts.num_classes))
            for lt, lp in zip(targets, preds):
                confusion_matrix += _fast_hist(lt.flatten(), lp.flatten())
            hist = confusion_matrix
            # Per-class IoU for this batch; class 1's IoU drives acceptance.
            iu = np.diag(hist) / (hist.sum(axis=1) + hist.sum(axis=0) - np.diag(hist))
            cls_iu = dict(zip(range(opts.num_classes), iu))
            # NOTE(review): NaN if class 1 is absent from both labels and
            # predictions in this batch -- confirm upstream guarantees.
            cutoff1 = cls_iu[1]
            for idx in range(len(images)):
                otarget = otargets[idx]
                target = targets[idx]
                pred = preds[idx]
                otarget = loader.dataset.decode_target(otarget).astype(np.uint8)
                target = loader.dataset.decode_target(target).astype(np.uint8)
                pred = loader.dataset.decode_target(pred).astype(np.uint8)
                xaxis = len(target)
                yaxis = len(target[0])
                save = False
                for x in range(xaxis):
                    for y in range(yaxis):
                        otarVal = otarget[x][y]  # original (pre-update) pixel
                        predVal = pred[x][y]
                        tarVal = target[x][y]
                        if predVal[0] != tarVal[0]:
                            if predVal[0] != 0:  # only add, never remove
                                # Accept with probability
                                # cutoff1**(a - b*cutoff1): better batch IoU
                                # promotes more predictions into the labels.
                                if random.randint(0, 100*100) < (cutoff1**(a - b*cutoff1) * 100*100):
                                    save = True
                                    target[x][y] = predVal
                        # can add and remove (disabled)
                        # if otarVal[0] == 0:
                        #     if random.randint(0,100*100) < (cutoff1**5 * 100*100):
                        #         save = True
                        #         target[x][y] = predVal
                if save:
                    # Persist the updated mask as a 1-bit black/white image.
                    img = Image.fromarray(target)
                    gray = img.convert('L')
                    bw = gray.point(lambda v: 0 if v < 128 else 255, '1')
                    bw.save(tarnames[idx])
                    if best:
                        bw.save(tarnames[idx] + "_best.png")
def get_dataset(opts):
    """ Dataset And Augmentation

    Builds (train, val, update) datasets for opts.dataset. The 'update'
    dataset reuses the training split with the validation transform so
    updateLabels() scores un-augmented images.
    """
    # Channel statistics for oilwell imagery; overridden for the infrared
    # color scheme and (nominally) for VOC.
    mean = [37.25768717, 50.53054942, 41.82911744]
    std = [28.74567631, 34.12372886, 31.84100706]
    if opts.oilwell_color == "IF":
        mean = [ 37.25768717, 73.06358305, 105.06015209]
        std = [28.74567631, 37.23757292, 40.10277116]
    if opts.dataset == 'voc':
        # NOTE(review): these ImageNet values are re-hardcoded inside the
        # VOC transforms below, so this assignment is effectively unused.
        mean = [0.485, 0.456, 0.406]
        std = [0.229, 0.224, 0.225]
    if opts.dataset == 'voc':
        # Train: random scale + crop + flip; val: optional center crop.
        train_transform = et.ExtCompose([
            #et.ExtResize(size=opts.crop_size),
            et.ExtRandomScale((0.5, 2.0)),
            et.ExtRandomCrop(size=(opts.crop_size, opts.crop_size), pad_if_needed=True),
            et.ExtRandomHorizontalFlip(),
            et.ExtToTensor(),
            et.ExtNormalize(mean=[0.485, 0.456, 0.406],
                            std=[0.229, 0.224, 0.225]),
        ])
        if opts.crop_val:
            val_transform = et.ExtCompose([
                et.ExtResize(opts.crop_size),
                et.ExtCenterCrop(opts.crop_size),
                et.ExtToTensor(),
                et.ExtNormalize(mean=[0.485, 0.456, 0.406],
                                std=[0.229, 0.224, 0.225]),
            ])
        else:
            val_transform = et.ExtCompose([
                et.ExtToTensor(),
                et.ExtNormalize(mean=[0.485, 0.456, 0.406],
                                std=[0.229, 0.224, 0.225]),
            ])
        train_dst = VOCSegmentation(root=opts.data_root, year=opts.year,
                                    image_set='train', download=opts.download, transform=train_transform)
        val_dst = VOCSegmentation(root=opts.data_root, year=opts.year,
                                  image_set='val', download=False, transform=val_transform)
        update_dst = VOCSegmentation(root=opts.data_root, year=opts.year,
                                     image_set='train', download=False, transform=val_transform)
    if opts.dataset == 'cityscapes':
        # Train: crop + color jitter + flip; val: no augmentation.
        train_transform = et.ExtCompose([
            #et.ExtResize( 512 ),
            et.ExtRandomCrop(size=(opts.crop_size, opts.crop_size)),
            et.ExtColorJitter( brightness=0.5, contrast=0.5, saturation=0.5 ),
            et.ExtRandomHorizontalFlip(),
            et.ExtToTensor(),
            et.ExtNormalize(mean=[0.485, 0.456, 0.406],
                            std=[0.229, 0.224, 0.225]),
        ])
        val_transform = et.ExtCompose([
            #et.ExtResize( 512 ),
            et.ExtToTensor(),
            et.ExtNormalize(mean=[0.485, 0.456, 0.406],
                            std=[0.229, 0.224, 0.225]),
        ])
        train_dst = Cityscapes(root=opts.data_root,
                               split='train', transform=train_transform)
        val_dst = Cityscapes(root=opts.data_root,
                             split='val', transform=val_transform)
        update_dst = Cityscapes(root=opts.data_root,
                                split='train', transform=val_transform)
    if opts.dataset == 'oilwell':
        # Same augmentation scheme as cityscapes, but with the dataset's
        # own channel statistics computed above.
        train_transform = et.ExtCompose([
            #et.ExtResize( 512 ),
            et.ExtRandomCrop(size=(opts.crop_size, opts.crop_size)),
            et.ExtColorJitter( brightness=0.5, contrast=0.5, saturation=0.5 ),
            et.ExtRandomHorizontalFlip(),
            et.ExtToTensor(),
            et.ExtNormalize(mean=mean,
                            std=std),
        ])
        val_transform = et.ExtCompose([
            #et.ExtResize( 512 ),
            et.ExtToTensor(),
            et.ExtNormalize(mean=mean,
                            std=std),
        ])
        train_dst = Oilwell(root=opts.data_root,
                            image_set='train',
                            type=opts.oilwell_type,
                            splits=opts.oilwell_splits,
                            color=opts.oilwell_color,
                            update=opts.update_labels,
                            transform=train_transform)
        val_dst = Oilwell(root=opts.data_root,
                          image_set='val',
                          type=opts.oilwell_type,
                          splits=opts.oilwell_splits,
                          color=opts.oilwell_color,
                          update=opts.update_labels,
                          transform=val_transform)
        update_dst = Oilwell(root=opts.data_root,
                             image_set='train',
                             type=opts.oilwell_type,
                             splits=opts.oilwell_splits,
                             color=opts.oilwell_color,
                             update=opts.update_labels,
                             transform=val_transform)
    return train_dst, val_dst, update_dst
def validate(opts, model, loader, device, metrics, ret_samples_ids=None):
    """Do validation and return specified samples"""
    # Channel statistics used only to de-normalize images when saving
    # qualitative results; must mirror the values used in get_dataset().
    mean = [37.25768717, 50.53054942, 41.82911744]
    std = [28.74567631, 34.12372886, 31.84100706]
    if opts.oilwell_color == "IF":
        mean = [ 37.25768717, 73.06358305, 105.06015209]
        std = [28.74567631, 37.23757292, 40.10277116]
    if opts.dataset == 'voc':
        mean = [0.485, 0.456, 0.406]
        std = [0.229, 0.224, 0.225]
    metrics.reset()
    ret_samples = []
    if opts.save_val_results:
        if not os.path.exists('results'):
            os.mkdir('results')
        denorm = utils.Denormalize(mean=mean,
                                   std=std)
        img_id = 0
    with torch.no_grad():
        for i, (images, labels, olabels, imgnames, tarnames) in tqdm(enumerate(loader)):
            images = images.to(device, dtype=torch.float32)
            labels = labels.to(device, dtype=torch.long)
            outputs = model(images)
            preds = outputs.detach().max(dim=1)[1].cpu().numpy()
            targets = labels.cpu().numpy()
            metrics.update(targets, preds)
            if ret_samples_ids is not None and i in ret_samples_ids:  # get vis samples
                ret_samples.append(
                    (images[0].detach().cpu().numpy(), targets[0], preds[0]))
            if opts.save_val_results:
                # NOTE(review): this inner loop reuses 'i' as the in-batch
                # index, shadowing the batch index used above (harmless
                # today because 'i' is reassigned each outer iteration).
                for i in range(len(images)):
                    image = images[i].detach().cpu().numpy()
                    target = targets[i]
                    pred = preds[i]
                    # Save the raw image, the target mask, the predicted
                    # mask, and a prediction overlay per sample.
                    image = (denorm(image) * 255).transpose(1, 2, 0).astype(np.uint8)
                    target = loader.dataset.decode_target(target).astype(np.uint8)
                    pred = loader.dataset.decode_target(pred).astype(np.uint8)
                    Image.fromarray(image).save('results/%d_image.png' % img_id)
                    Image.fromarray(target).save('results/%d_target.png' % img_id)
                    Image.fromarray(pred).save('results/%d_pred.png' % img_id)
                    fig = plt.figure()
                    plt.imshow(image)
                    plt.axis('off')
                    plt.imshow(pred, alpha=0.7)
                    ax = plt.gca()
                    ax.xaxis.set_major_locator(matplotlib.ticker.NullLocator())
                    ax.yaxis.set_major_locator(matplotlib.ticker.NullLocator())
                    plt.savefig('results/%d_overlay.png' % img_id, bbox_inches='tight', pad_inches=0)
                    plt.close()
                    img_id += 1
    score = metrics.get_results()
    return score, ret_samples
def main():
opts = get_argparser().parse_args()
print(opts)
if opts.dataset.lower() == 'voc':
opts.num_classes = 21
elif opts.dataset.lower() == 'cityscapes':
opts.num_classes = 19
elif opts.dataset.lower() == 'oilwell':
opts.num_classes = 2
mean = [37.25768717, 50.53054942, 41.82911744]
std = [28.74567631, 34.12372886, 31.84100706]
if opts.oilwell_color == "IF":
mean = [ 37.25768717, 73.06358305, 105.06015209]
std = [28.74567631, 37.23757292, 40.10277116]
if opts.dataset == 'voc':
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
print(torch.version.cuda)
torch.backends.cudnn.enabled = False
# Setup visualization
vis = Visualizer(port=opts.vis_port,
env=opts.vis_env) if opts.enable_vis else None
if vis is not None: # display options
vis.vis_table("Options", vars(opts))
os.environ['CUDA_VISIBLE_DEVICES'] = opts.gpu_id
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print("Device: %s" % device)
# Setup random seed
torch.manual_seed(opts.random_seed)
np.random.seed(opts.random_seed)
random.seed(opts.random_seed)
# Setup dataloader
if opts.dataset=='voc' and not opts.crop_val:
opts.val_batch_size = 1
train_dst, val_dst, update_dst = get_dataset(opts)
train_loader = data.DataLoader(
train_dst, batch_size=opts.batch_size, shuffle=True, num_workers=24)
val_loader = data.DataLoader(
val_dst, batch_size=opts.val_batch_size, shuffle=True, num_workers=24)
update_loader = data.DataLoader(
update_dst, batch_size=opts.val_batch_size, shuffle=True, num_workers=24)
print("Dataset: %s, Train set: %d, Val set: %d" %
(opts.dataset, len(train_dst), len(val_dst)))
# Set up model
model_map = {
| |
import os
from ctypes import c_int, c_uint, c_long, c_char_p, c_void_p
from ctypes import POINTER as _P
from .dll import DLL, SDLFunc, AttributeDict
from .version import SDL_version, SDL_VERSIONNUM
from .rwops import SDL_RWops
from .stdinc import Uint16, Uint32, SDL_bool
from .pixels import SDL_Color
from .surface import SDL_Surface
from .error import SDL_GetError, SDL_SetError
__all__ = [
# Opaque Types
"TTF_Font",
# Enums
"hb_direction_t",
"HB_DIRECTION_INVALID", "HB_DIRECTION_LTR", "HB_DIRECTION_RTL",
"HB_DIRECTION_TTB", "HB_DIRECTION_BTT",
# Defines
"SDL_TTF_MAJOR_VERSION", "SDL_TTF_MINOR_VERSION", "SDL_TTF_PATCHLEVEL",
"TTF_MAJOR_VERSION", "TTF_MINOR_VERSION", "TTF_PATCHLEVEL",
"UNICODE_BOM_NATIVE", "UNICODE_BOM_SWAPPED",
"TTF_STYLE_NORMAL", "TTF_STYLE_BOLD", "TTF_STYLE_ITALIC",
"TTF_STYLE_UNDERLINE", "TTF_STYLE_STRIKETHROUGH",
"TTF_HINTING_NORMAL", "TTF_HINTING_LIGHT", "TTF_HINTING_MONO",
"TTF_HINTING_NONE", "TTF_HINTING_LIGHT_SUBPIXEL",
# Macro Functions
"SDL_TTF_VERSION", "TTF_VERSION", "SDL_TTF_COMPILEDVERSION",
"SDL_TTF_VERSION_ATLEAST", "HB_TAG",
# Function Aliases
"TTF_RenderText", "TTF_RenderUTF8", "TTF_RenderUNICODE",
"TTF_SetError", "TTF_GetError",
# Python Functions
"get_dll_file",
]
# Locate and load the SDL2_ttf shared library, honoring the
# PYSDL2_DLL_PATH override.
try:
    dll = DLL("SDL2_ttf", ["SDL2_ttf", "SDL2_ttf-2.0"],
              os.getenv("PYSDL2_DLL_PATH"))
except RuntimeError as exc:
    # Re-raise as ImportError so a missing library fails at import time in
    # the conventional way.
    raise ImportError(exc)
def get_dll_file():
    """Gets the file name of the loaded SDL2_ttf library."""
    return dll.libfile
_bind = dll.bind_function  # shorthand used by the binding definitions below
# Constants, enums, type definitions, and macros
# Version of the SDL2_ttf headers these bindings were written against.
SDL_TTF_MAJOR_VERSION = 2
SDL_TTF_MINOR_VERSION = 0
SDL_TTF_PATCHLEVEL = 18
def SDL_TTF_VERSION(x):
    """Fill an SDL_version struct with the compile-time SDL2_ttf version."""
    x.major = SDL_TTF_MAJOR_VERSION
    x.minor = SDL_TTF_MINOR_VERSION
    x.patch = SDL_TTF_PATCHLEVEL
# TTF_* aliases of the SDL_TTF_* names, as in the C header.
TTF_MAJOR_VERSION = SDL_TTF_MAJOR_VERSION
TTF_MINOR_VERSION = SDL_TTF_MINOR_VERSION
TTF_PATCHLEVEL = SDL_TTF_PATCHLEVEL
TTF_VERSION = SDL_TTF_VERSION
# Packed compile-time version, plus an at-least comparison helper.
SDL_TTF_COMPILEDVERSION = SDL_VERSIONNUM(SDL_TTF_MAJOR_VERSION, SDL_TTF_MINOR_VERSION, SDL_TTF_PATCHLEVEL)
SDL_TTF_VERSION_ATLEAST = lambda x, y, z: (SDL_TTF_COMPILEDVERSION >= SDL_VERSIONNUM(x, y, z))
# UTF-16 byte-order marks used by the UNICODE text functions.
UNICODE_BOM_NATIVE = 0xFEFF
UNICODE_BOM_SWAPPED = 0xFFFE
# Font style bit flags (combinable).
TTF_STYLE_NORMAL = 0x00
TTF_STYLE_BOLD = 0x01
TTF_STYLE_ITALIC = 0x02
TTF_STYLE_UNDERLINE = 0x04
TTF_STYLE_STRIKETHROUGH = 0x08
# Glyph hinting modes for TTF_SetFontHinting.
TTF_HINTING_NORMAL = 0
TTF_HINTING_LIGHT = 1
TTF_HINTING_MONO = 2
TTF_HINTING_NONE = 3
TTF_HINTING_LIGHT_SUBPIXEL = 4
class TTF_Font(c_void_p):
    """The opaque data type for fonts opened using the TTF library.
    This contains all data associated with a loaded font. Once you are done
    with a :obj:`TTF_Font`, it should be freed using :func:`TTF_CloseFont`.
    """
    # Opaque struct: modelled as a void pointer and only ever passed by
    # reference to the C functions below.
    pass
# Some additional definitions from HarfBuzz for SetDirection/SetScript.
# NOTE(review): values appear to mirror HarfBuzz's hb_direction_t enum
# (INVALID=0, directions starting at 4) -- keep in sync with upstream.
hb_direction_t = c_int
HB_DIRECTION_INVALID = 0
HB_DIRECTION_LTR = 4
HB_DIRECTION_RTL = 5
HB_DIRECTION_TTB = 6
HB_DIRECTION_BTT = 7
def HB_TAG(c1, c2, c3, c4):
    """Converts a 4-character ISO 15924 code into a HarfBuzz script constant.

    A full list of possible 4-character script codes can be found
    here: https://unicode.org/iso15924/iso15924-codes.html

    Args:
        c1 (str): The first character of the code.
        c2 (str): The second character of the code.
        c3 (str): The third character of the code.
        c4 (str): The fourth character of the code.

    Returns:
        int: The HarfBuzz constant corresponding to the given script.
    """
    tag = 0
    for ch in (c1, c2, c3, c4):
        # Pack each character's low byte, most significant first.
        tag = (tag << 8) | (ord(ch) & 0xFF)
    return tag
# Raw ctypes function definitions
_funcdefs = [
SDLFunc("TTF_Linked_Version", None, _P(SDL_version)),
SDLFunc("TTF_GetFreeTypeVersion", [_P(c_int), _P(c_int), _P(c_int)], added='2.0.18'),
SDLFunc("TTF_GetHarfBuzzVersion", [_P(c_int), _P(c_int), _P(c_int)], added='2.0.18'),
SDLFunc("TTF_ByteSwappedUNICODE", [c_int], None),
SDLFunc("TTF_Init", None, c_int),
SDLFunc("TTF_OpenFont", [c_char_p, c_int], _P(TTF_Font)),
SDLFunc("TTF_OpenFontIndex", [c_char_p, c_int, c_long], _P(TTF_Font)),
SDLFunc("TTF_OpenFontRW", [_P(SDL_RWops), c_int, c_int], _P(TTF_Font)),
SDLFunc("TTF_OpenFontIndexRW", [_P(SDL_RWops), c_int, c_int, c_long], _P(TTF_Font)),
SDLFunc("TTF_OpenFontDPI", [c_char_p, c_int, c_uint, c_uint], _P(TTF_Font), added='2.0.18'),
SDLFunc("TTF_OpenFontIndexDPI", [c_char_p, c_int, c_long, c_uint, c_uint], _P(TTF_Font), added='2.0.18'),
SDLFunc("TTF_OpenFontDPIRW", [_P(SDL_RWops), c_int, c_int, c_uint, c_uint], _P(TTF_Font), added='2.0.18'),
SDLFunc("TTF_OpenFontIndexDPIRW", [_P(SDL_RWops), c_int, c_int, c_long, c_uint, c_uint], _P(TTF_Font), added='2.0.18'),
SDLFunc("TTF_SetFontSize", [_P(TTF_Font), c_int], c_int, added='2.0.18'),
SDLFunc("TTF_SetFontSizeDPI", [_P(TTF_Font), c_int, c_uint, c_uint], c_int, added='2.0.18'),
SDLFunc("TTF_GetFontStyle", [_P(TTF_Font)], c_int),
SDLFunc("TTF_SetFontStyle", [_P(TTF_Font), c_int], None),
SDLFunc("TTF_GetFontOutline", [_P(TTF_Font)], c_int),
SDLFunc("TTF_SetFontOutline", [_P(TTF_Font), c_int], None),
SDLFunc("TTF_GetFontHinting", [_P(TTF_Font)], c_int),
SDLFunc("TTF_SetFontHinting", [_P(TTF_Font), c_int], None),
SDLFunc("TTF_FontHeight", [_P(TTF_Font)], c_int),
SDLFunc("TTF_FontAscent", [_P(TTF_Font)], c_int),
SDLFunc("TTF_FontDescent", [_P(TTF_Font)], c_int),
SDLFunc("TTF_FontLineSkip", [_P(TTF_Font)], c_int),
SDLFunc("TTF_GetFontKerning", [_P(TTF_Font)], c_int),
SDLFunc("TTF_SetFontKerning", [_P(TTF_Font), c_int]),
SDLFunc("TTF_FontFaces", [_P(TTF_Font)], c_long),
SDLFunc("TTF_FontFaceIsFixedWidth", [_P(TTF_Font)], c_int),
SDLFunc("TTF_FontFaceFamilyName", [_P(TTF_Font)], c_char_p),
SDLFunc("TTF_FontFaceStyleName", [_P(TTF_Font)], c_char_p),
SDLFunc("TTF_GlyphIsProvided", [_P(TTF_Font), Uint16], c_int),
SDLFunc("TTF_GlyphIsProvided32", [_P(TTF_Font), Uint32], c_int, added='2.0.18'),
SDLFunc("TTF_GlyphMetrics", [_P(TTF_Font), Uint16, _P(c_int), _P(c_int), _P(c_int), _P(c_int), _P(c_int)], c_int),
SDLFunc("TTF_GlyphMetrics32", [_P(TTF_Font), Uint32, _P(c_int), _P(c_int), _P(c_int), _P(c_int), _P(c_int)], c_int, added='2.0.18'),
SDLFunc("TTF_SizeText", [_P(TTF_Font), c_char_p, _P(c_int), _P(c_int)], c_int),
SDLFunc("TTF_SizeUTF8", [_P(TTF_Font), c_char_p, _P(c_int), _P(c_int)], c_int),
SDLFunc("TTF_SizeUNICODE", [_P(TTF_Font), _P(Uint16), _P(c_int), _P(c_int)], c_int),
SDLFunc("TTF_MeasureText", [_P(TTF_Font), c_char_p, c_int, _P(c_int), _P(c_int)], c_int, added='2.0.18'),
SDLFunc("TTF_MeasureUTF8", [_P(TTF_Font), c_char_p, c_int, _P(c_int), _P(c_int)], c_int, added='2.0.18'),
SDLFunc("TTF_MeasureUNICODE", [_P(TTF_Font), _P(Uint16), c_int, _P(c_int), _P(c_int)], c_int, added='2.0.18'),
SDLFunc("TTF_RenderText_Solid", [_P(TTF_Font), c_char_p, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderUTF8_Solid", [_P(TTF_Font), c_char_p, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderUNICODE_Solid", [_P(TTF_Font), _P(Uint16), SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderText_Solid_Wrapped", [_P(TTF_Font), c_char_p, SDL_Color, Uint32], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderUTF8_Solid_Wrapped", [_P(TTF_Font), c_char_p, SDL_Color, Uint32], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderUNICODE_Solid_Wrapped", [_P(TTF_Font), _P(Uint16), SDL_Color, Uint32], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderGlyph_Solid", [_P(TTF_Font), Uint16, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderGlyph32_Solid", [_P(TTF_Font), Uint32, SDL_Color], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderText_Shaded", [_P(TTF_Font), c_char_p, SDL_Color, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderUTF8_Shaded", [_P(TTF_Font), c_char_p, SDL_Color, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderUNICODE_Shaded", [_P(TTF_Font), _P(Uint16), SDL_Color, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderText_Shaded_Wrapped", [_P(TTF_Font), c_char_p, SDL_Color, SDL_Color, Uint32], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderUTF8_Shaded_Wrapped", [_P(TTF_Font), c_char_p, SDL_Color, SDL_Color, Uint32], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderUNICODE_Shaded_Wrapped", [_P(TTF_Font), _P(Uint16), SDL_Color, SDL_Color, Uint32], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderGlyph_Shaded", [_P(TTF_Font), Uint16, SDL_Color, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderGlyph32_Shaded", [_P(TTF_Font), Uint32, SDL_Color, SDL_Color], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_RenderText_Blended", [_P(TTF_Font), c_char_p, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderUTF8_Blended", [_P(TTF_Font), c_char_p, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderUNICODE_Blended", [_P(TTF_Font), _P(Uint16), SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderText_Blended_Wrapped", [_P(TTF_Font), c_char_p, SDL_Color, Uint32], _P(SDL_Surface)),
SDLFunc("TTF_RenderUTF8_Blended_Wrapped", [_P(TTF_Font), c_char_p, SDL_Color, Uint32], _P(SDL_Surface)),
SDLFunc("TTF_RenderUNICODE_Blended_Wrapped", [_P(TTF_Font), _P(Uint16), SDL_Color, Uint32], _P(SDL_Surface)),
SDLFunc("TTF_RenderGlyph_Blended", [_P(TTF_Font), Uint16, SDL_Color], _P(SDL_Surface)),
SDLFunc("TTF_RenderGlyph32_Blended", [_P(TTF_Font), Uint32, SDL_Color], _P(SDL_Surface), added='2.0.18'),
SDLFunc("TTF_SetDirection", [c_int], c_int, added='2.0.18'),
SDLFunc("TTF_SetScript", [c_int], c_int, added='2.0.18'),
SDLFunc("TTF_CloseFont", [_P(TTF_Font)]),
SDLFunc("TTF_Quit"),
SDLFunc("TTF_WasInit", None, c_int),
SDLFunc("TTF_GetFontKerningSize", [_P(TTF_Font), c_int, c_int], c_int),
SDLFunc("TTF_GetFontKerningSizeGlyphs", [_P(TTF_Font), Uint16, Uint16], c_int, added='2.0.14'),
SDLFunc("TTF_GetFontKerningSizeGlyphs32", [_P(TTF_Font), Uint32, Uint32], c_int, added='2.0.18'),
SDLFunc("TTF_SetFontSDF", [_P(TTF_Font), SDL_bool], c_int, added='2.0.18'),
SDLFunc("TTF_GetFontSDF", [_P(TTF_Font)], SDL_bool, added='2.0.18'),
]
# Bind every function described in _funcdefs to the loaded SDL2_ttf library
# and expose each resulting ctypes function on the module.
_ctypes = AttributeDict()
for f in _funcdefs:
    _ctypes[f.name] = _bind(f.name, f.args, f.returns, f.added)
    __all__.append(f.name)  # Add all bound functions to module namespace
# Python wrapper functions
def TTF_Linked_Version():
    """Gets the version of the dynamically-linked **SDL2_ttf** library.

    Returns:
        POINTER(:obj:`SDL_version`): A pointer to a structure containing the
        version of the SDL2_ttf library currently in use.
    """
    # Use item access for consistency with every other wrapper in this
    # module (the bindings are stored via _ctypes[f.name] above).
    return _ctypes["TTF_Linked_Version"]()
def TTF_GetFreeTypeVersion(major, minor, patch):
    """Retrieves the FreeType version used by the linked SDL2_ttf library.

    The version is written into the three provided ctypes integers, so
    this function must be called with pre-allocated ``c_int`` variables
    passed by reference (see :func:`TTF_GlyphMetrics` for an example).

    `Note: Added in SDL_ttf 2.0.18`

    Args:
        major (byref(:obj:`~ctypes.c_int`)): Receives the major version
            number of the linked FreeType library.
        minor (byref(:obj:`~ctypes.c_int`)): Receives the minor version
            number of the linked FreeType library.
        patch (byref(:obj:`~ctypes.c_int`)): Receives the patch level of
            the linked FreeType library.
    """
    return _ctypes["TTF_GetFreeTypeVersion"](major, minor, patch)
def TTF_GetHarfBuzzVersion(major, minor, patch):
    """Retrieves the HarfBuzz version used by the linked SDL2_ttf library.

    The version is written into the three provided ctypes integers, so
    this function must be called with pre-allocated ``c_int`` variables
    passed by reference (see :func:`TTF_GlyphMetrics` for an example).

    `Note: Added in SDL_ttf 2.0.18`

    Args:
        major (byref(:obj:`~ctypes.c_int`)): Receives the major version
            number of the linked HarfBuzz library.
        minor (byref(:obj:`~ctypes.c_int`)): Receives the minor version
            number of the linked HarfBuzz library.
        patch (byref(:obj:`~ctypes.c_int`)): Receives the patch level of
            the linked HarfBuzz library.
    """
    return _ctypes["TTF_GetHarfBuzzVersion"](major, minor, patch)
def TTF_ByteSwappedUNICODE(swapped):
    """Tells the library whether UCS-2 unicode text is generally byteswapped.

    A BOM character embedded in a string overrides this setting for the
    remainder of that string. The default is non-swapped, i.e. the native
    endianness of the CPU. Only the ``UNICODE`` (UCS-2) family of
    functions is affected; UTF8 functions ignore this setting.

    Args:
        swapped (int): If 0, native CPU endianness will be used. If not 0,
            UCS-2 data will be byte-swapped relative to native CPU
            endianness.
    """
    return _ctypes["TTF_ByteSwappedUNICODE"](swapped)
def TTF_Init():
    """Initializes the TTF engine.

    Must be called before any other function in this module, with the
    sole exception of :func:`TTF_WasInit`. SDL itself does not need to
    be initialized first.

    Returns:
        int: 0 if successful, or -1 on error.
    """
    return _ctypes["TTF_Init"]()
def TTF_OpenFont(file, ptsize):
    """Opens a font file at a given size.

    Sizes are interpreted as points at a DPI of 72. If opening fails the
    returned pointer is null; use :func:`TTF_GetError` to obtain the
    error description.

    Args:
        file (bytes): A UTF8-encoded bytestring containing the path of the
            font file to load.
        ptsize (int): The size (in points) at which to open the font.

    Returns:
        POINTER(:obj:`TTF_Font`): A pointer to the opened font object, or a
        null pointer if there was an error.
    """
    return _ctypes["TTF_OpenFont"](file, ptsize)
def TTF_OpenFontIndex(file, ptsize, index):
"""Opens a specific font face by index from a file at a given size.
This function allows for loading a specific font face from a multi-face
font. See :func:`TTF_OpenFont` for more information.
Args:
file (bytes): A UTF8-encoded bytestring containing the path of the font
file to load.
ptsize (int): The size (in points) at which to open the font.
index (int): The index (from 0 to 15) of | |
(3, 0, None, None) , 0 , )),
(( 'StartTime' , 'StartTime' , ), 4105, (4105, (), [ (16391, 10, None, None) , ], 1 , 2 , 4 , 0 , 152 , (3, 0, None, None) , 0 , )),
(( 'StartTime' , 'StartTime' , ), 4105, (4105, (), [ (7, 1, None, None) , ], 1 , 4 , 4 , 0 , 156 , (3, 0, None, None) , 0 , )),
(( 'GetOccurrence' , 'StartDate' , 'AppointmentItem' , ), 4111, (4111, (), [ (7, 1, None, None) ,
(16397, 10, None, "IID('{00061030-0000-0000-C000-000000000046}')") , ], 1 , 1 , 4 , 0 , 160 , (3, 0, None, None) , 0 , )),
]
# COM vtable description for the Selection collection. This table appears to
# be machine-generated (win32com makepy style) -- do not edit by hand.
Selection_vtables_dispatch_ = 1
Selection_vtables_ = [
(( 'Application' , 'Application' , ), 61440, (61440, (), [ (16393, 10, None, "IID('{00063001-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 28 , (3, 0, None, None) , 0 , )),
(( 'Class' , 'Class' , ), 61450, (61450, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 32 , (3, 0, None, None) , 0 , )),
(( 'Session' , 'Session' , ), 61451, (61451, (), [ (16393, 10, None, "IID('{00063002-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 36 , (3, 0, None, None) , 0 , )),
(( 'Parent' , 'Parent' , ), 61441, (61441, (), [ (16393, 10, None, None) , ], 1 , 2 , 4 , 0 , 40 , (3, 0, None, None) , 0 , )),
(( 'Count' , 'Count' , ), 80, (80, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 44 , (3, 0, None, None) , 0 , )),
(( 'Item' , 'Index' , 'Item' , ), 81, (81, (), [ (12, 1, None, None) ,
(16393, 10, None, None) , ], 1 , 1 , 4 , 0 , 48 , (3, 0, None, None) , 0 , )),
]
# COM vtable description for the SyncObjects collection. This table appears
# to be machine-generated (win32com makepy style) -- do not edit by hand.
SyncObjects_vtables_dispatch_ = 1
SyncObjects_vtables_ = [
(( 'Application' , 'Application' , ), 61440, (61440, (), [ (16393, 10, None, "IID('{00063001-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 28 , (3, 0, None, None) , 0 , )),
(( 'Class' , 'Class' , ), 61450, (61450, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 32 , (3, 0, None, None) , 0 , )),
(( 'Session' , 'Session' , ), 61451, (61451, (), [ (16393, 10, None, "IID('{00063002-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 36 , (3, 0, None, None) , 0 , )),
(( 'Parent' , 'Parent' , ), 61441, (61441, (), [ (16393, 10, None, None) , ], 1 , 2 , 4 , 0 , 40 , (3, 0, None, None) , 0 , )),
(( 'Count' , 'Count' , ), 80, (80, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 44 , (3, 0, None, None) , 0 , )),
(( 'Item' , 'Index' , 'Item' , ), 81, (81, (), [ (12, 1, None, None) ,
(16397, 10, None, "IID('{00063084-0000-0000-C000-000000000046}')") , ], 1 , 1 , 4 , 0 , 48 , (3, 0, None, None) , 0 , )),
]
# COM vtable description for the UserProperties collection. This table
# appears to be machine-generated (win32com makepy style) -- do not edit.
UserProperties_vtables_dispatch_ = 1
UserProperties_vtables_ = [
(( 'Application' , 'Application' , ), 61440, (61440, (), [ (16393, 10, None, "IID('{00063001-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 28 , (3, 0, None, None) , 0 , )),
(( 'Class' , 'Class' , ), 61450, (61450, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 32 , (3, 0, None, None) , 0 , )),
(( 'Session' , 'Session' , ), 61451, (61451, (), [ (16393, 10, None, "IID('{00063002-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 36 , (3, 0, None, None) , 0 , )),
(( 'Parent' , 'Parent' , ), 61441, (61441, (), [ (16393, 10, None, None) , ], 1 , 2 , 4 , 0 , 40 , (3, 0, None, None) , 0 , )),
(( 'Count' , 'Count' , ), 80, (80, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 44 , (3, 0, None, None) , 0 , )),
(( 'Item' , 'Index' , 'Item' , ), 81, (81, (), [ (12, 1, None, None) ,
(16393, 10, None, "IID('{00063042-0000-0000-C000-000000000046}')") , ], 1 , 1 , 4 , 0 , 48 , (3, 0, None, None) , 0 , )),
(( 'Add' , 'Name' , 'Type' , 'AddToFolderFields' , 'DisplayFormat' ,
'UserProperty' , ), 102, (102, (), [ (8, 1, None, None) , (3, 1, None, None) , (12, 17, None, None) ,
(12, 17, None, None) , (16393, 10, None, "IID('{00063042-0000-0000-C000-000000000046}')") , ], 1 , 1 , 4 , 2 , 52 , (3, 0, None, None) , 0 , )),
(( 'Find' , 'Name' , 'Custom' , 'UserProperty' , ), 103, (103, (), [
(8, 1, None, None) , (12, 17, None, None) , (16393, 10, None, "IID('{00063042-0000-0000-C000-000000000046}')") , ], 1 , 1 , 4 , 1 , 56 , (3, 0, None, None) , 0 , )),
(( 'Remove' , 'Index' , ), 82, (82, (), [ (3, 1, None, None) , ], 1 , 1 , 4 , 0 , 60 , (3, 0, None, None) , 0 , )),
]
UserProperty_vtables_dispatch_ = 1
UserProperty_vtables_ = [
(( 'Application' , 'Application' , ), 61440, (61440, (), [ (16393, 10, None, "IID('{00063001-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 28 , (3, 0, None, None) , 0 , )),
(( 'Class' , 'Class' , ), 61450, (61450, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 32 , (3, 0, None, None) , 0 , )),
(( 'Session' , 'Session' , ), 61451, (61451, (), [ (16393, 10, None, "IID('{00063002-0000-0000-C000-000000000046}')") , ], 1 , 2 , 4 , 0 , 36 , (3, 0, None, None) , 0 , )),
(( 'Parent' , 'Parent' , ), 61441, (61441, (), [ (16393, 10, None, None) , ], 1 , 2 , 4 , 0 , 40 , (3, 0, None, None) , 0 , )),
(( 'Formula' , 'Formula' , ), 103, (103, (), [ (16392, 10, None, None) , ], 1 , 2 , 4 , 0 , 44 , (3, 0, None, None) , 0 , )),
(( 'Formula' , 'Formula' , ), 103, (103, (), [ (8, 1, None, None) , ], 1 , 4 , 4 , 0 , 48 , (3, 0, None, None) , 0 , )),
(( 'Name' , 'Name' , ), 112, (112, (), [ (16392, 10, None, None) , ], 1 , 2 , 4 , 0 , 52 , (3, 0, None, None) , 0 , )),
(( 'Type' , 'Type' , ), 109, (109, (), [ (16387, 10, None, None) , ], 1 , 2 , 4 , 0 , 56 , (3, 0, None, None) , 0 , )),
(( 'ValidationFormula' , 'ValidationFormula' , ), 104, (104, (), [ (16392, 10, None, None) , ], 1 , 2 , 4 , 0 , 60 , (3, 0, None, None) , 0 , )),
(( 'ValidationFormula' , 'ValidationFormula' , ), 104, (104, (), [ (8, 1, None, None) , ], 1 , 4 , 4 , 0 , 64 , (3, 0, None, None) , 0 , )),
(( 'ValidationText' , 'ValidationText' , ), 105, (105, (), [ (16392, 10, None, None) , ], 1 , 2 , 4 , 0 , 68 , (3, 0, None, None) , 0 , )),
(( 'ValidationText' , 'ValidationText' , ), 105, (105, (), [ (8, 1, None, None) , ], 1 , 4 , 4 , 0 , 72 , (3, 0, None, None) , 0 | |
#!/usr/bin/python
import time, os, errno, argparse, sys, random, string
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException, ElementNotVisibleException
from selenium.webdriver.common.by import By
from datetime import datetime
from pyvirtualdisplay import Display
from selenium.webdriver.support.ui import Select
class PhpMyAdminTests:
def __init__(self):
    # Base URL of the phpMyAdmin instance under test.
    # UPDATE HERE (1/5)
    self.main_page = 'http://localhost:8085/phpMyAdmin-4.0.0-all-languages/'
    print "[+] Setting up ChromeDriver"
    options = webdriver.chrome.options.Options()
    self.driver = webdriver.Chrome(chrome_options=options)
    self.driver.maximize_window()
    # Allow slow pages up to a minute to load before timing out.
    self.driver.set_page_load_timeout(60)
    self.add_cookies()
    # Filled by tests that create a user (currently unused).
    self.new_username = ''
    self.logged_in = False
def add_cookies(self):
    """Visit a static doc page and set the cookies used to tag this run.

    Cookies can only be added for a domain already loaded, hence the
    initial GET; a static page is used so it does not affect coverage.
    """
    # Fix for https://stackoverflow.com/a/28331099/1821461
    # Must load static content not to change code coverage outcome
    self.driver.get(self.main_page + 'doc/html/intro.html')
    # UPDATE HERE (2/5)
    self.driver.add_cookie({'name': 'test_group', 'value': 'pma400_tutorials'})
    # UPDATE HERE (3/5)
    self.driver.add_cookie({'name': 'test_name', 'value': 'pma_login_pma400_tutorials'})
    # UPDATE HERE (4/5)
    self.driver.add_cookie({'name': 'software_id', 'value': '1'})
    # UPDATE HERE (5/5)
    self.driver.add_cookie({'name': 'software_version_id', 'value': '1'})
def set_test_name(self, test_name):
    """Replace the 'test_name' cookie so coverage is attributed to test_name."""
    self.driver.delete_cookie('test_name')
    self.driver.add_cookie({'name': 'test_name', 'value': test_name})
def click_element(self, xpath_selector):
try:
self.wait_for_element_become_visible(xpath_selector)
element = self.driver.find_element(By.XPATH, xpath_selector)
element.click()
except Exception as e:
print '[-] Failed to click on element'
print e
def fill_textbox(self, xpath_selector, text):
try:
self.wait_for_element_become_visible(xpath_selector)
element = self.driver.find_element(By.XPATH, xpath_selector)
element.clear()
element.send_keys(text)
except Exception as e:
print '[-] Failed to fill textbox'
print e
def select_dropdown(self, xpath_selector, text):
try:
self.wait_for_element_become_visible(xpath_selector)
element = self.driver.find_element(By.XPATH, xpath_selector)
Select(element).select_by_visible_text(text)
except Exception as e:
print '[-] Failed to select optin'
print e
def check_exists_and_visible_by_xpath(self, xpath_selector):
    """Return True if the element at xpath_selector exists and is
    currently displayed, False otherwise."""
    try:
        # Use the find_element(By.XPATH, ...) API for consistency with the
        # other helpers (find_element_by_xpath is deprecated in newer
        # Selenium releases). The original also had an unreachable
        # 'return True' after the try/except, which was removed.
        return self.driver.find_element(By.XPATH, xpath_selector).is_displayed()
    except NoSuchElementException:
        return False
def wait_for_element_become_visible(self, xpath_selector) :
timeout = 20
while not self.check_exists_and_visible_by_xpath(xpath_selector) :
if timeout == 20 :
print "[+] Waiting for %s to become visible" % xpath_selector,
else :
print '.',
# Wait for login pop up to load via ajax
time.sleep(1)
timeout = timeout - 1
if timeout == 0 :
print "[-] Timed out %s" % xpath_selector
return None
def wait_for_text_in_page(self, text) :
    """Poll for up to 20 seconds until text appears in the page source.

    Returns True if the text was found, or None on timeout.
    """
    timeout = 20
    while not text in self.driver.page_source :
        print "[+] Waiting for text: %s to load in page" % text
        time.sleep(1)
        timeout = timeout - 1
        if timeout == 0 :
            print "[-] Timed out %s" % text
            return None
    return True
def login(self, username=None, password=<PASSWORD>):
self.set_test_name('pma_login')
print "[*] Starting login process..."
self.driver.get(self.main_page)
# Fill form fields
try :
if self.logged_in:
print '[+] Already logged in, skipping.'
return
# Enter Username
self.fill_textbox('//*[@id="input_username"]', 'root' if username == None else username)
# Enter password
self.fill_textbox('//*[@id="input_password"]', 'root' if password == None else password)
# Click submit
self.click_element('//*[@id="input_go"]')
time.sleep(3)
if self.wait_for_text_in_page('Log out' if username == None else username) == None :
print '[-] Login failed'
else :
self.logged_in = True
print '[+] Login successful'
except (NoSuchElementException, ElementNotVisibleException) as ex :
print "[-] Elements not found on page"
print str(ex)
except Exception as ex :
print "[-] Unhandled error"
print str(ex)
def logout(self):
    """Log out via the left-frame link and clear the logged_in flag."""
    try:
        self.click_element('//*[@id="leftframelinks"]/a[2]')
        self.logged_in = False
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def create_database(self):
self.login()
self.set_test_name('pma_create_database')
print "[*] Starting create database process..."
self.driver.get(self.main_page)
# Fill form fields
try :
# Create database
self.click_element('//*[@id="topmenu"]/li[1]/a')
self.fill_textbox('//*[@id="text_create_db"]', 'test_db')
self.click_element('//*[@id="buttonGo"]')
# Select database
self.click_element('//a[contains(text(), \'test_db\')]')
self.fill_textbox('//*[@id="create_table_form_minimal"]/fieldset[1]/div[1]/input', 'tbl1')
self.fill_textbox('//*[@id="create_table_form_minimal"]/fieldset[1]/div[2]/input', '5')
self.click_element('//*[@id="create_table_form_minimal"]/fieldset[2]/input')
# Create columns
self.fill_textbox('//*[@id="field_0_1"]', 'col1')
self.select_dropdown('//*[@id="field_0_8"]', 'PRIMARY')
self.click_element('//*[@id="field_0_9"]')
self.fill_textbox('//*[@id="field_1_1"]', 'col2')
self.select_dropdown('//*[@id="field_1_2"]', 'VARCHAR')
self.fill_textbox('//*[@id="field_1_3"]', '50')
self.select_dropdown('//*[@id="field_1_4"]', 'As defined:')
self.fill_textbox('//*[@id="table_columns"]/tbody/tr[3]/td[4]/input', 'a')
self.fill_textbox('//*[@id="field_2_1"]', 'col3')
self.select_dropdown('//*[@id="field_2_2"]', 'DATE')
self.fill_textbox('//*[@id="field_3_1"]', 'col4')
self.select_dropdown('//*[@id="field_2_2"]', 'DATE')
self.fill_textbox('//*[@id="field_4_1"]', 'col5')
self.click_element('//*[@id="page_content"]/form/fieldset/input')
if self.wait_for_text_in_page('<span class="table_comment" id="span_table_comment">"tbl1"</span>') == None :
print '[-] Test failed'
else :
print '[+] Test successful'
except (NoSuchElementException, ElementNotVisibleException) as ex :
print "[-] Elements not found on page"
print str(ex)
except Exception as ex :
print "[-] Unhandled error"
print str(ex)
def input_data_to_table(self):
    """Insert one row into tbl1 through the Insert form and verify it."""
    self.login()
    self.set_test_name('pma_input_data_to_table')
    print "[*] Starting input data to table process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(), "test_db")]')
        self.click_element('//*[@id="row_tbl_1"]/td[5]/a/span[contains(text(), "Insert")]')
        self.fill_textbox('//*[@id="field_2_3"]', 'b')
        self.fill_textbox('//*[@id="field_3_3"]', '2018-07-01')
        self.fill_textbox('//*[@id="field_4_3"]', '4')
        self.fill_textbox('//*[@id="field_5_3"]', '6')
        self.click_element('//*[@id="insertForm"]/table[1]/tfoot/tr/th/input')
        if self.wait_for_text_in_page('1 row inserted.') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def run_query(self):
    """Run a multi-statement SQL query in the SQL tab and verify success."""
    self.login()
    self.set_test_name('pma_run_query')
    print "[*] Starting run query process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"SQL")]')
        self.fill_textbox('//*[@id="sqlquery"]', 'USE test_db;\nINSERT INTO tbl1(col2,col3,col4,col5) VALUES(2,NOW(),4,5);\nSELECT * FROM tbl1;')
        self.click_element('//*[@id="button_submit_query"]')
        if self.wait_for_text_in_page('Your SQL query has been executed successfully') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def create_index(self):
    """Open tbl1's Structure view and add an index, then verify the result."""
    self.login()
    self.set_test_name('pma_create_index')
    print "[*] Starting create index process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"test_db")]')
        self.click_element('//*[@id="checkall"]')
        self.click_element('//*[@id="row_tbl_1"]/td[3]/a/span[contains(text(), "Structure")]')
        self.click_element('//span[contains(text(), "Index")]')
        if self.wait_for_text_in_page('MySQL returned an empty result set (i.e. zero rows)') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def browse_table(self):
    """Browse tbl1's rows and verify the row listing is shown."""
    self.login()
    self.set_test_name('pma_browse_table_data')
    print "[*] Starting browse table process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"test_db")]')
        self.click_element('//span[contains(text(),"Browse")]')
        if self.wait_for_text_in_page('Showing rows') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def backup_export(self):
    """Export test_db with default options and verify the export page."""
    self.login()
    self.set_test_name('pma_backup_export')
    print "[*] Starting backup export process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"test_db")]')
        # Give the database page time to finish loading before the
        # Export link is clicked.
        time.sleep(5)
        self.click_element('//a[contains(text(),"Export")]')
        self.click_element('//*[@id="buttonGo"]')
        if self.wait_for_text_in_page('Exporting tables from "test_db" database') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def adduser(self):
    """Add a MySQL user with a random 'testuserNNNNN' name via Privileges."""
    self.login()
    self.set_test_name('pma_adduser')
    print "[*] Starting add user process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"test_db")]')
        self.click_element('//a[contains(text(),"Privileges")]')
        self.click_element('//*[@id="fieldset_add_user"]/a')
        # Random suffix avoids collisions when the test is re-run.
        self.fill_textbox('//*[@id="fieldset_add_user_login"]/div[1]/input', 'testuser' + ''.join(random.choice(string.digits) for _ in range(5)))
        self.fill_textbox('//*[@id="text_pma_pw"]', '1234567890')
        self.fill_textbox('//*[@id="text_pma_pw2"]', '1234567890')
        self.click_element('//*[@id="fieldset_add_user_footer"]/input')
        if self.wait_for_text_in_page('CREATE USER \'testuser') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def optimize_database(self):
    """Select all tables of test_db and run 'Optimize table' on them."""
    self.login()
    self.set_test_name('pma_optimize_database')
    print "[*] Starting optimize database process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"test_db")]')
        self.click_element('//*[@id="checkall"]')
        # Choosing the dropdown entry submits the bulk-action form.
        self.select_dropdown('//*[@id="tablesForm"]/div/select', 'Optimize table')
        if self.wait_for_text_in_page('Your SQL query has been executed successfully') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def check_status(self):
    """Open the server Status page and verify it rendered."""
    self.login()
    self.set_test_name('pma_check_status')
    print "[*] Starting check status process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"Status")]')
        if self.wait_for_text_in_page('This MySQL server has been running for') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def check_variables(self):
    """Open the server Variables page and verify it rendered."""
    self.login()
    self.set_test_name('pma_check_variables')
    print "[*] Starting check variables process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"Variables")]')
        if self.wait_for_text_in_page('Server variables and settings') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def check_charsets(self):
    """Open the Charsets page and verify it rendered."""
    self.login()
    self.set_test_name('pma_check_charsets')
    print "[*] Starting check charsets process..."
    self.driver.get(self.main_page)
    # Fill form fields
    try :
        self.click_element('//a[contains(text(),"Charsets")]')
        if self.wait_for_text_in_page('Character Sets and Collations') == None :
            print '[-] Test failed'
        else :
            print '[+] Test successful'
    except (NoSuchElementException, ElementNotVisibleException) as ex :
        print "[-] Elements not found on page"
        print str(ex)
    except Exception as ex :
        print "[-] Unhandled error"
        print str(ex)
def check_engines(self):
self.login()
self.set_test_name('pma_check_engines')
print "[*] Starting check charsets process..."
self.driver.get(self.main_page)
# Fill form fields
try :
self.click_element('//a[contains(text(),"Engines")]')
if self.wait_for_text_in_page('Storage Engines') == None :
print '[-] Test failed'
else :
print '[+] Test successful'
except (NoSuchElementException, ElementNotVisibleException) as ex :
print "[-] Elements not found | |
# Repository: Petr-By/qtpyvis
""":py:class:`Storable` objects allow to store their state to restore
it later.
"""
# standard imports
from typing import BinaryIO, Union
from pathlib import Path
import os
import json
import logging
# toolbox imports
from .prepare import Preparable
# logging
LOG = logging.getLogger(__name__)
class Storable(Preparable):
"""A :py:class:`Storable` object provides means to store its state
persistently and restore it later.
A typical application is to allow to abort and resume a work
process. For this to work, the current state of the process has to
be stored.
Another application is to store results of complex computations
for later inspection by another tool. This can be a faster
alternative to directly executing the computations when values are
needed.
Arguments
---------
store: bool
A flag indicating that the persistent work mode should be applied,
that is that the state of the object should automatically be stored
after usage. Storage is performed upon unpreparing the object, which
can be initiated explicitly by calling :py:meth:`unprepare` or
implicitly, for example by deleting the object.
restore: bool
A flag indicating that the state of the object should be restored.
The default behaviour (indicated by `None`) is to try restoration
if the `store` flag is `True` but to not complain (raise exception)
if restoration fails. If explicitly set to `False`, no restoration
is tried, even if `store` is `True`. This allows to overwrite
an existing stored state. If explicitly set to `True`, preparation
of the object fails if restoration raises an exception.
These two flags allow to realize different scenarios:
Scenario 0 - no store (store=False, restore=False/None, default):
the object neither initializes from a stored
state, nor does it store its state upon deletion.
Scenario 1 - persistent (store=True, restore=None):
the object state is restored upon initialization (that is
during preparation) if a stored state exist, and stores its
state upon deletion (that is during unprepare)
Scenario 2 - continue (store=True, restore=True)
like scenario 1 - but initialization (prepare) fails if no
stored state exists
    Scenario 3 - overwrite (store=True, restore=False)
        like scenario 1 - but always start with a fresh state, even
        if a stored state exists (such a state will be overwritten
        upon deletion of the object).
    Scenario 4 - read only (store=False, restore=True)
        initialize object from a stored state but do not store changes
        upon deletion. If no stored state exists, an exception is thrown
Subclassing
-----------
There are two ways to make classes :py:class:`Storable`: (1) by
providing a collection of storable attributes and (2) by providing
store and restore methods. The methods can be used individually or
in combination.
(1) Subclasses of the :py:class:`Storable` can introduce an
optional `storables` argument to the class definition. This should
provide a list of names for attributes to be stored.
(2) Subclasses can implement the methods :py:meth:`_store` and
:py:meth:`_restore`. Within this method, they can perform custom
steps for storing and restoring (=initializing object from storage).
The general goal is to be agnostic concerning the storage
mechanism (file, database, ...). However, currently the emphasis
is on file storage (using a :py:class:`FileStorage` object
providing some configuration and auxiliary functions). There is
one central file into which the storable attributes are stored.
Additional data can be stored in this file as well. This can
be achieved by implementing the methods :py:meth:`_store_to_file`
and :py:meth:`_restore_from_file`, which will get a file handle
as argument.
"""
_storables = set()
def __init_subclass__(cls: type, storables: set = None, **kwargs):
    # pylint: disable=arguments-differ
    """Set up the storables registry for a new :py:class:`Storable`
    subclass.

    The subclass' set of storable attribute names is the union of the
    `storables` argument given at class-definition time and the
    storables inherited from all :py:class:`Storable` base classes.

    Arguments
    ---------
    storables: Sequence[str]
        Additional attribute names to be stored for the new subclass.
    """
    super().__init_subclass__(**kwargs)
    LOG.debug("Initializing new Storable class: %s, new storables=%s",
              cls, storables)
    combined = set(storables) if storables is not None else set()
    combined |= {name
                 for base in cls.__bases__ if issubclass(base, Storable)
                 # pylint: disable=protected-access
                 for name in base._storables}
    # Only introduce a class-level attribute when the set actually
    # differs from the inherited one.
    if combined != cls._storables:
        cls._storables = combined
def __init__(self, store: bool = None, restore: bool = None,
             directory: Union[Path, str] = None,
             filename: Union[Path, str] = None,
             **kwargs) -> None:
    """Initialize the storable with store/restore flags and an optional
    file storage location.

    Arguments
    ---------
    store:
        Whether to store the state upon unpreparing (see class doc).
    restore:
        Whether to restore the state upon preparing (see class doc).
    directory, filename:
        Optional location for file-based storage. If neither is given,
        no storage object is attached.
    """
    super().__init__(**kwargs)
    self._store_flag = store
    self._restore_flag = restore
    # Attach a file storage only when some location was provided.
    if directory is None and filename is None:
        self._storage = None
    else:
        self._storage = FileStorage(filename=filename, directory=directory)
def _prepare(self) -> None:
super()._prepare()
if self._restore_flag is True:
self.restore()
elif self._restore_flag is None and self._store_flag:
try:
self.restore()
except Exception: # pylint: disable=broad-except
self._fresh()
else:
self._fresh()
def _pre_unprepare(self) -> None:
"""Storing the state of a :py:class:`Storable` object should
be done before resources of the object are released.
"""
print(f"store_flag={self._store_flag}")
if self._store_flag:
self.store()
super()._pre_unprepare()
    def store(self) -> None:
        """Store the current state of this :py:class:`Storable` object.

        Public entry point; the actual work is delegated to the
        :py:meth:`_store` hook.
        """
        self._store()
    def _store(self) -> None:
        """This method should be implemented by subclasses.

        The default implementation hands this object to the configured
        storage backend.  NOTE(review): ``self._storage`` is ``None`` when
        neither directory nor filename was configured -- confirm callers
        only store when a storage backend exists.
        """
        self._storage.store(self)
    def restore(self) -> None:
        """Restore the current state of this :py:class:`Storable` object from
        the persistent storage. This initializes (prepares) the object
        and hence will be called on an unprepared (or only partly
        prepared) object.

        Raises
        ------
        RuntimeError
            If no persistent storage with stored state is available.
        """
        if not self.restorable:
            raise RuntimeError("Object is not restorable.")
        self._restore()
    def _restore(self) -> None:
        """This method should be implemented by subclasses. It is supposed
        to prepare the object by reading in stored values. In other
        words, this is an alternative to a fresh preparation, which
        should be done in :py:meth:`_fresh`. Code that should be
        executed in both cases (restore and fresh preparation) should
        go into the :py:meth:`_prepare` method.
        """
        # Default: delegate to the configured storage backend.
        self._storage.restore(self)
    def _fresh(self) -> None:
        """This method should be implemented by subclasses. This is the
        place to perform initialization of storable properties. This
        method is only called if no restoration takes place. It will
        be called by :py:class:`Storable._prepare`.

        The base implementation intentionally does nothing.
        """
    @property
    def restorable(self) -> bool:
        """Check if this :py:class:`Storable` object can be restored.
        This is the case if a persistent storage (e.g., a file or a
        database) is available.
        """
        # Delegates to the overridable _restorable() hook.
        return self._restorable()
def _restorable(self) -> bool:
"""This method should be implemented by subclasses.
"""
return self._storage.exists()
#
# File specific storage
#
def store_to_file(self, outfile: BinaryIO) -> None:
"""Store this :py:class:`Storable` into a file.
Subclasses may extend this mechanism by overriding this
function. If doing so, the first command should be
`super().store_to_file(outfile)`.
Arguments
---------
outfile:
A writable filelike object.
"""
values = {}
for attribute in self._storables:
values[attribute] = getattr(self, attribute)
json.dump(values, outfile)
def restore_from_file(self, infile: BinaryIO) -> None:
"""Restore this :py:class:`Storable` from a file. This initializes
(prepares) the object with data from that file, hence it will
be called on an unprepared (or only partly prepared) object.
Subclasses may extend this mechanism by overriding this
function. If doing so, the first command should be
`super().restore_from_file(outfile)`.
Arguments
---------
infile:
A readable filelike object.
"""
values = json.load(infile)
for attribute in self._storables:
try:
setattr(self, attribute, values.pop(attribute))
except KeyError:
LOG.warning("Storable: attribute '%s' missing in file %s.",
attribute, infile)
if values:
LOG.warning("Storable: unkown attributes in file %s: %s",
infile, list(values.keys()))
class Storage:
    """An abstract class representing a storage mechanism.

    Concrete subclasses (e.g. :py:class:`FileStorage`) implement the
    actual persistence; the base methods are intentional no-ops.
    """

    def store(self, storable: Storable) -> None:
        """Store a :py:class:`Storable` in this :py:class:`Storage`.

        The base implementation does nothing.
        """

    def restore(self, storable: Storable) -> None:
        """Restore a :py:class:`Storable` in this :py:class:`Storage`.

        The base implementation does nothing.
        """
class FileStorage(Storage):
"""A storage realized by one or multiple files.
"""
def __init__(self, directory: Union[Path, str] = None,
filename: Union[Path, str] = 'meta.json',
**kwargs) -> None:
super().__init__(**kwargs)
self._directory = Path(directory)
self._filename = Path(filename)
def __str__(self) -> str:
return (f"FileStorage(directory={self._directory}, "
f"filename={self._filename})")
    @property
    def directory(self) -> Path:
        """The name of the directory into which data for this
        :py:class:`FileStorage` is stored on disk.
        """
        return self._directory
def filename(self, name: str = None) -> Path:
"""The absolute filename holding the data.
"""
if name is None:
if self._filename is None:
raise ValueError("No name provided for filename")
name = self._filename
return self.directory / name
    def exists(self, name: str = None) -> bool:
        """Check if the given file exists.

        ``name`` defaults to the storage's central file.
        """
        return self.filename(name).exists()
def store(self, storable: Storable) -> None:
"""Store a :py:class:`Storable` in | |
import sys
sys.setrecursionlimit(20000) # to allow the e2wrn28_10R model to be exported as a torch.nn.Module
import os.path
from typing import Tuple
import torch.nn.functional as F
from e2cnn import nn
from e2cnn import gspaces
from e2cnn.nn import init
import torch
import math
import numpy as np
STORE_PATH = "./models/stored/"
CHANNELS_CONSTANT = 1
def _get_fco(fco):
    """Convert a frequency cut-off given in units of pi to radians.

    Non-positive values and ``None`` are passed through unchanged, since
    e2cnn's ``frequencies_cutoff`` treats those as special policies rather
    than as frequencies.
    """
    # Generalized: tolerate fco=None (previously `None > 0.` raised a
    # TypeError on Python 3).
    if fco is not None and fco > 0.:
        fco *= np.pi
    return fco
def _build_conv(kernel_size, in_type, out_type, stride, padding, dilation,
                bias, sigma, F, initialize):
    """Shared factory behind the convNxN helpers below.

    Translates the frequency cut-off ``F`` (in units of pi) via
    :py:func:`_get_fco` and builds an equivariant ``nn.R2Conv``.
    """
    return nn.R2Conv(in_type, out_type, kernel_size,
                     stride=stride,
                     padding=padding,
                     dilation=dilation,
                     bias=bias,
                     sigma=sigma,
                     frequencies_cutoff=_get_fco(F),
                     initialize=initialize
                     )


# NOTE: the keyword parameter `F` below shadows the module-level alias
# `torch.nn.functional as F`; it is kept because callers pass it by name.

def conv7x7(in_type: nn.FieldType, out_type: nn.FieldType, stride=1, padding=3, dilation=1, bias=False, sigma=None, F=1., initialize=True):
    """7x7 convolution with padding"""
    return _build_conv(7, in_type, out_type, stride, padding, dilation, bias, sigma, F, initialize)


def conv5x5(in_type: nn.FieldType, out_type: nn.FieldType, stride=1, padding=2, dilation=1, bias=False, sigma=None, F=1., initialize=True):
    """5x5 convolution with padding"""
    return _build_conv(5, in_type, out_type, stride, padding, dilation, bias, sigma, F, initialize)


def conv3x3(in_type: nn.FieldType, out_type: nn.FieldType, padding=1, stride=1, dilation=1, bias=False, sigma=None, F=1., initialize=True):
    """3x3 convolution with padding"""
    return _build_conv(3, in_type, out_type, stride, padding, dilation, bias, sigma, F, initialize)


def conv1x1(in_type: nn.FieldType, out_type: nn.FieldType, padding=0, stride=1, dilation=1, bias=False, sigma=None, F=1., initialize=True):
    """1x1 convolution"""
    return _build_conv(1, in_type, out_type, stride, padding, dilation, bias, sigma, F, initialize)
def regular_fiber(gspace: gspaces.GeneralOnR2, planes: int, fixparams: bool = True):
    """ build a regular fiber with the specified number of channels"""
    group_order = gspace.fibergroup.order()
    assert group_order > 0
    # One regular field spans `group_order` channels, so scale the count
    # down accordingly (optionally compensating the parameter count).
    field_count = planes / group_order
    if fixparams:
        field_count *= math.sqrt(group_order * CHANNELS_CONSTANT)
    return nn.FieldType(gspace, [gspace.regular_repr] * int(field_count))
def quotient_fiber(gspace: gspaces.GeneralOnR2, planes: int, fixparams: bool = True):
    """ build a quotient fiber with the specified number of channels

    The set of quotient representations depends on the symmetry group of
    ``gspace``; ``planes`` is interpreted as a total channel budget that is
    divided by the combined size of the chosen representations.
    """
    N = gspace.fibergroup.order()
    assert N > 0
    if isinstance(gspace, gspaces.FlipRot2dOnR2):
        # Dihedral group: quotient over reflections w.r.t. three axes.
        n = N/2
        subgroups = []
        for axis in [0, round(n/4), round(n/2)]:
            subgroups.append((int(axis), 1))
    elif isinstance(gspace, gspaces.Rot2dOnR2):
        # Cyclic group: quotient over the order-2 and order-4 subgroups.
        assert N % 4 == 0
        # subgroups = [int(round(N/2)), int(round(N/4))]
        subgroups = [2, 4]
    elif isinstance(gspace, gspaces.Flip2dOnR2):
        subgroups = [2]
    else:
        raise ValueError(f"Space {gspace} not supported")
    rs = [gspace.quotient_repr(subgroup) for subgroup in subgroups]
    size = sum([r.size for r in rs])
    # Scale so that the total dimensionality matches the requested planes.
    planes = planes / size
    if fixparams:
        planes *= math.sqrt(N * CHANNELS_CONSTANT)
    planes = int(planes)
    return nn.FieldType(gspace, rs * planes).sorted()
def trivial_fiber(gspace: gspaces.GeneralOnR2, planes: int, fixparams: bool = True):
    """ build a trivial fiber with the specified number of channels"""
    # Optionally rescale the channel count to keep the parameter budget
    # comparable with the equivariant (regular) models.
    field_count = planes
    if fixparams:
        field_count *= math.sqrt(gspace.fibergroup.order() * CHANNELS_CONSTANT)
    return nn.FieldType(gspace, [gspace.trivial_repr] * int(field_count))
def mixed_fiber(gspace: gspaces.GeneralOnR2, planes: int, ratio: float, fixparams: bool = True):
    """Build a fiber mixing regular and quotient representations.

    ``ratio`` is the fraction of the channel budget assigned to regular
    fields; the remainder goes to quotient fields.
    """
    N = gspace.fibergroup.order()
    assert N > 0
    if isinstance(gspace, gspaces.FlipRot2dOnR2):
        subgroup = (0, 1)
    elif isinstance(gspace, gspaces.Flip2dOnR2):
        subgroup = 1
    else:
        raise ValueError(f"Space {gspace} not supported")
    qr = gspace.quotient_repr(subgroup)
    rr = gspace.regular_repr
    planes = planes / rr.size
    if fixparams:
        planes *= math.sqrt(N * CHANNELS_CONSTANT)
    r_planes = int(planes * ratio)
    # NOTE(review): the factor 2 presumably compensates for the quotient
    # representation being half the size of the regular one -- confirm
    # qr.size == rr.size / 2 for the supported gspaces.
    q_planes = int(2*planes * (1-ratio))
    return nn.FieldType(gspace, [rr] * r_planes + [qr] * q_planes).sorted()
def mixed1_fiber(gspace: gspaces.GeneralOnR2, planes: int, fixparams: bool = True):
    # 50/50 split between regular and quotient representations.
    return mixed_fiber(gspace=gspace, planes=planes, ratio=0.5, fixparams=fixparams)
def mixed2_fiber(gspace: gspaces.GeneralOnR2, planes: int, fixparams: bool = True):
    # 25% regular / 75% quotient representations.
    return mixed_fiber(gspace=gspace, planes=planes, ratio=0.25, fixparams=fixparams)
# Registry mapping fiber-type names (as used by the model constructors'
# ``main_fiber``/``inner_fiber`` arguments) to their builder functions.
FIBERS = {
    "trivial": trivial_fiber,
    "quotient": quotient_fiber,
    "regular": regular_fiber,
    "mixed1": mixed1_fiber,
    "mixed2": mixed2_fiber,
}
class WideBasic(nn.EquivariantModule):
    """Equivariant Wide-ResNet basic block.

    Pre-activation residual block: (BN -> ReLU -> conv) twice, with an
    optional 1x1 projection shortcut when the spatial size or field type
    changes.
    """

    def __init__(self,
                 in_fiber: nn.FieldType,
                 inner_fiber: nn.FieldType,
                 dropout_rate, stride=1,
                 out_fiber: nn.FieldType = None,
                 F: float = 1.,
                 sigma: float = 0.45,
                 ):
        super(WideBasic, self).__init__()

        # By default the block preserves its input field type.
        if out_fiber is None:
            out_fiber = in_fiber

        self.in_type = in_fiber
        inner_class = inner_fiber
        self.out_type = out_fiber

        # The number of discrete rotations selects the kernel size below.
        if isinstance(in_fiber.gspace, gspaces.FlipRot2dOnR2):
            rotations = in_fiber.gspace.fibergroup.rotation_order
        elif isinstance(in_fiber.gspace, gspaces.Rot2dOnR2):
            rotations = in_fiber.gspace.fibergroup.order()
        else:
            rotations = 0

        # Few (or no) rotations: 3x3 kernels; larger rotation groups use
        # 5x5 kernels for finer angular resolution.
        if rotations in [0, 2, 4]:
            conv = conv3x3
        else:
            conv = conv5x5

        self.bn1 = nn.InnerBatchNorm(self.in_type)
        self.relu1 = nn.ReLU(self.in_type, inplace=True)
        self.conv1 = conv(self.in_type, inner_class, sigma=sigma, F=F, initialize=False)

        self.bn2 = nn.InnerBatchNorm(inner_class)
        self.relu2 = nn.ReLU(inner_class, inplace=True)

        self.dropout = nn.PointwiseDropout(inner_class, p=dropout_rate)

        self.conv2 = conv(inner_class, self.out_type, stride=stride, sigma=sigma, F=F, initialize=False)

        # Projection shortcut only when the identity cannot be added
        # directly (downsampling or a change of field type).
        self.shortcut = None
        if stride != 1 or self.in_type != self.out_type:
            self.shortcut = conv1x1(self.in_type, self.out_type, stride=stride, bias=False, sigma=sigma, F=F, initialize=False)
        # if rotations in [0, 2, 4]:
        #     self.shortcut = conv1x1(self.in_type, self.out_type, stride=stride, bias=False, sigma=sigma, F=F, initialize=False)
        # else:
        #     self.shortcut = conv3x3(self.in_type, self.out_type, stride=stride, bias=False, sigma=sigma, F=F, initialize=False)

    def forward(self, x):
        # Pre-activation: normalize/activate before the first conv.
        x_n = self.relu1(self.bn1(x))
        out = self.relu2(self.bn2(self.conv1(x_n)))
        out = self.dropout(out)
        out = self.conv2(out)

        # NOTE: the projection shortcut consumes the *normalized* input
        # x_n, while the identity path adds the raw input x.
        if self.shortcut is not None:
            out += self.shortcut(x_n)
        else:
            out += x

        return out

    def evaluate_output_shape(self, input_shape: Tuple):
        assert len(input_shape) == 4
        assert input_shape[1] == self.in_type.size
        # Spatial shape follows the shortcut conv when present; otherwise
        # the block is shape-preserving.
        if self.shortcut is not None:
            return self.shortcut.evaluate_output_shape(input_shape)
        else:
            return input_shape
class Wide_ResNet(torch.nn.Module):
def __init__(self, depth, widen_factor, dropout_rate, num_classes=100,
N: int = 8,
r: int = 1,
f: bool = True,
main_fiber: str = "regular",
inner_fiber: str = "regular",
F: float = 1.,
sigma: float = 0.45,
deltaorth: bool = False,
fixparams: bool = True,
initial_stride: int = 1,
conv2triv: bool = True,
):
super(Wide_ResNet, self).__init__()
assert ((depth - 4) % 6 == 0), 'Wide-resnet depth should be 6n+4'
n = int((depth - 4) / 6)
k = widen_factor
print(f'| Wide-Resnet {depth}x{k} ({CHANNELS_CONSTANT * 100}%)')
nStages = [16, 16 * k, 32 * k, 64 * k]
self.distributed = False
self._fixparams = fixparams
self.conv2triv = conv2triv
self._layer = 0
self._N = N
# if the model is [F]lip equivariant
self._f = f
# level of [R]estriction:
# r < 0 : never do restriction, i.e. initial group (either D8 or C8) preserved for the whole network
# r = 0 : do restriction before first layer, i.e. initial group doesn't have rotation equivariance (C1 or D1)
# r > 0 : restrict after every block, i.e. start with 8 rotations, then restrict to 4 and finally 1
self._r = r
self._F = F
self._sigma = sigma
if self._f:
self.gspace = gspaces.FlipRot2dOnR2(N)
else:
self.gspace = gspaces.Rot2dOnR2(N)
if self._r == 0:
id = (0, 1) if self._f else 1
self.gspace, _, _ = self.gspace.restrict(id)
r1 = nn.FieldType(self.gspace, [self.gspace.trivial_repr] * 3)
self.in_type = r1
# r2 = FIBERS[main_fiber](self.gspace, nStages[0], fixparams=self._fixparams)
r2 = FIBERS[main_fiber](self.gspace, nStages[0], fixparams=True)
self._in_type = r2
self.conv1 = conv5x5(r1, r2, sigma=sigma, F=F, initialize=False)
self.layer1 = self._wide_layer(WideBasic, nStages[1], n, dropout_rate, stride=initial_stride,
main_fiber=main_fiber,
inner_fiber=inner_fiber)
if self._r > 0:
id = (0, 4) if self._f else 4
self.restrict1 = self._restrict_layer(id)
else:
self.restrict1 = lambda x: x
self.layer2 = self._wide_layer(WideBasic, nStages[2], n, dropout_rate, stride=2,
main_fiber=main_fiber,
inner_fiber=inner_fiber)
if self._r > 1:
id = (0, 1) if self._f else 1
self.restrict2 = self._restrict_layer(id)
else:
self.restrict2 = lambda x: x
if self.conv2triv:
out_fiber = "trivial"
else:
out_fiber = None
self.layer3 = self._wide_layer(WideBasic, nStages[3], n, dropout_rate, stride=2,
main_fiber=main_fiber,
inner_fiber=inner_fiber,
out_fiber=out_fiber
)
self.bn1 = nn.InnerBatchNorm(self.layer3.out_type, momentum=0.9)
if self.conv2triv:
self.relu = nn.ReLU(self.bn1.out_type, inplace=True)
else:
self.mp = nn.GroupPooling(self.layer3.out_type)
self.relu = nn.ReLU(self.mp.out_type, inplace=True)
self.linear = torch.nn.Linear(self.relu.out_type.size, num_classes)
for name, module in self.named_modules():
if isinstance(module, nn.R2Conv):
if deltaorth:
init.deltaorthonormal_init(module.weights.data, module.basisexpansion)
else:
init.generalized_he_init(module.weights.data, module.basisexpansion)
elif isinstance(module, torch.nn.BatchNorm2d):
module.weight.data.fill_(1)
module.bias.data.zero_()
elif isinstance(module, torch.nn.Linear):
module.bias.data.zero_()
print("MODEL TOPOLOGY:")
# for i, (name, mod) in enumerate(self.named_modules()):
# print(f"\t{i} - {name}")
# for i, (name, mod) in enumerate(self.named_modules()):
# params = sum([p.numel() for p in mod.parameters() if p.requires_grad])
# if isinstance(mod, nn.EquivariantModule) and isinstance(mod.in_type, nn.FieldType) and isinstance(mod.out_type,
# nn.FieldType):
# print(f"\t{i: <3} - {name: <70} | {params: <8} | {mod.in_type.size: <4}- {mod.out_type.size: <4}")
# else:
# print(f"\t{i: <3} - {name: <70} | {params: <8} |")
tot_param = sum([p.numel() for p in self.parameters()]) #if p.requires_grad])
print("Total number of parameters:", tot_param)
self.exported=False
    def _restrict_layer(self, subgroup_id):
        """Build a restriction stage for the given subgroup.

        Side effects: updates ``self._in_type`` and ``self.gspace`` so
        that all layers built afterwards use the restricted group.
        """
        layers = list()
        layers.append(nn.RestrictionModule(self._in_type, subgroup_id))
        layers.append(nn.DisentangleModule(layers[-1].out_type))
        self._in_type = layers[-1].out_type
        self.gspace = self._in_type.gspace

        restrict_layer = nn.SequentialModule(*layers)
        return restrict_layer
def _wide_layer(self, block, planes: int, num_blocks: int, dropout_rate: float, stride: int,
main_fiber: str = "regular",
inner_fiber: str = "regular",
out_fiber: str = None,
):
self._layer += 1
print("start building", self._layer)
strides = [stride] + [1] * (num_blocks - 1)
layers = []
main_type = FIBERS[main_fiber](self.gspace, planes, fixparams=self._fixparams)
inner_class = FIBERS[inner_fiber](self.gspace, planes, fixparams=self._fixparams)
if out_fiber is None:
out_fiber = main_fiber
out_type = FIBERS[out_fiber](self.gspace, planes, fixparams=self._fixparams)
for b, stride in enumerate(strides):
if b == num_blocks - 1:
out_f = out_type
else:
out_f = main_type
layers.append(
block(self._in_type, inner_class, dropout_rate, stride, out_fiber=out_f, sigma=self._sigma, | |
#! /usr/bin/env python
""" This python script exercises various aspects of the IBM Cloud
Hyper Protect Crypto Services (HPCS) Key Protect service through its REST API.
Here is the format of the input file (-f option), with example input shown.
This example passes in a null service_host, which tells the script
to dynamically retrieve the connection info (this does not work for
standard Key Protect, it is a unique function for HPCS Key Protect):
{
"service_host": "",
"service_instance_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
"root_key_name":"SampleRootKey"
}
This example passes in a specific service_host for HPCS Key Protect
(this is normally not needed, the null string example above is
all that is needed for HPCS Key Protect instances):
{
"service_host": "us-south.hpcs.cloud.ibm.com:11399",
"service_instance_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
"root_key_name":"SampleRootKey"
}
This example passes in a service_host for standard Key Protect:
{
"service_host": "keyprotect.us-south.bluemix.net",
"service_instance_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
"root_key_name":"SampleRootKey"
}
Here is the format of the input Key Protect API key file (-a option), which
is the same format as provided by the Key Protect service:
{
"name": "SampleAPIKey",
"description": "A sample for test purposes",
"createdAt": "2018-03-21T17:15+0000",
"apikey": "<KEY>"
}
Developed and tested on Linux on System z using python 2.7.12.
"""
from __future__ import print_function
import sys, optparse, time, httplib, urllib, json, socket
import os, string, time, base64, copy
import uuid
from Crypto.Cipher import AES
# ##########################################################################
# get_access_token function
# ##########################################################################
def get_access_token(api_key):
    '''Contact Cloud Identity and Access Management to get an access token.

    Input is an api key for the user's Key Protect service.
    Returns the bearer token string used by all subsequent API calls.
    Exits the program if the token cannot be obtained.
    '''
    headers = {"Content-type":"application/x-www-form-urlencoded",
               "Accept":"application/json"}
    # Exchange the API key for a bearer token via the IAM apikey grant.
    params = "grant_type=urn%3Aibm%3Aparams%3Aoauth%3Agrant-type%3Aapikey&apikey=" + api_key
    conn = httplib.HTTPSConnection('iam.bluemix.net')
    try:
        conn.request("POST", "/oidc/token", params, headers)
        response = conn.getresponse()
    except socket.error as errno:
        print("Error attempting to connect to Cloud Identity and Access Management to get access token")
        print(errno)
        sys.exit()
    if response.status == 200:
        if not quiet:
            print(">>> Acquired access token at", time.strftime("%m/%d/%Y %H:%M:%S"))
    else:
        # Typo fix in the user-facing message: "aquire" -> "acquire".
        print("Failed to acquire access token. Ensure API key passed in via input file is correct")
        print("status", response.status, "reason", response.reason)
        sys.exit()
    # get the json object and convert to a python object
    objs = json.loads(response.read())
    access_token = objs['access_token']
    expiration = objs['expiration']
    if extraverbose:
        print("Access token:", access_token)
    if verbose:
        print("Token expires:", time.strftime("%m/%d/%Y %H:%M:%S", time.localtime(expiration)))
    conn.close()
    return access_token
# ##########################################################################
# get_api_endpoint_uri function
# ##########################################################################
def get_api_endpoint_uri(instance_id, access_token):
    '''Contact zcryptobroker to get the Key Protect API endpoint URI (url:port).

    Input is an instance ID for our crypto instance and
    an access token for the user's Key Protect service.
    Returns the endpoint URI (empty string when unavailable) and a
    boolean indicating whether a new access token must be acquired.
    '''
    broker = 'zcryptobroker.mybluemix.net'
    headers = {
        "authorization":"Bearer " + access_token,
    }
    need_new_token = False
    # Bug fix: initialize the result so the "token expired" redrive path
    # does not hit an UnboundLocalError at the return statement below.
    api_endpoint_uri = ""
    conn = httplib.HTTPSConnection(broker)
    try:
        conn.request("GET", "/crypto_v1/instances/" + instance_id, "", headers)
        response = conn.getresponse()
    except socket.error as errno:
        print ("Socket error attempting to connect to zcryptobroker service to get API endpoint URI")
        print (errno)
        sys.exit()
    except:
        print ("Unexpected error attempting to connect to zcryptobroker service to get API endpoint URI")
        raise
    if response.status == 200:
        if not quiet:
            print ("Retrieved API endpoint URI", time.strftime("%m/%d/%Y %H:%M:%S"))
        # get the json object and convert to a python object
        objs = json.loads(response.read())
        # now get the connection info...
        if 'apiConnectionInfo' in objs:
            api_endpoint_uri = objs['apiConnectionInfo'] # get the uri
    else:
        try:
            objs = json.loads(response.read())
            error_msg = objs['resources']
        except ValueError:
            # Fixed the Py2-only "except ValueError, e" spelling; the
            # bound exception value was never used.
            error_msg = {}
            error_msg[0] = {}
            error_msg[0]['errorMsg'] = 'No error message text returned'
        if ((response.status == 401) and
            (error_msg[0]['errorMsg'] == 'Unauthorized: Token is expired')):
            need_new_token = True
            if not quiet:
                print ("get_api_endpoint_uri: Redrive loop, must get a new access token, the old one is expired...")
        else:
            print ("Failed to get API endpoint URI")
            print ("Status:", response.status, "reason:", response.reason)
            print ("Key Protect instance ID:", instance_id)
            print ("Error Message:", error_msg[0]['errorMsg'])
            sys.exit()
    conn.close()
    return api_endpoint_uri, need_new_token
# ##########################################################################
# get_key_list function
# ##########################################################################
def get_key_list(correlation_id, host, instance_id, access_token):
    '''Contact Key Protect to get a list of the root keys it owns.

    Input is the target Key Protect host, an instance ID for the
    Key Protect service, an access token, and a correlation ID.
    Outputs a list of root keys owned by this KP instance, or
    a boolean indicating a new access token is needed.
    '''
    headers = {
        "accept":"application/vnd.ibm.collection+json",
        "authorization":"Bearer " + access_token,
        "bluemix-instance":instance_id,
        "correlation-id":correlation_id # used for debug
    }
    need_new_token = False
    key_list = {}
    conn = httplib.HTTPSConnection(host)
    try:
        conn.request("GET", "/api/v2/keys", "", headers)
        response = conn.getresponse()
    except socket.error as errno:
        print("Socket error attempting to connect to Key Protect service to get list of keys")
        print(errno)
        sys.exit()
    except:
        print("Unexpected error attempting to connect to Key Protect service to get list of keys")
        raise
    if response.status == 200:
        if not quiet:
            print("Retrieved list of stored keys", time.strftime("%m/%d/%Y %H:%M:%S"))
        #get the json object and convert to a python object
        objs = json.loads(response.read())
        if 'resources' in objs:
            key_list = objs['resources']
    else:
        try:
            objs = json.loads(response.read())
            error_msg = objs['resources']
        except ValueError:
            # Fixed the Py2-only "except ValueError, e" spelling; the
            # bound exception value was never used.
            error_msg = {}
            error_msg[0] = {}
            error_msg[0]['errorMsg'] = 'No error message text returned'
        if ((response.status == 400) and
            (error_msg[0]['errorMsg'] == 'Bad Request: Token is expired')):
            # Removed a stray copy-pasted "key_id" assignment here; this
            # function does not deal with individual key ids.
            need_new_token = True
            if not quiet:
                print("get_key_list: Redrive loop, must get a new access token, the old one is expired...")
        else:
            print("Failed to get list of root keys")
            print("Status:", response.status, "reason:", response.reason)
            print("Correlation id=", headers['correlation-id'])
            print("Key Protect instance ID:", instance_id)
            print("Error Message:", error_msg[0]['errorMsg'])
            if response.status == 404:
                print(">>> If this is a new HPCS instance, ensure you have initialized it with a master key via the ibmcloud tke cli")
            sys.exit()
    conn.close()
    return key_list, need_new_token
# ##########################################################################
# get_key_id function
# ##########################################################################
def get_key_id(correlation_id, host, instance_id, access_token, key_name):
    '''Get the id corresponding to a key name.

    Input is the target Key Protect host, an instance ID for the
    Key Protect service, an access token, and the name of the key
    for which we want to retrieve an ID. Also a correlation ID.
    Output is the ID associated with the requested key ("NOT FOUND"
    when absent) and a boolean indicating a new access token is needed.
    '''
    # Removed an unused, copy-pasted "headers" dict: get_key_list builds
    # its own request headers.
    need_new_token = False
    key_id = "NOT FOUND"
    # get list of root keys owned by this KP instance...
    key_list, need_new_token = get_key_list(correlation_id,
                                            host,
                                            instance_id,
                                            access_token)
    # search for the desired root key from the returned list of keys
    # and save its ID (if duplicate names exist, the last match wins)
    if not need_new_token:
        for k in key_list:
            if k['name'] == key_name:
                key_id = k['id']
                if not quiet:
                    print("Found desired key in list of stored keys")
        if verbose:
            print("List of stored keys found:")
            for k in key_list:
                print(" Key name:", k['name'], "with ID:", k['id'])
        if key_id == "NOT FOUND" and not quiet:
            print("Desired key", key_name, "not found in list of stored keys")
    return key_id, need_new_token
# ##########################################################################
# Function to create a standard or root key
# ##########################################################################
def create_key(correlation_id, host, instance_id, access_token, key_alias, extractable=False):
'''Contact Key Protect to create a standard or root key.
Input is the target Key Protect host, an instance ID for the
Key Protect service, an access token, a unique, human-readable
key name (alias), and a boolean variable indicating whether the key
to be created is extractable or not. Extractable=True means
we are creating a standard key that can be retrieved and used to
encrypt/decrypt data; extractable=False, the default, means we
are creating a root key that will never leave the HSM, and can only
be used to encrypt/decrypt other keys.
Output is the ID of the newly-created key, or a boolean indicating
that a new access token is needed.
'''
if extractable:
description = 'KeyProtectSample.py standard key'
else:
description = 'KeyProtectSample.py root key -- generated'
headers = {
"content_type":"application/vnd.ibm.kms.key+json",
"authorization":"Bearer " + access_token,
"bluemix-instance":instance_id,
"prefer":"return=representation",
"correlation-id":correlation_id # used for debug
}
body_template = {
'metadata':{
'collectionType':'application/vnd.ibm.kms.key+json',
'collectionTotal':1
},
'resources':[]
}
body = {
'type':'application/vnd.ibm.kms.key+json',
'name':key_alias,
'description': description,
'extractable': extractable
}
request_body = copy.deepcopy(body_template)
request_body['resources'].append(body)
request_string_body = json.dumps(request_body)
need_new_token = False
key_list = {}
key_id = ""
conn = httplib.HTTPSConnection(host)
try:
conn.request("POST", "/api/v2/keys", request_string_body, headers)
response = conn.getresponse()
except socket.error as errno:
print("Socket error attempting to connect to Key Protect service to create a key")
print(errno)
sys.exit()
except:
print("Unexpected error attempting to connect to Key Protect service to | |
snatEntry: Whether to configure SNAT for the network.
When a VPC can access the public network environment, set it to false.
When an existing VPC cannot access the public network environment:
When set to True, SNAT is configured and the public network environment can be accessed at this time.
If set to false, it means that SNAT is not configured and the public network environment cannot be accessed at this time.
Default to true.
'''
result = self._values.get("snat_entry")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
    @builtins.property
    def tags(
        self,
    ) -> typing.Optional[typing.List["RosManagedKubernetesCluster.TagsProperty"]]:
        '''Property tags: Tag the cluster.'''
        # jsii-generated accessor: returns the raw value captured at
        # construction time; typing.cast is a no-op at runtime.
        result = self._values.get("tags")
        return typing.cast(typing.Optional[typing.List["RosManagedKubernetesCluster.TagsProperty"]], result)
    @builtins.property
    def taint(
        self,
    ) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Mapping[builtins.str, typing.Any]]]]:
        '''Property taint: It is used to mark nodes with taints.

        It is usually used for the scheduling strategy of Pods. The corresponding concept is: tolerance. If there is a corresponding tolerance mark on the Pods, the stain on the node can be tolerated and scheduled to the node.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("taint")
        return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Mapping[builtins.str, typing.Any]]]], result)
    @builtins.property
    def timeout_mins(
        self,
    ) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property timeoutMins: Cluster resource stack creation timeout, in minutes.

        The default value is 60.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("timeout_mins")
        return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
    @builtins.property
    def worker_auto_renew(
        self,
    ) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
        '''Property workerAutoRenew: Whether to enable automatic renewal of Worker nodes.

        The optional values are:
        true: automatic renewal
        false: do not renew automatically
        Default to true.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("worker_auto_renew")
        return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
    @builtins.property
    def worker_auto_renew_period(
        self,
    ) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property workerAutoRenewPeriod: Automatic renewal cycle, which takes effect when prepaid and automatic renewal are selected, and is required: When PeriodUnit = Week, the values are: {"1", "2", "3"} When PeriodUnit = Month, the value is {"1", "2", "3", "6", "12"} Default to 1.'''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("worker_auto_renew_period")
        return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
    @builtins.property
    def worker_data_disk(
        self,
    ) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
        '''Property workerDataDisk: Whether to mount the data disk.

        The options are as follows:
        true: indicates that the worker node mounts data disks.
        false: indicates that the worker node does not mount data disks.
        Default to false.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("worker_data_disk")
        return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
    @builtins.property
    def worker_data_disks(
        self,
    ) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosManagedKubernetesCluster.WorkerDataDisksProperty"]]]]:
        '''Property workerDataDisks: A combination of configurations such as worker data disk type and size.

        This parameter is valid only when the worker node data disk is mounted.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("worker_data_disks")
        return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosManagedKubernetesCluster.WorkerDataDisksProperty"]]]], result)
    @builtins.property
    def worker_instance_charge_type(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property workerInstanceChargeType: Worker node payment type.

        The optional values are:
        PrePaid: prepaid
        PostPaid: Pay as you go
        Default to PostPaid.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("worker_instance_charge_type")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
    @builtins.property
    def worker_period(
        self,
    ) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property workerPeriod: The duration of the annual and monthly subscription.

        It takes effect when the worker_instance_charge_type value is PrePaid and is required. The value range is:
        When PeriodUnit = Week, Period values are: {"1", "2", "3", "4"}
        When PeriodUnit = Month, Period values are: {"1", "2", "3", "4", "5", "6", "7", "8", "9", "12", "24", "36", "48", "60"}
        Default to 1.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("worker_period")
        return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
    @builtins.property
    def worker_period_unit(
        self,
    ) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property workerPeriodUnit: When you specify PrePaid, you need to specify the period.

        The options are:
        Week: Time is measured in weeks
        Month: time in months
        Default to Month.
        '''
        # jsii-generated accessor over the captured construction values.
        result = self._values.get("worker_period_unit")
        return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def worker_system_disk_category(
    self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
    '''Property workerSystemDiskCategory: Worker node system disk type.

    Allowed values:
    cloud_efficiency: efficient cloud disk
    cloud_ssd: SSD cloud disk
    Defaults to cloud_efficiency.
    '''
    disk_category = self._values.get("worker_system_disk_category")
    return typing.cast(
        typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
        disk_category,
    )
@builtins.property
def worker_system_disk_size(
    self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
    '''Property workerSystemDiskSize: Worker node system disk size in GiB.

    Defaults to 120.
    '''
    disk_size = self._values.get("worker_system_disk_size")
    return typing.cast(
        typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
        disk_size,
    )
def __eq__(self, rhs: typing.Any) -> builtins.bool:
    # Two props objects are equal when they are the same type and carry
    # identical underlying value mappings.
    if not isinstance(rhs, self.__class__):
        return False
    return rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
    # Defined as the strict negation of equality.
    is_equal = rhs == self
    return not is_equal
def __repr__(self) -> str:
    # Render every stored property as `name=repr(value)` for debuggability.
    fields = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
    return "ManagedKubernetesClusterProps(%s)" % fields
class RosAnyCluster(
    ros_cdk_core.RosResource,
    metaclass=jsii.JSIIMeta,
    jsii_type="@alicloud/ros-cdk-cs.RosAnyCluster",
):
    '''A ROS template type: ``ALIYUN::CS::AnyCluster``.

    Generated jsii binding: all calls delegate to the jsii kernel via
    ``jsii.create``/``jsii.invoke``/``jsii.get``/``jsii.set``.
    '''

    def __init__(
        self,
        scope: ros_cdk_core.Construct,
        id: builtins.str,
        props: "RosAnyClusterProps",
        enable_resource_property_constraint: builtins.bool,
    ) -> None:
        '''Create a new ``ALIYUN::CS::AnyCluster``.

        :param scope: - scope in which this resource is defined.
        :param id: - scoped id of the resource.
        :param props: - resource properties.
        :param enable_resource_property_constraint: - whether to validate property constraints.
        '''
        # Construction is performed by the jsii kernel against the JS implementation.
        jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])

    @jsii.member(jsii_name="renderProperties")
    def _render_properties(
        self,
        props: typing.Mapping[builtins.str, typing.Any],
    ) -> typing.Mapping[builtins.str, typing.Any]:
        '''Render the raw property map used during template synthesis.

        :param props: - the property map to render.
        '''
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))

    @jsii.python.classproperty # type: ignore[misc]
    @jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
    def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
        '''The resource type name for this resource class.'''
        return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrClusterId")
    def attr_cluster_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: ClusterId: Cluster instance ID.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrClusterId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrDefaultUserKubeConfig")
    def attr_default_user_kube_config(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: DefaultUserKubeConfig: Default user kubernetes config which is used for configuring cluster credentials.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDefaultUserKubeConfig"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrNodes")
    def attr_nodes(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: Nodes: The list of cluster nodes.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrNodes"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrPrivateUserKubConfig")
    def attr_private_user_kub_config(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: PrivateUserKubConfig: Private user kubernetes config which is used for configuring cluster credentials.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrPrivateUserKubConfig"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrScalingConfigurationId")
    def attr_scaling_configuration_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: ScalingConfigurationId: Scaling configuration id
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrScalingConfigurationId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrScalingGroupId")
    def attr_scaling_group_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: ScalingGroupId: Scaling group id
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrScalingGroupId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrScalingRuleId")
    def attr_scaling_rule_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: ScalingRuleId: Scaling rule id
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrScalingRuleId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrTaskId")
    def attr_task_id(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: TaskId: Task ID. Automatically assigned by the system, the user queries the task status.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTaskId"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="attrWorkerRamRoleName")
    def attr_worker_ram_role_name(self) -> ros_cdk_core.IResolvable:
        '''
        :Attribute: WorkerRamRoleName: Worker ram role name.
        '''
        return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrWorkerRamRoleName"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="rosProperties")
    def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
        # Raw property mapping as held by the underlying ROS resource.
        return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="clusterConfig")
    def cluster_config(
        self,
    ) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
        '''
        :Property: clusterConfig: Cluster config.
        '''
        return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], jsii.get(self, "clusterConfig"))

    @cluster_config.setter
    def cluster_config(
        self,
        value: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
    ) -> None:
        # Setter forwards to the jsii kernel; validation happens on the JS side.
        jsii.set(self, "clusterConfig", value)

    @builtins.property # type: ignore[misc]
    @jsii.member(jsii_name="enableResourcePropertyConstraint")
    def enable_resource_property_constraint(self) -> builtins.bool:
        # Whether property constraint validation is enabled for this resource.
        return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))

    @enable_resource_property_constraint.setter
    def enable_resource_property_constraint(self, value: builtins.bool) -> None:
        jsii.set(self, "enableResourcePropertyConstraint", value)
@jsii.data_type(
    jsii_type="@alicloud/ros-cdk-cs.RosAnyClusterProps",
    jsii_struct_bases=[],
    name_mapping={"cluster_config": "clusterConfig"},
)
class RosAnyClusterProps:
    def __init__(
        self,
        *,
        cluster_config: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
    ) -> None:
        '''Properties for defining a ``ALIYUN::CS::AnyCluster``.

        :param cluster_config: the raw cluster configuration mapping.
        '''
        self._values: typing.Dict[str, typing.Any] = {"cluster_config": cluster_config}

    @builtins.property
    def cluster_config(
        self,
    ) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
        '''
        :Property: clusterConfig: Cluster config.
        '''
        config = self._values.get("cluster_config")
        assert config is not None, "Required property 'cluster_config' is missing"
        return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], config)

    def __eq__(self, rhs: typing.Any) -> builtins.bool:
        # Equal only for the same props type with identical stored values.
        if not isinstance(rhs, self.__class__):
            return False
        return rhs._values == self._values

    def __ne__(self, rhs: typing.Any) -> builtins.bool:
        is_equal = rhs == self
        return not is_equal

    def __repr__(self) -> str:
        fields = ", ".join(f"{key}={value!r}" for key, value in self._values.items())
        return "RosAnyClusterProps(%s)" % fields
class RosClusterNodePool(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-cs.RosClusterNodePool",
):
'''A ROS template type: ``ALIYUN::CS::ClusterNodePool``.'''
def __init__(
    self,
    scope: ros_cdk_core.Construct,
    id: builtins.str,
    props: "RosClusterNodePoolProps",
    enable_resource_property_constraint: builtins.bool,
) -> None:
    '''Create a new ``ALIYUN::CS::ClusterNodePool``.

    :param scope: - scope in which this resource is defined.
    :param id: - scoped id of the resource.
    :param props: - resource properties.
    :param enable_resource_property_constraint: - whether to validate property constraints.
    '''
    # Delegate construction to the jsii kernel with the positional argument list.
    constructor_args = [scope, id, props, enable_resource_property_constraint]
    jsii.create(self.__class__, self, constructor_args)
@jsii.member(jsii_name="renderProperties")
def _render_properties(
    self,
    props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
    '''Render the raw property map used during template synthesis.

    :param props: - the property map to render.
    '''
    rendered = jsii.invoke(self, "renderProperties", [props])
    return typing.cast(typing.Mapping[builtins.str, typing.Any], rendered)
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
    '''The resource type name for this resource class.'''
    type_name = jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME")
    return typing.cast(builtins.str, type_name)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrNodePoolId")
def attr_node_pool_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: NodePoolId: Cluster node pool ID.
'''
return | |
# gh_stars: 0
from __future__ import unicode_literals
import time
import frappe
import frappe.client
import frappe.handler
import jwt
from frappe import _
import base64
from passlib.context import CryptContext
from mimetypes import guess_type
from frappe.utils import add_days, cint
@frappe.whitelist(allow_guest=True)
def login(**kwargs):
    """Authenticate a Customer by email/password and issue a JWT access token.

    Expected kwargs: ``email``, ``password``, ``udid`` (device id); optional ``fcm``.
    Writes the outcome into ``frappe.local.response`` (status + data) rather than
    returning a value, following this module's API convention.
    """
    # Honour the client-requested language for translated messages.
    lang = frappe.get_request_header("Language") or "ar"
    frappe.local.lang = lang
    data = kwargs

    # --- required input validation (answer immediately on the first miss) ---
    for field, message in (("udid", _("Incorrect credentials")),
                           ("email", _("Email Required")),
                           ("password", _("Password Required"))):
        if field not in data:
            frappe.local.response['status'] = {"message": message, "success": False,
                                               "code": 422}
            frappe.local.response['data'] = None
            return

    email = data['email']
    udid = data['udid']
    password = data['password']
    fcm = data.get('fcm')

    log = frappe.get_doc({"doctype": "Api Log"})
    # NOTE(security): passwords are stored as str(base64(plaintext)) — that is
    # encoding, not hashing; consider frappe's password utilities instead.
    # str() of the bytes object (i.e. "b'...'") matches the stored format.
    encoded = str(base64.b64encode(password.encode("utf-8")))

    # Single lookup (the original issued the identical query twice).
    customer_list = frappe.get_all("Customer", ['name'],
                                   filters={"email": email, "password": encoded})
    if not customer_list:
        frappe.local.response['http_status_code'] = 403
        log.response = "Incorrect credentials"
        log.request = "login"
        log.flags.ignore_permissions = True
        log.insert()
        frappe.db.commit()
        # Fixed user-facing typo: was "Incorrect credentials1".
        frappe.local.response['status'] = {"message": _("Incorrect credentials"), "success": False,
                                           "code": 403}
        frappe.local.response['data'] = None
        return

    customer_doc = frappe.get_doc("Customer", customer_list[0].name)
    full_name = customer_doc.customer_name
    name = customer_doc.name

    # NOTE(security): hard-coded signing secret; should come from site config.
    secret_key = "Me System"
    issuedat_claim = time.time()
    notbefore_claim = issuedat_claim
    expire_claim = issuedat_claim + (60 * 60 * 3 * 24 * 5)  # token lifetime in seconds
    token = {
        "iat": issuedat_claim,
        "nbf": notbefore_claim,
        "exp": expire_claim,
        "data": {
            "full_name": full_name,
            "name": name
        }}
    try:
        # PyJWT 1.x returns bytes (decode to str); PyJWT 2.x already returns str.
        token = jwt.encode(token, secret_key, algorithm="HS256").decode()
    except AttributeError:
        token = jwt.encode(token, secret_key, algorithm="HS256")

    # Upsert the device record for this udid and attach the fresh token.
    customer_devices = frappe.get_all("User Device", ['name'],
                                      filters={"udid": udid, "docstatus": ['<', 2]})
    if customer_devices:
        customer_device = frappe.get_doc("User Device", customer_devices[0].name)
    else:
        customer_device = frappe.get_doc({"doctype": "User Device"})
    customer_device.user_type = "Customer"
    customer_device.user = customer_doc.name
    customer_device.udid = udid
    customer_device.fcm = fcm
    customer_device.access_token = str(token)
    customer_device.enabled = 1
    customer_device.flags.ignore_permissions = True
    customer_device.save(ignore_permissions=True)

    ret_Customer = user(customer_doc.name)
    msg = _("Login Success")
    log.response = msg
    log.token = None
    log.Customer = customer_doc.name
    log.request = "login"
    log.flags.ignore_permissions = True
    log.insert()
    frappe.db.commit()
    frappe.local.response['status'] = {
        "message": _("Login Success"),
        "code": 1,
        "success": True
    }
    frappe.local.response['data'] = {
        "User": ret_Customer,
        "user_type": "Customer",
        "access_token": str(token)
    }
@frappe.whitelist(allow_guest=True)
def register(**kwargs):
    """Register a new Customer, upsert their device record, and return a JWT.

    Expected kwargs: ``email``, ``password``, ``udid``, ``full_name``,
    ``mobile_number``; optional ``fcm``, ``city``, ``gender`` and an uploaded
    profile image. Writes the outcome into ``frappe.local.response``.
    """
    lang = frappe.get_request_header("Language") or "ar"
    frappe.local.lang = lang
    data = kwargs

    # --- required fields --------------------------------------------------
    # The original set an error response but kept executing (missing returns),
    # so registration proceeded with incomplete data; each failure now stops.
    if 'udid' not in data:
        frappe.local.response['status'] = {"message": _("Incorrect credentials"), "success": False, "code": 422}
        frappe.local.response['data'] = None
        return
    if 'email' not in data:
        frappe.local.response['status'] = {"message": _("Email required"), "code": 1, "success": False}
        frappe.local.response['data'] = None
        return
    if 'password' not in data:
        frappe.local.response['status'] = {"message": _("Password required"), "code": 1, "success": False}
        frappe.local.response['data'] = None
        return
    if 'full_name' not in data:
        frappe.local.response['status'] = {"message": _("Full name required"), "code": 1, "success": False}
        frappe.local.response['data'] = None
        return
    if 'mobile_number' not in data:
        # The original read data['mobile_number'] unconditionally and crashed
        # with a KeyError when it was absent.
        frappe.local.response['status'] = {"message": _("Mobile number required"), "code": 1, "success": False}
        frappe.local.response['data'] = None
        return

    email = data['email']
    password = data['password']
    udid = data['udid']
    full_name = data['full_name']
    mobile = data['mobile_number']
    fcm = data.get('fcm')
    city = data.get('city')
    gender = data.get('gender')  # collected but not yet stored on the Customer

    log = frappe.get_doc({"doctype": "Api Log"})
    if frappe.get_all('Customer', ['email'], filters={"email": email, "mobile_number": mobile}):
        frappe.local.response['status'] = {"message": _("This user is already exist"), "success": False, "code": 422}
        frappe.local.response['data'] = None
        return

    # NOTE(security): base64 is encoding, not hashing; str(bytes) ("b'...'")
    # matches the legacy stored format that login() compares against.
    encoded = base64.b64encode(password.encode("utf-8"))

    image = None
    try:
        res = uploadfile()  # optional profile image in the multipart payload
        image = res.file_url
    except Exception:
        image = None

    frappe.get_doc({"doctype": "Customer",
                    "mobile_number": mobile,
                    "email": email,
                    "customer_name": full_name,
                    "password": str(encoded),
                    "city": city,
                    "image": image,
                    "customer_type": "Individual",
                    "customer_group": "Individual",
                    "territory": "Rest Of The World"
                    }).save(ignore_permissions=True)
    frappe.db.commit()
    cus_list = frappe.get_all("Customer", ['name'], filters={"mobile_number": mobile, "email": email})
    name = cus_list[0].name

    # NOTE(security): hard-coded signing secret; should come from site config.
    secret_key = "Me System"
    issuedat_claim = time.time()
    token = {
        "iat": issuedat_claim,
        "nbf": issuedat_claim,
        "exp": issuedat_claim + (60 * 60 * 3 * 24 * 5),  # token lifetime in seconds
        "data": {
            "full_name": full_name,
            "name": name
        }}
    try:
        # PyJWT 1.x returns bytes (decode to str); PyJWT 2.x already returns str.
        token = jwt.encode(token, secret_key, algorithm="HS256").decode()
    except AttributeError:
        token = jwt.encode(token, secret_key, algorithm="HS256")

    customer_devices = frappe.get_all("User Device", ['name'], filters={"udid": udid, "docstatus": ['<', 2]})
    # Bug fix: the original tested `if customer_device:` (always None at that
    # point), so an existing device record was never reused.
    if customer_devices:
        customer_device = frappe.get_doc("User Device", customer_devices[0].name)
    else:
        customer_device = frappe.get_doc({"doctype": "User Device"})
    customer_device.user_type = "Customer"
    customer_device.user = name
    customer_device.udid = udid
    customer_device.fcm = fcm
    customer_device.access_token = str(token)
    customer_device.enabled = 1
    customer_device.flags.ignore_permissions = True
    customer_device.save()

    ret_Customer = user(name)
    msg = _("Register Success")
    log.response = msg
    log.token = None
    log.Customer = name
    log.request = "register"
    log.flags.ignore_permissions = True
    log.insert()
    frappe.db.commit()
    frappe.local.response['status'] = {
        "message": msg,
        "code": 1,
        "success": True
    }
    frappe.local.response['data'] = {
        "Customer": ret_Customer,
        "access_token": str(token)
    }
@frappe.whitelist(allow_guest=True)
def change_password(**kwargs):
    """Change the authenticated customer's password and rotate their JWT.

    Expected kwargs: ``old_password``, ``new_password``. Requires a valid
    Bearer token (validated via ``check_token``); on success a fresh token
    replaces the one stored on the caller's User Device record.
    """
    lang = frappe.get_request_header("Language") or "ar"
    frappe.local.lang = lang
    data = kwargs
    check = check_token()

    if 'old_password' not in data:
        frappe.local.response['status'] = {"message": _("Old password required"), "success": False, "code": 403}
        frappe.local.response['data'] = None
        return
    if 'new_password' not in data:
        frappe.local.response['status'] = {"message": _("New password required"), "success": False, "code": 403}
        frappe.local.response['data'] = None
        return
    old_password = data['old_password']
    new_password = data['new_password']

    user1 = check['user'] if check and "user" in check else None
    if not user1 or user1.customer_type != "Individual":
        frappe.local.response['http_status_code'] = 403
        frappe.local.response['status'] = {"message": _("Not Authorized"), "success": False, "code": 403}
        frappe.local.response['data'] = None
        return

    # Passwords are stored as str(base64(utf-8 bytes)) — reproduce that exact
    # (legacy) format for both the comparison and the new stored value.
    old_encoded = base64.b64encode(old_password.encode("utf-8"))
    new_encoded = base64.b64encode(new_password.encode("utf-8"))
    if str(old_encoded) != user1.password:
        frappe.local.response['status'] = {"message": _("Old password not correct"), "success": False, "code": 403}
        frappe.local.response['data'] = None
        return

    customer = frappe.get_doc("Customer", user1.name)
    customer.set("password", str(new_encoded))
    customer.save(ignore_permissions=True)
    frappe.db.commit()
    name = customer.name

    # NOTE(security): hard-coded signing secret; should come from site config.
    secret_key = "Me System"
    issuedat_claim = time.time()
    token = {
        "iat": issuedat_claim,
        "nbf": issuedat_claim,
        "exp": issuedat_claim + (60 * 60 * 3 * 24 * 5),  # token lifetime in seconds
        "data": {
            "full_name": customer.customer_name,
            "name": name
        }}
    try:
        # PyJWT 1.x returns bytes (decode to str); PyJWT 2.x already returns str.
        token = jwt.encode(token, secret_key, algorithm="HS256").decode()
    except AttributeError:
        token = jwt.encode(token, secret_key, algorithm="HS256")

    # Rotate the token on the device that made this request.
    current_token = frappe.get_request_header("Authorization").replace('Bearer ', '')
    customer_devices = frappe.get_all("User Device", ['name'],
                                      filters={"access_token": current_token, "docstatus": ['<', 2]})
    customer_device = frappe.get_doc("User Device", customer_devices[0].name)
    customer_device.access_token = str(token)
    customer_device.save(ignore_permissions=True)
    frappe.db.commit()
    frappe.local.response['status'] = {"message": _("Password reset successfully"), "success": True, "code": 200}
    frappe.local.response['data'] = {
        "data": None,
        "access_token": str(token)
    }
    return
@frappe.whitelist(allow_guest=True)
def get_notifications():
    """Return the authenticated customer's notifications, newest first."""
    lang = frappe.get_request_header("Language") or "ar"
    frappe.local.lang = lang
    check = check_token()
    if not (check and "user" in check):
        # check_token has already written the failure response.
        return
    user1 = check['user']
    notifications = frappe.get_all("Notification Me", fields=["*"],
                                   filters={"reference": user1.name},
                                   order_by="modified desc")
    result = [
        {
            "id": notification.name,
            "type": notification.type,
            "text": notification.text,
            "status": notification.status,
            "doc_type": notification.doc_type,
            "doc_reference": notification.doc_reference,
            "date": notification.modified,
        }
        for notification in notifications
    ]
    frappe.local.response['status'] = {"message": _("Notifications"), "success": True,
                                       "code": 200}
    frappe.local.response['data'] = result
@frappe.whitelist(allow_guest=True)
def update_notification(**kwargs):
    """Mark the notification given by kwargs['id'] as opened."""
    lang = frappe.get_request_header("Language") or "ar"
    frappe.local.lang = lang
    check = check_token()
    data = kwargs
    if not (check and "user" in check):
        # check_token has already written the failure response.
        return
    user1 = check['user']
    if not frappe.db.exists("Notification Me", data['id']):
        frappe.local.response['status'] = {"message": _("Notification Not Found"), "success": False, "code": 403}
        frappe.local.response['data'] = None
        return
    notification = frappe.get_doc("Notification Me", data['id'])
    notification.status = "opened"
    notification.save(ignore_permissions=True)
    frappe.db.commit()
    frappe.local.response['status'] = {"message": _("Notification Updated"), "success": True, "code": 200}
    frappe.local.response['data'] = None
@frappe.whitelist(allow_guest=True)
def logout(**kwargs):
    """Invalidate the caller's Bearer token by clearing it on the User Device."""
    lang = frappe.get_request_header("Language") or "ar"
    frappe.local.lang = lang
    # NOTE(security): hard-coded signing secret; should come from site config.
    secret_key = "Me System"

    if not frappe.get_request_header("Authorization"):
        # The original fell through silently with an empty response here.
        frappe.local.response['status'] = {"message": _("Not Authorized"), "success": False, "code": 15}
        frappe.local.response['data'] = None
        return

    authorization_header = frappe.get_request_header("Authorization").split(" ")
    # Bug fix: the original used `and`, which accepted malformed headers
    # (a header failing only one of the two checks slipped through).
    if authorization_header[0] != "Bearer" or len(authorization_header) != 2:
        frappe.local.response['status'] = {"message": _("Not Authorized"), "success": False, "code": 15}
        frappe.local.response['data'] = None
        return

    token = frappe.get_request_header("Authorization").replace('Bearer ', '')
    customer_devices = frappe.get_all("User Device", ['name'],
                                      filters={"access_token": token, "docstatus": ['<', 2]})
    if not customer_devices:
        frappe.local.response['http_status_code'] = 403
        frappe.local.response['status'] = {"message": _("Not Authorized"), "success": False, "code": 15}
        frappe.local.response['data'] = None
        return

    try:
        jwt.decode(token, secret_key, algorithms="HS256")  # validity check only
    except Exception:
        frappe.local.response['http_status_code'] = 401
        frappe.local.response['status'] = {"message": _("Not Authorized"), "success": False, "code": 15}
        frappe.local.response['data'] = None
        return

    customer_device = frappe.get_doc("User Device", customer_devices[0].name)
    # Parameterized query instead of string formatting (SQL injection hardening).
    frappe.db.sql("""update `tabUser Device` set access_token = '' where name = %s""",
                  (customer_device.name,))
    frappe.db.commit()
    frappe.local.response['status'] = {"message": _("Logout"),
                                       "success": True,
                                       "code": 15}
    frappe.local.response['data'] = None
    return
@frappe.whitelist(allow_guest=True)
def get_profile(**kwards):
lang = "ar"
if frappe.get_request_header("Language"):
lang = frappe.get_request_header("Language")
frappe.local.lang = lang
check = check_token()
user1 = None
if check and "user" in check:
user1 = check['user']
if not user1 or user1.customer_type != "Individual":
frappe.local.response['http_status_code'] = 403
frappe.local.response['status'] = {"message": _("Not Authorized"), "success": False, "code": 403}
frappe.local.response['data'] = None
return
log = | |
# filename: ramjet/data_interface/tess_data_interface.py
"""
Code for a class for common interfacing with TESS data, such as downloading, sorting, and manipulating.
"""
try:
from enum import StrEnum
except ImportError:
from backports.strenum import StrEnum
import math
import re
import shutil
import sys
import tempfile
from enum import Enum
from pathlib import Path
from typing import Union, List, Dict
import numpy as np
import pandas as pd
import requests
from astropy.coordinates import SkyCoord
from astropy.io import fits
from astropy.table import Table
from astroquery.mast import Observations, Catalogs
from astroquery.exceptions import TimeoutError as AstroQueryTimeoutError
from astroquery.vizier import Vizier
from bokeh.io import show
from retrying import retry
from bokeh.plotting import Figure
from ramjet.analysis.light_curve_visualizer import plot_light_curve, create_dual_light_curve_figure
class TessFluxType(Enum):
    """
    An enum to represent the types of available fluxes in TESS two minute data.

    Member values are the FITS column names used in the light curve files.
    """
    SAP = 'SAP_FLUX'
    PDCSAP = 'PDCSAP_FLUX'
class ColumnName(StrEnum):
    """
    An enum for the names of the columns produced by the data interface class.

    As a StrEnum, members compare equal to (and can be used as) the plain
    column-name strings when indexing data frames.
    """
    TIC_ID = 'TIC ID'
    SECTOR = 'Sector'
def is_common_mast_connection_error(exception: Exception) -> bool:
    """
    Returns if the passed exception is a common MAST connection error. Made for deciding whether to retry a function.

    :param exception: The exception to check.
    :return: A boolean stating if the exception is a common MAST connection error.
    """
    # A single isinstance call with a tuple of types replaces the chained `or`s.
    return isinstance(exception, (
        AstroQueryTimeoutError,
        TimeoutError,
        requests.exceptions.ReadTimeout,
        requests.exceptions.ChunkedEncodingError,
        requests.exceptions.HTTPError,
        requests.exceptions.ConnectionError,
    ))
class NoDataProductsFoundException(Exception):
    """Raised when a MAST product download request matches no data products."""
class TessDataInterface:
"""
A class for common interfacing with TESS data, such as downloading, sorting, and manipulating.
"""
def __init__(self):
    # Raise the astroquery limits so large MAST queries do not time out or page.
    for service in (Observations, Catalogs):
        service.TIMEOUT = 2000
        service.PAGESIZE = 3000
    try:  # Temporary fix for astroquery's update of timeout and pagesize locations.
        for service in (Observations, Catalogs):
            service._portal_api_connection.TIMEOUT = 2000
            service._portal_api_connection.PAGESIZE = 3000
    except AttributeError:
        pass
    # Size of the TIC ID / observation batches sent per MAST request.
    self.mast_input_query_chunk_size = 1000
def get_all_tess_time_series_observations(self, tic_id: Union[int, List[int]] = None) -> pd.DataFrame:
    """
    Gets all TESS time-series observations, limited to science data product level. Breaks large queries up to make
    the communication with MAST smoother.

    :param tic_id: An optional TIC ID or list of TIC IDs to limit the query to.
    :return: The list of time series observations as rows in a Pandas data frame.
    """
    if tic_id is None or np.isscalar(tic_id):
        return self.get_all_tess_time_series_observations_chunk(tic_id)
    # Collect the per-chunk frames and concatenate once: `DataFrame.append`
    # was removed in pandas 2.0, and repeated appends were quadratic anyway.
    chunk_count = math.ceil(len(tic_id) / self.mast_input_query_chunk_size)
    observation_chunks = [
        self.get_all_tess_time_series_observations_chunk(tic_id_list_chunk)
        for tic_id_list_chunk in np.array_split(tic_id, chunk_count)
    ]
    return pd.concat(observation_chunks, ignore_index=True)
@staticmethod
@retry(retry_on_exception=is_common_mast_connection_error)
def get_all_tess_time_series_observations_chunk(tic_id: Union[int, List[int]] = None) -> pd.DataFrame:
    """
    Gets all TESS time-series observations, limited to science data product level. Repeats download attempt on
    error.

    :param tic_id: An optional TIC ID or list of TIC IDs to limit the query to.
    :return: The list of time series observations as rows in a Pandas data frame.
    """
    if tic_id is None:
        tic_id = []  # When the empty list is passed to `query_criteria`, any value is considered a match.
    # Network call to MAST; the @retry decorator re-attempts on common connection errors.
    tess_observations = Observations.query_criteria(obs_collection='TESS', dataproduct_type='timeseries',
                                                    calib_level=3,  # Science data product level.
                                                    target_name=tic_id)
    return tess_observations.to_pandas()
def get_product_list(self, observations: pd.DataFrame) -> pd.DataFrame:
    """
    A wrapper for MAST's `get_product_list`, allowing the use of Pandas DataFrames instead of AstroPy Tables.
    Breaks large queries up to make the communication with MAST smoother.

    :param observations: The data frame of observations to get. Will be converted from DataFrame to Table for query.
    :return: The data frame of the product list. Will be converted from Table to DataFrame for use.
    """
    if observations.shape[0] <= 1:
        return self.get_product_list_chunk(observations)
    # Collect the per-chunk frames and concatenate once: `DataFrame.append`
    # was removed in pandas 2.0, and repeated appends were quadratic anyway.
    chunk_count = math.ceil(observations.shape[0] / self.mast_input_query_chunk_size)
    product_list_chunks = [
        self.get_product_list_chunk(observations_chunk)
        for observations_chunk in np.array_split(observations, chunk_count)
    ]
    return pd.concat(product_list_chunks, ignore_index=True)
@staticmethod
@retry(retry_on_exception=is_common_mast_connection_error)
def get_product_list_chunk(observations: pd.DataFrame) -> pd.DataFrame:
    """
    A wrapper for MAST's `get_product_list`, allowing the use of Pandas DataFrames instead of AstroPy Tables.
    Retries on error when communicating with the MAST server.

    :param observations: The data frame of observations to get. Will be converted from DataFrame to Table for query.
    :return: The data frame of the product list. Will be converted from Table to DataFrame for use.
    """
    # Network call to MAST; the @retry decorator re-attempts on common connection errors.
    data_products = Observations.get_product_list(Table.from_pandas(observations))
    return data_products.to_pandas()
@staticmethod
@retry(retry_on_exception=is_common_mast_connection_error)
def download_products(data_products: pd.DataFrame, data_directory: Path) -> pd.DataFrame:
    """
    A wrapper for MAST's `download_products`, allowing the use of Pandas DataFrames instead of AstroPy Tables.
    Retries on error when communicating with the MAST server.

    :param data_products: The data frame of data products to download. Will be converted from DataFrame to Table
                          for sending the request to MAST.
    :param data_directory: The path to download the data to.
    :return: The manifest of the download. Will be converted from Table to DataFrame for use.
    """
    manifest = Observations.download_products(Table.from_pandas(data_products), download_dir=str(data_directory))
    # `download_products` returns None when nothing matched; surface that as an explicit exception.
    if manifest is None:
        raise NoDataProductsFoundException
    return manifest.to_pandas()
@staticmethod
def filter_for_single_sector_observations(time_series_observations: pd.DataFrame) -> pd.DataFrame:
    """
    Filters a data frame of observations to get only the single sector observations.

    :param time_series_observations: A data frame of observations to filter for single sector observations.
    :return: The data frame of single sector observations.
    """
    # Single-sector light curve products have data URLs ending in `lc.fits`.
    is_single_sector = time_series_observations['dataURL'].str.endswith('lc.fits')
    return time_series_observations[is_single_sector].copy()
@staticmethod
def filter_out_twenty_second_cadence_observations(time_series_observations: pd.DataFrame) -> pd.DataFrame:
    """
    Removes 20-second cadence data from the observation data frame.

    :param time_series_observations: A data frame of observations to filtered.
    :return: The data frame without 20-second cadence data.
    """
    # 20-second cadence products are identified by the `fast-lc.fits` suffix.
    is_fast_cadence = time_series_observations['dataURL'].str.endswith('fast-lc.fits')
    return time_series_observations[~is_fast_cadence].copy()
@staticmethod
def filter_for_multi_sector_observations(time_series_observations: pd.DataFrame) -> pd.DataFrame:
    """
    Filters a data frame of observations to get only the multi sector observations.

    :param time_series_observations: A data frame of observations to filter for multi sector observations.
    :return: The data frame of multi sector observations.
    """
    # Multi-sector data validation products have data URLs ending in `dvt.fits`.
    is_multi_sector = time_series_observations['dataURL'].str.endswith('dvt.fits')
    return time_series_observations[is_multi_sector].copy()
@staticmethod
def get_tic_id_from_single_sector_obs_id(obs_id: str) -> int:
    """
    Extracts the TIC ID from a single-sector obs_id string.

    :param obs_id: The obs_id to extract from.
    :return: The extracted TIC ID.
    """
    # The TIC ID is the third dash-delimited field, zero padded.
    tic_id_field = obs_id.split('-')[2]
    return int(tic_id_field.lstrip('0'))
@staticmethod
def get_sector_from_single_sector_obs_id(obs_id: str) -> int:
    """
    Extracts the sector from a single-sector obs_id string.

    :param obs_id: The obs_id to extract from.
    :return: The extracted sector number.
    """
    # The sector is the second dash-delimited field, prefixed with 's'.
    sector_field = obs_id.split('-')[1]
    return int(sector_field[1:])
def add_tic_id_column_to_single_sector_observations(self, data_frame: pd.DataFrame) -> pd.DataFrame:
    """
    Adds a column with the TIC ID the row is related to.

    :param data_frame: The data frame of single-sector entries.
    :return: The table with the added TIC ID column.
    """
    tic_ids = data_frame['obs_id'].map(self.get_tic_id_from_single_sector_obs_id)
    data_frame[ColumnName.TIC_ID] = tic_ids
    return data_frame
def add_sector_column_to_single_sector_observations(self, observations: pd.DataFrame) -> pd.DataFrame:
    """
    Adds a column with the sector the data was taken from.

    :param observations: The table of single-sector observations.
    :return: The table with the added sector column.
    """
    sectors = observations['obs_id'].map(self.get_sector_from_single_sector_obs_id)
    observations[ColumnName.SECTOR] = sectors
    return observations
def load_light_curve_from_fits_file(self, light_curve_path: Union[str, Path]) -> Dict[str, np.ndarray]:
"""
Loads a light_curve from a FITS file in a dictionary form with the structure of the FITS arrays.
:param light_curve_path: The path to the FITS file.
:return: The light_curve.
"""
try:
with fits.open(light_curve_path) as hdu_list:
light_curve = hdu_list[1].data # Light curve information is in first extension table.
except OSError: # If the FITS file is corrupt, re-download (seems to happen often enough).
light_curve_path = Path(light_curve_path) # In case it's currently a string.
light_curve_path.unlink()
tic_id, sector = self.get_tic_id_and_sector_from_file_path(light_curve_path)
self.download_two_minute_cadence_light_curve(tic_id=tic_id, sector=sector,
save_directory=light_curve_path.parent)
with fits.open(light_curve_path) as hdu_list:
light_curve = hdu_list[1].data # Light curve information is in first extension table.
return light_curve
def load_fluxes_and_times_from_fits_file(self, light_curve_path: Union[str, Path],
flux_type: TessFluxType = TessFluxType.PDCSAP,
remove_nans: bool = True) -> (np.ndarray, np.ndarray):
"""
Extract the flux and time values from a TESS FITS file.
:param light_curve_path: The path to the FITS file.
:param flux_type: The flux type to extract from the FITS file.
:param remove_nans: Whether or not to remove nans.
:return: The flux and times values from the FITS file.
"""
light_curve = self.load_light_curve_from_fits_file(light_curve_path)
fluxes = light_curve[flux_type.value]
times = light_curve['TIME']
assert times.shape == fluxes.shape
if remove_nans:
# noinspection PyUnresolvedReferences
nan_indexes = np.union1d(np.argwhere(np.isnan(fluxes)), np.argwhere(np.isnan(times)))
fluxes = np.delete(fluxes, nan_indexes)
times = np.delete(times, nan_indexes)
return fluxes, times
def load_fluxes_flux_errors_and_times_from_fits_file(self, light_curve_path: Union[str, Path],
flux_type: TessFluxType = TessFluxType.PDCSAP,
remove_nans: bool = True
) -> (np.ndarray, np.ndarray, np.ndarray):
"""
Extract the flux and time values from a TESS FITS file.
:param light_curve_path: The path to the FITS file.
:param flux_type: The flux type to extract from the FITS file.
:param remove_nans: Whether or not to remove nans.
:return: The flux and times values from the | |
pc: program counter of the first instruction(optional)
:type pc: int
:param fork: fork name (optional)
:type fork: str
:return: An generator of Instruction objects
:rtype: generator[Instructions]
Example use::
>>> assemble_one('''PUSH1 0x60\n \
PUSH1 0x40\n \
MSTORE\n \
PUSH1 0x2\n \
PUSH2 0x108\n \
PUSH1 0x0\n \
POP\n \
SSTORE\n \
PUSH1 0x40\n \
MLOAD\n \
''')
"""
asmcode = asmcode.split('\n')
asmcode = iter(asmcode)
# we use a dictionary to record label locations:
labels = {}
# another dictionary to record which instruction
# we need to fill in.
fillins = {}
# we have to traverse the generated instruction twice
# so no use of generator here
instrs = []
for line in asmcode:
line = line.strip()
# skip empty lines
if not line:
continue
# remove comments
index = line.find("#")
if index is not -1:
line = line[:index]
# skip directives:
if line.find(".") is 0:
continue
# handle labels
if line.endswith(":"):
# this is a label, record it with location (PC)
labels[line[:-1]] = pc
continue
instr = assemble_one(line, pc=pc, fork=fork, fillins=fillins)
instrs.append(instr)
pc += instr.size
# size of the contract is the current PC
labels["deploy.size"] = pc - 1
# fixup instructions
for label in labels:
if label not in fillins.keys():
continue
for instr in instrs:
if instr._pc in fillins[label]:
label_pc = labels[label]
fixup_instr(instr, label_pc)
# to keep it compatible with existing APIs
for instr in instrs:
yield instr
def disassemble_one(bytecode, pc=0, fork=DEFAULT_FORK):
    """ Disassemble a single instruction from a bytecode

        :param bytecode: the bytecode stream
        :type bytecode: str | bytes | bytearray | iterator
        :param pc: program counter of the instruction(optional)
        :type pc: int
        :param fork: fork name (optional)
        :type fork: str
        :return: an Instruction object, or None on an empty/truncated stream
        :rtype: Instruction
    """
    instruction_table = instruction_tables[fork]
    # Normalize any byte-like input into an iterator of ints.
    if isinstance(bytecode, bytes):
        bytecode = bytearray(bytecode)
    if isinstance(bytecode, str):
        bytecode = bytearray(bytecode.encode('latin-1'))
    stream = iter(bytecode)
    opcode = next(stream, None)
    if opcode is None:
        return None
    assert isinstance(opcode, int)
    # Copy the table entry so mutating `pc`/operand does not corrupt the LUT.
    instruction = copy.copy(instruction_table.get(opcode, None))
    if instruction is None:
        instruction = Instruction(opcode, 'INVALID', 0, 0, 0, 0, 'Unspecified invalid instruction.')
    instruction.pc = pc
    try:
        if instruction.has_operand:
            instruction.parse_operand(stream)
    except ParseError:
        instruction = None
    finally:
        # NOTE: returning from ``finally`` deliberately swallows any in-flight
        # exception; a truncated operand yields ``None`` instead of raising.
        return instruction
def disassemble_all(bytecode, pc=0, fork=DEFAULT_FORK):
    """ Disassemble all instructions in bytecode

        :param bytecode: an evm bytecode (binary)
        :type bytecode: str | bytes | bytearray | iterator
        :param pc: program counter of the first instruction(optional)
        :type pc: int
        :param fork: fork name (optional)
        :type fork: str
        :return: A generator of Instruction objects
        :rtype: generator[Instruction]

        Example use::

            >>> for inst in disassemble_all(bytecode):
            ...    print(inst)
            ...
            PUSH1 0x60
            PUSH1 0x40
            MSTORE
            PUSH1 0x2
            PUSH2 0x108
            PUSH1 0x0
            POP
            SSTORE
            PUSH1 0x40
            MLOAD
    """
    # NOTE: the default pc was 1, which disagreed with disassemble_one,
    # disassemble and disassemble_hex (all 0) and mislabeled every instruction
    # offset when callers relied on the default. EVM program counters start at 0.
    if isinstance(bytecode, bytes):
        bytecode = bytearray(bytecode)
    if isinstance(bytecode, str):
        bytecode = bytearray(bytecode.encode('latin-1'))
    bytecode = iter(bytecode)
    while True:
        instr = disassemble_one(bytecode, pc=pc, fork=fork)
        if not instr:
            return
        pc += instr.size
        yield instr
def disassemble(bytecode, pc=0, fork=DEFAULT_FORK):
    """ Disassemble an EVM bytecode

        :param bytecode: binary representation of an evm bytecode
        :type bytecode: str | bytes | bytearray
        :param pc: program counter of the first instruction(optional)
        :type pc: int
        :param fork: fork name (optional)
        :type fork: str
        :return: the text representation of the assembler code, one
            instruction per line
    """
    instructions = disassemble_all(bytecode, pc=pc, fork=fork)
    return '\n'.join(str(instruction) for instruction in instructions)
def assemble(asmcode, pc=0, fork=DEFAULT_FORK):
    """ Assemble an EVM program

        :param asmcode: an evm assembler program
        :type asmcode: str
        :param pc: program counter of the first instruction(optional)
        :type pc: int
        :param fork: fork name (optional)
        :type fork: str
        :return: the binary representation of the bytecode
        :rtype: bytes

        Example use::

            >>> assemble('''PUSH1 0x60\n \
                            BLOCKHASH\n \
                            MSTORE\n \
                            PUSH1 0x2\n \
                            PUSH2 0x100\n \
                         ''')
            ...
            b"\x60\x60\x60\x40\x52\x60\x02\x61\x01\x00"
    """
    # NOTE: the default pc was 1, inconsistent with assemble_hex (which always
    # passes pc=0) and with the disassembly entry points; EVM PCs start at 0.
    return b''.join(x.bytes for x in assemble_all(asmcode, pc=pc, fork=fork))
def disassemble_hex(bytecode, pc=0, fork=DEFAULT_FORK):
    """ Disassemble an EVM bytecode given as a hexadecimal string

        :param bytecode: canonical representation of an evm bytecode
            (hexadecimal, with or without a leading '0x')
        :type bytecode: str
        :param pc: program counter of the first instruction(optional)
        :type pc: int
        :param fork: fork name (optional)
        :type fork: str
        :return: the text representation of the assembler code
        :rtype: str
    """
    hex_digits = bytecode[2:] if bytecode.startswith('0x') else bytecode
    return disassemble(unhexlify(hex_digits), pc=pc, fork=fork)
def assemble_hex(asmcode, pc=0, fork=DEFAULT_FORK):
    """ Assemble an EVM program into a '0x'-prefixed hexadecimal string

        :param asmcode: an evm assembler program, or an already-assembled
            list of Instruction objects
        :type asmcode: str | list[Instruction]
        :param pc: program counter of the first instruction(optional)
        :type pc: int
        :param fork: fork name (optional)
        :type fork: str
        :return: the hex representation of the bytecode
        :rtype: str
    """
    if isinstance(asmcode, list):
        # Already assembled: just serialize the instruction bytes.
        raw = b''.join(instr.bytes for instr in asmcode)
    else:
        raw = assemble(asmcode, pc=pc, fork=fork)
    return '0x' + hexlify(raw).decode('ascii')
class InstructionTable():
    """
    EVM Instruction factory.

    An immutable, iterable instruction LUT indexed by either opcode (int)
    or long mnemonic (str, e.g. 'PUSH2').

    Example::

        >>> from pyevmasm import instruction_tables
        >>> instruction_table = instruction_tables['byzantium']
        >>> instruction_table[0]
        Instruction(0x0, 'STOP', 0, 0, 0, 0, 'Halts execution.', None, 0)
        >>> instruction_table['STOP']
        Instruction(0x0, 'STOP', 0, 0, 0, 0, 'Halts execution.', None, 0)
    """
    __slots__ = ('_instruction_list', '__name_to_opcode')

    def __init__(self, *args, **kwargs):
        # Optionally layer this table on top of a previous fork's table.
        previous_fork = kwargs.get('previous_fork', None)
        self._instruction_list = {}
        self.__name_to_opcode = None
        if previous_fork is not None:
            if not isinstance(previous_fork, self.__class__):
                raise TypeError("{} expected".format(self.__class__))
            self._instruction_list.update(previous_fork._instruction_list)
        self._instruction_list.update(args[0])

    @property
    def _name_to_opcode(self):
        # Lazily built reverse map from long mnemonic (e.g. 'PUSH2') to opcode.
        if self.__name_to_opcode is None:
            mapping = {}
            for opcode, spec in self._instruction_list.items():
                name, operand_size, pops, pushes, gas, description = spec
                # PUSH/DUP/SWAP/LOG entries are stored under their generic name;
                # the numeric suffix is derived from the entry itself.
                if name == 'PUSH':
                    suffix = operand_size
                elif name == 'DUP':
                    suffix = pops
                elif name == 'SWAP':
                    suffix = pops - 1
                elif name == 'LOG':
                    suffix = pops - 2
                else:
                    suffix = None
                long_name = name if suffix is None else '%s%d' % (name, suffix)
                mapping[long_name] = opcode
            self.__name_to_opcode = mapping
        return self.__name_to_opcode

    def _search_by_name(self, k):
        return self._search_by_opcode(self._name_to_opcode[k])

    def _search_by_opcode(self, k):
        return (k,) + self._instruction_list[k]

    def _search(self, k):
        # Try opcode (int) lookup first; fall back to mnemonic (str) lookup.
        try:
            return self._search_by_opcode(k)
        except KeyError:
            return self._search_by_name(k)

    def __getitem__(self, k):
        return Instruction(*self._search(k))

    def get(self, k, default=None):
        try:
            return self[k]
        except KeyError:
            return default

    def __contains__(self, k):
        return k in self._instruction_list or k in self._name_to_opcode

    def __iter__(self):
        # Yield fresh Instruction objects in ascending opcode order.
        for opcode in self.keys():
            yield Instruction(*self._search_by_opcode(opcode))

    def keys(self):
        return sorted(self._instruction_list)

    def __repr__(self):
        return repr(self._instruction_list)
# from http://gavwood.com/paper.pdf
frontier_instruction_table = {
0x0 : ('STOP', 0, 0, 0, 0, 'Halts execution.'),
0x1 : ('ADD', 0, 2, 1, 3, 'Addition operation.'),
0x2 : ('MUL', 0, 2, 1, 5, 'Multiplication operation.'),
0x3 : ('SUB', 0, 2, 1, 3, 'Subtraction operation.'),
0x4 : ('DIV', 0, 2, 1, 5, 'Integer division operation.'),
0x5 : ('SDIV', 0, 2, 1, 5, 'Signed integer division operation (truncated).'),
0x6 : ('MOD', 0, 2, 1, 5, 'Modulo remainder operation.'),
0x7 : ('SMOD', 0, 2, 1, 5, 'Signed modulo remainder operation.'),
0x8 : ('ADDMOD', 0, 3, 1, 8, 'Modulo addition operation.'),
0x9 : ('MULMOD', 0, 3, 1, 8, 'Modulo multiplication operation.'),
0xa : ('EXP', 0, 2, 1, 10, 'Exponential operation.'),
0xb : ('SIGNEXTEND', 0, 2, 1, 5, "Extend length of two's complement signed integer."),
0x10 : ('LT', 0, 2, 1, 3, 'Less-than comparision.'),
0x11 : ('GT', 0, 2, 1, 3, 'Greater-than comparision.'),
0x12 : ('SLT', 0, 2, 1, 3, 'Signed less-than comparision.'),
0x13 : ('SGT', 0, 2, 1, 3, 'Signed greater-than comparision.'),
0x14 : ('EQ', 0, 2, 1, 3, 'Equality comparision.'),
0x15 : ('ISZERO', 0, 1, 1, 3, 'Simple not operator.'),
0x16 : ('AND', 0, 2, 1, 3, 'Bitwise AND operation.'),
0x17 : ('OR', 0, 2, 1, 3, 'Bitwise OR operation.'),
0x18 : ('XOR', 0, 2, 1, 3, 'Bitwise XOR operation.'),
0x19 : ('NOT', 0, 1, 1, 3, 'Bitwise NOT operation.'),
0x1a : ('BYTE', 0, 2, 1, 3, 'Retrieve single byte from word.'),
0x20 : ('SHA3', 0, 2, 1, 30, | |
import collections
import re
import six
from six import string_types
import warnings
from contextlib import contextmanager
from copy import deepcopy, copy
from pprint import PrettyPrinter
from .optional_imports import get_module
from . import offline as pyo
from _plotly_utils.basevalidators import (
CompoundValidator, CompoundArrayValidator, BaseDataValidator,
BaseValidator, LiteralValidator
)
from . import animation
from .callbacks import (Points, BoxSelector, LassoSelector,
InputDeviceState)
from .utils import ElidedPrettyPrinter
from .validators import (DataValidator, LayoutValidator, FramesValidator)
# Optional imports
# ----------------
# numpy is optional at runtime; get_module returns None when it is absent.
np = get_module('numpy')
# Create Undefined sentinel value
#   - Setting a property to None removes any existing value
#   - Setting a property to Undefined leaves existing value unmodified
# (A plain object() is used so the sentinel can never collide with user data.)
Undefined = object()
class BaseFigure(object):
"""
Base class for all figure types (both widget and non-widget)
"""
_bracket_re = re.compile('^(.*)\[(\d+)\]$')
# Constructor
# -----------
    def __init__(self,
                 data=None,
                 layout_plotly=None,
                 frames=None,
                 skip_invalid=False):
        """
        Construct a BaseFigure object

        Parameters
        ----------
        data
            One of:
            - A list or tuple of trace objects (or dicts that can be coerced
              into trace objects)
            - If `data` is a dict that contains a 'data',
              'layout', or 'frames' key then these values are used to
              construct the figure.
            - If `data` is a `BaseFigure` instance then the `data`, `layout`,
              and `frames` properties are extracted from the input figure
        layout_plotly
            The plotly layout dict.

            Note: this property is named `layout_plotly` rather than `layout`
            to deconflict it with the `layout` constructor parameter of the
            `widgets.DOMWidget` ipywidgets class, as the `BaseFigureWidget`
            class is a subclass of both BaseFigure and widgets.DOMWidget.

            If the `data` property is a BaseFigure instance, or a dict that
            contains a 'layout' key, then this property is ignored.
        frames
            A list or tuple of `plotly.graph_objs.Frame` objects (or dicts
            that can be coerced into Frame objects)

            If the `data` property is a BaseFigure instance, or a dict that
            contains a 'frames' key, then this property is ignored.
        skip_invalid: bool
            If True, invalid properties in the figure specification will be
            skipped silently. If False (default) invalid properties in the
            figure specification will result in a ValueError

        Raises
        ------
        ValueError
            if a property in the specification of data, layout, or frames
            is invalid AND skip_invalid is False
        """
        super(BaseFigure, self).__init__()
        # Assign layout_plotly to layout
        # ------------------------------
        # See docstring note for explanation
        layout = layout_plotly
        # Subplot properties
        # ------------------
        # These properties are used by the tools.make_subplots logic.
        # We initialize them to None here, before checking if the input data
        # object is a BaseFigure, or a dict with _grid_str and _grid_ref
        # properties, in which case we bring over the _grid* properties of
        # the input
        self._grid_str = None
        self._grid_ref = None
        # Handle case where data is a Figure or Figure-like dict
        # ------------------------------------------------------
        if isinstance(data, BaseFigure):
            # Bring over subplot fields
            self._grid_str = data._grid_str
            self._grid_ref = data._grid_ref
            # Extract data, layout, and frames
            data, layout, frames = data.data, data.layout, data.frames
        elif (isinstance(data, dict)
              and ('data' in data or 'layout' in data or 'frames' in data)):
            # Bring over subplot fields
            self._grid_str = data.get('_grid_str', None)
            self._grid_ref = data.get('_grid_ref', None)
            # Extract data, layout, and frames
            data, layout, frames = (data.get('data', None),
                                    data.get('layout', None),
                                    data.get('frames', None))
        # Handle data (traces)
        # --------------------
        # ### Construct data validator ###
        # This is the validator that handles importing sequences of trace
        # objects
        # (set_uid=True: presumably assigns fresh uids to imported traces —
        # behavior defined by DataValidator, not visible here.)
        self._data_validator = DataValidator(set_uid=True)
        # ### Import traces ###
        data = self._data_validator.validate_coerce(data,
                                                    skip_invalid=skip_invalid)
        # ### Save tuple of trace objects ###
        self._data_objs = data
        # ### Import clone of trace properties ###
        # The _data property is a list of dicts containing the properties
        # explicitly set by the user for each trace.
        self._data = [deepcopy(trace._props) for trace in data]
        # ### Create data defaults ###
        # _data_defaults is a tuple of dicts, one for each trace. When
        # running in a widget context, these defaults are populated with
        # all property values chosen by the Plotly.js library that
        # aren't explicitly specified by the user.
        #
        # Note: No property should exist in both the _data and
        # _data_defaults for the same trace.
        self._data_defaults = [{} for _ in data]
        # ### Reparent trace objects ###
        for trace_ind, trace in enumerate(data):
            # By setting the trace's parent to be this figure, we tell the
            # trace object to use the figure's _data and _data_defaults
            # dicts to get/set it's properties, rather than using the trace
            # object's internal _orphan_props dict.
            trace._parent = self
            # We clear the orphan props since the trace no longer needs then
            trace._orphan_props.clear()
            # Set trace index
            trace._trace_ind = trace_ind
        # Layout
        # ------
        # ### Construct layout validator ###
        # This is the validator that handles importing Layout objects
        self._layout_validator = LayoutValidator()
        # ### Import Layout ###
        self._layout_obj = self._layout_validator.validate_coerce(
            layout, skip_invalid=skip_invalid)
        # ### Import clone of layout properties ###
        self._layout = deepcopy(self._layout_obj._props)
        # ### Initialize layout defaults dict ###
        self._layout_defaults = {}
        # ### Reparent layout object ###
        self._layout_obj._orphan_props.clear()
        self._layout_obj._parent = self
        # Frames
        # ------
        # ### Construct frames validator ###
        # This is the validator that handles importing sequences of frame
        # objects
        self._frames_validator = FramesValidator()
        # ### Import frames ###
        self._frame_objs = self._frames_validator.validate_coerce(
            frames, skip_invalid=skip_invalid)
        # Note: Because frames are not currently supported in the widget
        # context, we don't need to follow the pattern above and create
        # _frames and _frame_defaults properties and then reparent the
        # frames. The figure doesn't need to be notified of
        # changes to the properties in the frames object hierarchy.
        # Context manager
        # ---------------
        # ### batch mode indicator ###
        # Flag that indicates whether we're currently inside a batch_*()
        # context
        self._in_batch_mode = False
        # ### Batch trace edits ###
        # Dict from trace indexes to trace edit dicts. These trace edit dicts
        # are suitable as `data` elements of Plotly.animate, but not
        # the Plotly.update (See `_build_update_params_from_batch`)
        #
        # type: typ.Dict[int, typ.Dict[str, typ.Any]]
        self._batch_trace_edits = {}
        # ### Batch layout edits ###
        # Dict from layout properties to new layout values. This dict is
        # directly suitable for use in Plotly.animate and Plotly.update
        # type: typ.Dict[str, typ.Any]
        self._batch_layout_edits = {}
        # Animation property validators
        # -----------------------------
        self._animation_duration_validator = animation.DurationValidator()
        self._animation_easing_validator = animation.EasingValidator()
# Magic Methods
# -------------
def __reduce__(self):
"""
Custom implementation of reduce is used to support deep copying
and pickling
"""
props = self.to_dict()
props['_grid_str'] = self._grid_str
props['_grid_ref'] = self._grid_ref
return (self.__class__,
(props,))
def __setitem__(self, prop, value):
# Normalize prop
# --------------
# Convert into a property tuple
orig_prop = prop
prop = BaseFigure._str_to_dict_path(prop)
# Handle empty case
# -----------------
if len(prop) == 0:
raise KeyError(orig_prop)
# Handle scalar case
# ------------------
# e.g. ('foo',)
elif len(prop) == 1:
# ### Unwrap scalar tuple ###
prop = prop[0]
if prop == 'data':
self.data = value
elif prop == 'layout':
self.layout = value
elif prop == 'frames':
self.frames = value
else:
raise KeyError(prop)
# Handle non-scalar case
# ----------------------
# e.g. ('foo', 1)
else:
res = self
for p in prop[:-1]:
res = res[p]
res[prop[-1]] = value
def __setattr__(self, prop, value):
"""
Parameters
----------
prop : str
The name of a direct child of this object
value
New property value
Returns
-------
None
"""
if prop.startswith('_') or hasattr(self, prop):
# Let known properties and private properties through
super(BaseFigure, self).__setattr__(prop, value)
else:
# Raise error on unknown public properties
raise AttributeError(prop)
def __getitem__(self, prop):
# Normalize prop
# --------------
# Convert into a property tuple
orig_prop = prop
prop = BaseFigure._str_to_dict_path(prop)
# Handle scalar case
# ------------------
# e.g. ('foo',)
if len(prop) == 1:
# Unwrap scalar tuple
prop = prop[0]
if prop == 'data':
return self._data_validator.present(self._data_objs)
elif prop == 'layout':
return self._layout_validator.present(self._layout_obj)
elif prop == 'frames':
return self._frames_validator.present(self._frame_objs)
else:
raise KeyError(orig_prop)
# Handle non-scalar case
# ----------------------
# e.g. ('foo', 1)
else:
res = self
for p in prop:
res = res[p]
return res
def __iter__(self):
return iter(('data', 'layout', 'frames'))
def __contains__(self, prop):
return prop in ('data', 'layout', 'frames')
def __eq__(self, other):
if not isinstance(other, BaseFigure):
# Require | |
<filename>cogs/webserver.py
import ast
import asyncio
import logging
from typing import List, Optional

import aiohttp_cors
import discord
import hikari
from aiohttp import web, ClientSession
from discord.ext import commands

from utils.bot import ModMail
# Prefer uvloop's faster event loop implementation when it is installed; any
# failure (package missing, unsupported platform) silently falls back to the
# default asyncio loop.
try:
    import uvloop
    uvloop.install()
except Exception:
    pass
class Guild:
    """
    Lightweight view of a guild entry returned by Discord's
    ``GET /users/@me/guilds`` endpoint.

    Attribute names mirror the raw API payload so instances can be built
    directly with ``Guild(**payload)``.
    """

    def __init__(self, id: str, name: str, icon: str, owner: bool, permissions: int, features: List[str], permissions_new: str):
        self.id: str = id
        self.name: str = name
        # Icon hash from the API, or None when the guild has no icon set.
        self.icon: str = icon
        self.owner: bool = owner
        self.permissions: int = permissions
        self.features: List[str] = features
        self.permissions_new: str = permissions_new
        # Set later by WebServer.filter_guilds: True when the bot shares the guild.
        self.invited: bool = False

    def __str__(self) -> str:
        return f"<Guild name='{self.name}' id={self.id}>"

    def __repr__(self) -> str:
        return self.__str__()

    @property
    def icon_url(self) -> Optional[str]:
        """CDN URL of the guild icon, or ``None`` when the guild has no icon."""
        # Fix: was annotated ``-> str`` although the no-icon branch returns None.
        return f"https://cdn.discordapp.com/icons/{self.id}/{self.icon}.png" if self.icon is not None else None
class WebServer(commands.Cog):
    def __init__(self, client: ModMail):
        """Store the bot reference and the static web/OAuth/CORS configuration."""
        self.client = client
        # Separate hikari REST client used to call Discord with user OAuth tokens.
        self.rest_api = hikari.RESTApp()
        # aiohttp application placeholder — presumably assigned when the web
        # server is started elsewhere in this cog (not shown here).
        self.api = None
        self.BASE = "https://discord.com/api"
        # Must match the redirect URI registered in the Discord application.
        self.REDIRECT_URI = "https://mail-hook.xyz/callback"
        # self.REDIRECT_URI = "http://localhost:3000/callback"
        # CORS defaults applied to every route: allow any origin/header/method.
        self.cors_thing = {
            "*": aiohttp_cors.ResourceOptions(
                allow_credentials=True,
                expose_headers="*",
                allow_headers="*",
                allow_methods="*"
            )
        }
def filter_guilds(self, user_guilds: List[Guild], bot_guilds: List[discord.Guild]) -> List[Guild]:
mutual_guilds: List[Guild] = []
bot_guild_ids = [g.id for g in bot_guilds]
for guild in user_guilds:
if int(guild.id) in bot_guild_ids:
guild.invited = True
else:
guild.invited = False
mutual_guilds.append(guild)
return [g for g in mutual_guilds if g.permissions & hikari.Permissions.MANAGE_GUILD]
async def get_access_token(self, code: str) -> dict:
async with ClientSession() as session:
async with session.post(
f"{self.BASE}/oauth2/token",
data={
"client_id": str(self.client.user.id),
"client_secret": self.client.config.client_secret,
"grant_type": "authorization_code",
"code": code,
"redirect_uri": self.REDIRECT_URI,
},
) as resp:
return await resp.json()
async def get_user(self, token: str) -> hikari.OwnUser:
async with self.rest_api.acquire(token) as client:
return await client.fetch_my_user()
async def get_user_guilds(self, token: str) -> List[Guild]:
async with ClientSession() as session:
async with session.get(
f"{self.BASE}/users/@me/guilds",
headers={"Authorization": f"Bearer {token}"},
) as resp:
data = await resp.json()
return [Guild(**g) for g in data]
async def callback(self, request: web.Request):
code = (await request.json()).get("code").get("code")
if code is None:
raise web.HTTPBadRequest()
data = await self.get_access_token(code)
print(data)
return web.json_response({"access_token": data.get("access_token")})
async def get_own_user(self, request: web.Request):
access_token = request.headers.get("access_token")
if access_token is None:
raise web.HTTPBadRequest()
user = await self.get_user(access_token)
return web.json_response({
"id": str(user.id),
"username": user.username,
"discriminator": user.discriminator,
"avatar": str(user.avatar_url)
})
async def get_guilds(self, request: web.Request):
access_token = request.headers.get("access_token")
if access_token is None:
raise web.HTTPBadRequest()
user_guilds = await self.get_user_guilds(access_token)
bot_guilds = self.client.guilds
final_guilds = self.filter_guilds(user_guilds, bot_guilds)
return web.json_response({
"guilds": [{
"id": g.id,
"name": g.name,
"icon_url": g.icon_url,
"invited": g.invited
} for g in final_guilds]
})
async def update_mod_role(self, request: web.Request):
susu = await request.json()
role_id = susu.get("role_id")
guild_id = susu.get("guild_id")
access_token = susu.get("access_token")
user = await self.get_user(access_token)
user_id = user.id
if role_id is None or guild_id is None:
raise web.HTTPBadRequest()
guild = self.client.get_guild(int(guild_id))
if guild is None:
return web.json_response({"error": "Guild not found"})
member = guild.get_member(int(user_id))
if member is None:
return web.json_response({"error": "Not authorized"})
if not member.guild_permissions.manage_guild:
return web.json_response({"error": "Not authorized"})
role = guild.get_role(int(role_id))
if role is None:
return web.json_response({"error": "Role not found"})
await self.client.mongo.set_guild_data(guild_id=int(guild_id), staff_role=role.id)
self.client.dispatch("mod_role_update", guild, role)
return web.json_response({"success": True})
async def toggle_modping(self, request: web.Request):
susu = await request.json()
guild_id = susu.get("guild_id")
access_token = susu.get("access_token")
if guild_id is None or access_token is None:
raise web.HTTPBadRequest()
user = await self.get_user(access_token)
user_id = user.id
guild = self.client.get_guild(int(guild_id))
if guild is None:
return web.json_response({"error": "Guild not found"})
member = guild.get_member(int(user_id))
if member is None:
return web.json_response({"error": "Not authorized"})
if not member.guild_permissions.manage_guild:
return web.json_response({"error": "Not authorized"})
guild_data = await self.client.mongo.get_guild_data(guild_id=int(guild_id), raise_error=False)
await self.client.mongo.set_guild_data(guild_id=int(guild_id), ping_staff=not guild_data.get("ping_staff", True))
return web.json_response({"success": True})
async def update_category(self, request: web.Request):
susu = await request.json()
category_id = susu.get("category_id")
guild_id = susu.get("guild_id")
access_token = susu.get("access_token")
user = await self.get_user(access_token)
user_id = user.id
if category_id is None or guild_id is None:
raise web.HTTPBadRequest()
guild = self.client.get_guild(int(guild_id))
if guild is None:
return web.json_response({"error": "Guild not found"})
member = guild.get_member(int(user_id))
if member is None:
return web.json_response({"error": "Not authorized"})
if not member.guild_permissions.manage_guild:
return web.json_response({"error": "Not authorized"})
category = guild.get_channel(int(category_id))
if category is None:
return web.json_response({"error": "Category not found"})
await self.client.mongo.set_guild_data(guild_id=int(guild_id), category=category.id)
self.client.dispatch("category_update", guild, category)
return web.json_response({"success": True})
async def update_transcript_channel(self, request: web.Request):
susu = await request.json()
channel_id = susu.get("channel_id")
guild_id = susu.get("guild_id")
access_token = susu.get("access_token")
user = await self.get_user(access_token)
user_id = user.id
if channel_id is None or guild_id is None:
raise web.HTTPBadRequest()
guild = self.client.get_guild(int(guild_id))
if guild is None:
return web.json_response({"error": "Guild not found"})
member = guild.get_member(int(user_id))
if member is None:
return web.json_response({"error": "Not authorized"})
if not member.guild_permissions.manage_guild:
return web.json_response({"error": "Not authorized"})
channel = guild.get_channel(int(channel_id))
if channel is None:
return web.json_response({"error": "Channel not found"})
await self.client.mongo.set_guild_data(guild_id=int(guild_id), transcripts=channel.id)
self.client.dispatch("transcript_channel_update", guild, channel)
return web.json_response({"success": True})
async def check_setup(self, request: web.Request):
guild_id = request.headers.get("guild_id")
access_token = request.headers.get("access_token")
if guild_id is None or access_token is None:
raise web.HTTPBadRequest()
user = await self.get_user(access_token)
user_id = user.id
try:
guild_id = int(guild_id)
user_id = int(user_id)
except ValueError:
return web.json_response({"error": "Invalid guild id"})
guild = self.client.get_guild(guild_id)
if guild is None:
return web.json_response({"error": "Guild not found"})
member = guild.get_member(user_id)
if member is None:
return web.json_response({"error": "You are not in the guild."})
if not member.guild_permissions.manage_guild:
return web.json_response({"error": "You need manage_guild permissions to do that."})
guild_data = await self.client.mongo.get_guild_data(guild_id, raise_error=False)
if guild_data is None:
return web.json_response({"setup": False})
return web.json_response({"setup": True})
async def setup_guild(self, request: web.Request):
guild_id = request.headers.get("guild_id")
staff_role_id = request.headers.get("staff_role_id")
category_id = request.headers.get("category_id")
transcripts_id = request.headers.get("transcripts_id")
prefixes: List[str] = ast.literal_eval(request.headers.get("prefixes", "[]"))
prefixes = prefixes or self.client.config.prefixes.copy()
access_token = request.headers.get("access_token")
if guild_id is None or staff_role_id is None or category_id is None or transcripts_id is None or access_token is None:
raise web.HTTPBadRequest()
user = await self.get_user(access_token)
user_id = user.id
try:
guild_id = int(guild_id)
staff_role_id = int(staff_role_id)
category_id = int(category_id)
transcripts_id = int(transcripts_id)
except ValueError:
return web.json_response({"error": "Invalid guild id or role id or category id or transcripts id"})
guild = self.client.get_guild(guild_id)
if guild is None:
return web.json_response({"error": "Guild not found"})
member = guild.get_member(user_id)
if member is None:
return web.json_response({"error": "Unauthorized"})
if not member.guild_permissions.manage_guild:
return web.json_response({"error": "Unauthorized"})
staff_role = guild.get_role(staff_role_id)
if staff_role is None:
return web.json_response({"error": "Staff role not found"})
category = guild.get_channel(category_id)
if category is None:
return web.json_response({"error": "Category not found"})
transcripts = guild.get_channel(transcripts_id)
if transcripts is None:
return web.json_response({"error": "Transcripts channel not found"})
if not category.permissions_for(guild.me).manage_channels:
return web.json_response({"error": "I don't have permissions to create channels in the category."})
if not category.permissions_for(guild.me).manage_webhooks:
return web.json_response({"error": "I don't have permissions to create webhooks in the category."})
if not category.permissions_for(guild.me).read_message_history:
return web.json_response({"error": "I don't have permissions to read message history in the category."})
if not category.permissions_for(guild.me).use_external_emojis:
return web.json_response({"error": "I don't have permissions to use external emojis in the category."})
if not category.permissions_for(guild.me).add_reactions:
return web.json_response({"error": "I don't have permissions to add reactions in the category."})
if not category.permissions_for(guild.me).read_messages:
return web.json_response({"error": "I don't have permissions to read messages in the category."})
if not category.permissions_for(guild.me).send_messages:
return web.json_response({"error": "I don't have permissions to send messages in the category."})
if not transcripts.permissions_for(guild.me).read_messages or not transcripts.permissions_for(guild.me).send_messages:
return web.json_response({"error": "I don't have permissions to read messages or send messages in the transcripts channel."})
await self.client.mongo.set_guild_data(
guild.id,
category=category.id,
staff_role=staff_role.id,
transcripts=transcripts.id,
prefixes=prefixes
)
return web.json_response({"success": True, "message": "Guild setup complete." if guild.me.guild_permissions.administrator else "The guild setup is complete, it is recommended that you grant me administrator permissions for the best experience."})
async def get_guild_data(self, request: web.Request):
guild_id = request.headers.get("guild_id")
access_token = request.headers.get("access_token")
if guild_id is None or access_token is None:
raise web.HTTPBadRequest()
try:
user = await self.get_user(access_token)
except hikari.UnauthorizedError:
return web.json_response({"error": "Unauthorized"})
user_id = user.id
print(user_id, guild_id)
try:
int(guild_id)
int(user_id)
except ValueError:
return web.json_response({"error": "Invalid guild id or user id"})
guild = self.client.get_guild(int(guild_id))
if guild is None:
return web.json_response({"error": "Guild not found"})
member = guild.get_member(int(user_id))
if member is None:
return web.json_response({"error": "Member not found"})
if not member.guild_permissions.manage_guild:
return web.json_response({"error": "Insufficient permissions"})
guild_data = await self.client.mongo.get_guild_data(guild.id, raise_error=False)
if guild_data is not None:
modrole = guild.get_role(guild_data['staff_role'])
ping_staff = guild_data.get("ping_staff", True)
ticket_category = guild.get_channel(guild_data['category'])
transcripts_channel = guild.get_channel(guild_data['transcripts'])
guild_transcripts = [{
"ticketId": t_id,
"ticketUser": (lambda user: {
"id": user.id if user is not None else t_data['user_id'],
"username": user.name if user is not None else "Unknown User",
"discriminator": str(user.discriminator) if user is not None else "0000",
"avatar": user.display_avatar.url if user is not None else "https://cdn.discordapp.com/embed/avatars/0.png"
})(self.client.get_user(t_data['user_id'])),
} for t_id, t_data in guild_data.get('ticket_transcripts', {}).items()]
current_tickets = await self.client.mongo.get_guild_modmail_threads(guild.id)
prefixes = self.client.config.prefixes.copy()
templates = guild_data.get('templates', {})
ticket_open_message = guild_data.get('ticket_open_message', "{staff_role_mention} {user_mention} has | |
nsizespec, nstate, nX, nXp, nGX, nGXp = n.nodeargd.argnlist
sym = m.get('sym', self.DCSym)
process_arg_subscripts = m.get('process_arg_subscripts', False)
text = '{' + sym + '}'
tX = self.preprocess_contents_latex(nX)
tXp = self.preprocess_contents_latex(nXp)
if tX and tXp:
text += '_{' + tX + r'\to ' + tXp + '}'
elif tX:
text += '_{' + tX + '}'
elif tXp:
text += '_{' + tXp + '}'
if nepsilon is not None:
text += '^{' + self.preprocess_contents_latex(nepsilon) + '}'
(od, md, cd) = _delims(nsizespec, '(', r'\Vert', ')')
if nstate.isNodeType(latexwalker.LatexGroupNode) \
and len(nstate.nodelist) \
and nstate.nodelist[0].isNodeType(latexwalker.LatexCharsNode) \
and nstate.nodelist[0].chars.lstrip().startswith('*'):
# remove '*'
statelatex = self.preprocess_contents_latex(nstate).lstrip(' \t*')
else:
if process_arg_subscripts:
statelatex = self.preprocess_contents_latex(nstate) + '_{' \
+ tX + r'\to ' + tXp + '}'
else:
statelatex = self.preprocess_contents_latex(nstate) + '_{' + tXp \
+ 'R_{' + tX + '}}'
text += od + statelatex + r'\,' + md + r'\,' + \
self.preprocess_contents_latex(nGX) + r',\,' \
+ self.preprocess_contents_latex(nGXp) + cd
return text
raise ValueError("Unknown phfqit macro type: {!r}".format(m))
def _delims(sizenode, opendelim, middelim, closedelim):
if sizenode is None:
return (opendelim, middelim, closedelim)
if sizenode.isNodeType(latexwalker.LatexGroupNode):
assert( len(sizenode.nodelist) == 1 )
sizenode = sizenode.nodelist[0]
if sizenode.isNodeType(latexwalker.LatexCharsNode) and sizenode.chars == '*':
return (r'\mathopen{}\left'+opendelim,
r'\mathclose{}\middle'+middelim+r'\mathopen{}',
r'\right'+closedelim+r'\mathclose{}')
if sizenode.isNodeType(latexwalker.LatexMacroNode):
mname = sizenode.macroname
return (r'\mathopen{}'+'\\'+mname+'l '+opendelim, # \bigl(
r'\mathopen{}'+'\\'+mname+' '+middelim, # \big|
r'\mathopen{}'+'\\'+mname+'r '+closedelim) # \bigr)
raise ValueError("unexpected optional sizing node : "+repr(sizenode))
def _delimtype(sizenode):
if sizenode is None:
return None
if sizenode.isNodeType(latexwalker.LatexGroupNode):
assert( len(sizenode.nodelist) == 1 )
sizenode = sizenode.nodelist[0]
if sizenode.isNodeType(latexwalker.LatexCharsNode) and sizenode.chars == '*':
return '*'
if sizenode.isNodeType(latexwalker.LatexMacroNode):
return '\\'+sizenode.macroname
# Delimiter specifications for the phfqit "delimited expression" macros.
# Each entry maps a macro name (without the leading backslash) to either
#   (open-delim, close-delim)                     -- one-argument macro, or
#   (open-delim, contents-template, close-delim)  -- template with %(n)s
#                                                    argument placeholders.
# Templates may also use %(delimsize)s (the relevant sizing command) and
# the %(phfqit...Space)s fine-tuning spacing placeholders.
mathtools_delims_macros = {
    'abs': (r'\lvert', r'\rvert'),
    'norm': (r'\lVert', r'\rVert'),
    'avg': (r'\langle', r'\rangle'),

    # kets, bras and matrix elements
    'ket': (r'\lvert', r'{%(1)s}', r'\rangle'),
    'bra': (r'\langle', r'{%(1)s}', r'\rvert'),
    'braket': (r'\langle', r'{%(1)s}%(phfqitKetsBarSpace)s%(delimsize)s\vert\phfqitKetsBarSpace{%(2)s}',
               r'\rangle'),
    'ketbra': (r'\lvert', r'{%(1)s}%(delimsize)s\rangle %(phfqitKetsRLAngleSpace)s%(delimsize)s\langle{%(2)s}',
               r'\rvert'),
    'proj': (r'\lvert', r'{%(1)s}%(delimsize)s\rangle %(phfqitKetsRLAngleSpace)s%(delimsize)s\langle{%(1)s}',
             r'\rvert'),
    'matrixel': (r'\langle',
                 r'{%(1)s}%(phfqitKetsBarSpace)s%(delimsize)s\vert %(phfqitKetsBarSpace)s{%(2)s}'
                 +r'%(phfqitKetsBarSpace)s%(delimsize)s\vert %(phfqitKetsBarSpace)s{%(3)s}',
                 r'\rangle'),
    'dmatrixel': (r'\langle',
                  r'{%(1)s}%(phfqitKetsBarSpace)s%(delimsize)s\vert %(phfqitKetsBarSpace)s{%(2)s}'
                  +r'%(phfqitKetsBarSpace)s%(delimsize)s\vert %(phfqitKetsBarSpace)s{%(1)s}',
                  r'\rangle'),
    'innerprod': (r'\langle',
                  r'{%(1)s}%(phfqitBeforeCommaSpace)s,%(phfqitAfterCommaSpace)s{%(2)s}',
                  r'\rangle'),

    # "oval" variants with double angle brackets
    'oket': (r'\lvert', r'{%(1)s}', r'\rrangle'),
    'obra': (r'\llangle', r'{%(1)s}', r'\rvert'),
    'obraket': (r'\llangle', r'{%(1)s}%(phfqitOKetsBarSpace)s%(delimsize)s\vert %(phfqitOKetsBarSpace)s{%(2)s}',
                r'\rrangle'),
    'oketbra': (r'\lvert', r'{%(1)s}%(delimsize)s\rrangle %(phfqitOKetsRLAngleSpace)s%(delimsize)s\llangle{%(2)s}',
                r'\rvert'),
    'oproj': (r'\lvert', r'{%(1)s}%(delimsize)s\rrangle %(phfqitOKetsRLAngleSpace)s%(delimsize)s\llangle{%(1)s}',
              r'\rvert'),
    'omatrixel': (r'\llangle',
                  r'{%(1)s}%(phfqitOKetsBarSpace)s%(delimsize)s\vert %(phfqitOKetsBarSpace)s{%(2)s}'
                  +r'%(phfqitOKetsBarSpace)s%(delimsize)s\vert %(phfqitOKetsBarSpace)s{%(3)s}',
                  r'\rrangle'),
    'odmatrixel': (r'\llangle',
                   r'{%(1)s}%(phfqitOKetsBarSpace)s%(delimsize)s\vert %(phfqitOKetsBarSpace)s{%(2)s}'
                   +r'%(phfqitOKetsBarSpace)s%(delimsize)s\vert %(phfqitOKetsBarSpace)s{%(1)s}',
                   r'\rrangle'),

    # real intervals; open ends use reversed square brackets
    'intervalc': (r'[', r'{%(1)s\mathclose{},\mathopen{}%(2)s}', r']'),
    'intervalo': (r']', r'{%(1)s\mathclose{},\mathopen{}%(2)s}', r'['),
    'intervalco': (r'[', r'{%(1)s\mathclose{},\mathopen{}%(2)s}', r'['),
    'intervaloc': (r']', r'{%(1)s\mathclose{},\mathopen{}%(2)s}', r']'),
}
def gate(x):
    r"""Return LaTeX typesetting gate name *x* in lowercase small caps.

    The ``\ifmmode`` switch makes the same output work both inside and
    outside math mode (the text-mode branch adds ``\rmfamily``).
    """
    smallcaps = r'\textsc{\lowercase{' + x + r'}}'
    return r'\ifmmode' + smallcaps + r'\else{\rmfamily' + smallcaps + r'}\fi'
# Default substitution rules for simple phfqit macros.  Each value is either
# a plain replacement string (macro takes no arguments) or a dict/dict()
# with a 'qitargspec' argument spec and a 'repl' template using %(n)s
# argument placeholders (see the ExpandMacros docstring for the syntax).
simple_substitution_macros = {
    r'Hs': r'\mathscr{H}',
    r'Ident': r'\mathds{1}',

    # bits and gates
    r'bit': {'qitargspec': '{', 'repl': r'\texttt{%(1)s}'},
    r'bitstring': {'qitargspec': '{', 'repl': r'\ensuremath{\underline{\overline{\texttt{%(1)s}}}}'},
    r'gate': {'qitargspec': '{',
              'repl': gate("%(1)s") },
    r'AND': gate('And'),
    r'XOR': gate('Xor'),
    r'CNOT': gate('C-Not'),
    r'NOT': gate('Not'),
    r'NOOP': gate('No-Op'),

    # math groups (argument in parentheses, e.g. \uu(n) -> u(n))
    'uu': dict(qitargspec='(', repl=r'\mathrm{u}({%(1)s})'),
    'UU': dict(qitargspec='(', repl=r'\mathrm{U}({%(1)s})'),
    'su': dict(qitargspec='(', repl=r'\mathrm{su}({%(1)s})'),
    'SU': dict(qitargspec='(', repl=r'\mathrm{SU}({%(1)s})'),
    'so': dict(qitargspec='(', repl=r'\mathrm{so}({%(1)s})'),
    'SO': dict(qitargspec='(', repl=r'\mathrm{SO}({%(1)s})'),
    #'sl': dict(qitargspec='(', repl=r'\mathrm{sl}({%(1)s})'), # not in phfqit -- why? should add it there
    #'SL': dict(qitargspec='(', repl=r'\mathrm{SL}({%(1)s})'),
    'GL': dict(qitargspec='(', repl=r'\mathrm{GL}({%(1)s})'),
    'SN': dict(qitargspec='(', repl=r'\mathrm{S}_{%(1)s}'),
}
# Default operator-name macros (without the leading backslash) mapped to
# the operator text they display; formatted via the `math_operator_fmt`
# template (default \operatorname{...}).
math_operators = {
    'tr': 'tr',
    'supp': 'supp',
    'rank': 'rank',
    'linspan': 'span',
    'spec': 'spec',
    'diag': 'diag',
    'Re': 'Re',
    'Im': 'Im',
    'poly': 'poly',
}

# Matches \hspace{...} and \hspace*{...} -- the fine-tuning spacing
# commands mentioned by the `subst_use_hspace` option (presumably used to
# strip them when that option is disabled; usage is outside this chunk).
rx_hspace = re.compile(r'\\hspace\*?\{[^}]+\}')
def _delempties(d):
delkeys = [k for k, v in d.items() if v is None]
for k in delkeys:
del d[k]
class ExpandMacros(BaseFix):
r"""
Expand various macros defined by the {phfqit} package.
If applied along with :py:class:`latexpp.fixes.pkg.phfqit.ExpandQitObjects`,
the dependency on package {phfqit} should be removed.
Arguments:
- `subst`: a dictionary of substitutions to perform. The dictionary keys
are macro names without leading backslash, and values are dictionaries of
the form ``{'qitargspec': <qitargspec>, 'repl': <repl>}``. This has a
similar syntax to the :py:class:`latexpp.fixes.macro_subst.Subst` fix
class, but argument parsing allows an extended syntax. Instead of
specifying an `'argspec': <argspec>`, you specify `'qitargspec':
<qitargspec>` which provides argument parsing extensions to the usual
`argspec`.
Each character in `<qitargspec>` is one of:
- '*', '[', '{' represent the same kind of arguments as for 'argspec' in
:py:class:`latexpp.fixes.macro_subst.Subst`;
- '(' represents a mandatory argument in parentheses;
- '`' represents an optional argument introduced by ```<token or group>``;
- '_' represents an optional argument introduced by ``_<token or group>``;
- or '^' which represents an optional argument introduced by ``^<token or
group>``.
As for :py:class:`latexpp.fixes.macro_subst.Subst`, arguments are
available in the replacement string `<repl>` via the syntax ``%(n)s``
where `n` is the argument number.
A default set of substitutions are provided according to the macros
defined in the {phfqit} package; arguments here override the defaults.
You can disable individual default substitutions by providing the value
`None` (`null` in the YAML file) for the given macro name in the `subst`
dictionary.
- `ops`: a dictionary of "operator names" to substitute for. This is a
dictionary ``{<opname>: <opstring>, ...}`` where `<opname>` is the macro
name of the operator without leading backslash (e.g., ``tr`` for "trace"),
and `<opstring>` is the replacement LaTeX string that will be formatted as
an operator name. See `math_operator_fmt=` for how operators are
formatted.
A default set of operator names are provided according to the macros
defined in the {phfqit} package; arguments here override the defaults.
You can disable individual default operator names by providing the value
`None` (`null` in the YAML file) for the given operator name in the `ops`
dictionary.
- `math_operator_fmt`: The template string to use to format an operator. By
default, we use `\\operatorname{...}` to format the operator. The
template should contain the string `%(opname)s` which will be replaced by
the actual operator name. The default value is
``\operatorname{%(opname)s}``; if you prefer to use ``\mbox`` for
operators, you could set this to ``\mbox{%(opname)s}``.
- `delims`: A dictionary specifying macros that format delimited expressions
(such as `\\abs`, `\\ket`, `\\norm`, etc.). These macros take an optional
star (which indicates that the delimiters should be latex-dynamically
sized with ``\left`` and ``\right``), or an optional sizing macro in
square braces (such as ``\norm[\big]{...}``). After the optional star and
optional argument, the macro must take a fixed number of mandatory
arguments (e.g., one for ``\norm`` but two for ``\ketbra`` and three for
``\matrixel``).
The `delims` argument is a dictionary ``{<delim-macro-name>: <delim-spec>,
...}`` where `<delim-macro-name>` is the name of the macro without leading
backslash (e.g., 'ket' or 'abs'). The `<delim-spec>` is either:
- `<delim-spec>=(<left-delim>, <right-delim>)`, i.e., a two-item tuple or
list specifying the left and right delimiter. The macro must take a
single mandatory argument, which will be typeset between the two
delimiters. One must be able to size the delimiters using sizing
commands such as ``\big`` or ``\left``/``\right``.
- `<delim-spec>=(<left-delim>, <contents-repl>, <right-delim>)`, i.e., a
three-item tuple or list. The `<left-delim>` and `<right-delim>` are as
above. The `<contents-repl>` specifies how to format the contents
between the two delimiters, and should contain replacement strings of
the form ``%(n)s`` that expand into the `n`-th mandatory argument of the
macro. The number of mandatory arguments that the macro accepts is
inferred by inspecting the replacement string and looking for the
highest `n` in these replacement placeholders. Furthermore, you can use
the replacement placeholder ``%(delimsize)s``, which expands to the
relevant sizing command (e.g., ``\big``, ``\middle`` to match
``\left``/``\right``, or nothing if no sizing options are given) and
which can be placed immediately before a delimiter.
- `subst_use_hspace`: In all the above substitutions (including delimiters),
there are some custom sizing corrections in the form of ``\hspace*{XXex}``
that adjust the spacing between the different symbols in the expansion of
those macros. By default, they are kept in the replacement latex code so
that the document looks the same when compiled. If instead, you would
like simple substitutions without these fine-tuning spacing commands, set
`subst_use_hspace=False`.
"""
def __init__(self, *,
subst=None, ops=None, delims=None,
math_operator_fmt=r'\operatorname{%(opname)s}',
subst_use_hspace=True,
subst_space=None,
):
super().__init__()
if subst is None:
subst = {}
if ops is None:
ops = {}
if delims is None:
delims = {}
the_simple_substitution_macros = {}
the_simple_substitution_macros.update(simple_substitution_macros)
the_simple_substitution_macros.update(subst)
# remove any items which have a None value (used to indicate a default
# key should be removed from the YAML config)
the_math_operators = {}
the_math_operators.update(math_operators)
the_math_operators.update(ops)
the_simple_substitution_macros.update(**{
opname: math_operator_fmt%dict(opname=opv)
for opname, opv in the_math_operators.items()
})
# delimiter macros --> substitution rules
self.mathtools_delims_macros = dict(mathtools_delims_macros)
self.mathtools_delims_macros.update(delims)
_delempties(self.mathtools_delims_macros)
def delim_cfg(delimtuple):
if len(delimtuple) == 2:
return dict(qitargspec='`*[{',
repl=r'%(open_delim)s{%(1)s}%(close_delim)s')
numargs | |
xS0_E : float (arcsec)
Position of the source in RA relative to the
geometric center of the lens system at time t0.
xS0_N : float (arcsec)
Position of the source in Dec relative to the
geometric center of the lens system at time t0.
beta : float (mas)
The closest projected approach between the source
and the geometric center of the lens system in heliocentric
coordinates.
muL_E : float (mas/yr)
Proper motion of the lens system in RA direction
muL_N : float (mas/yr)
Proper motion of the lens system in the Dec direction
muS_E : float (mas/yr)
Proper motion of the source in the RA direction
muS_N : float (mas/yr)
Proper motion of the source in the Dec direction
dL : float (pc)
Distance to the lens system
dS : float (pc)
Distance to the source
sep : float (arcsec)
Separation between the binary lens stars,
projected onto the sky.
alpha : float (degrees)
Angle of the project binary separation vector on the
sky. The separation vector points from the secondary
to the primary and the angle alpha is measured in
degrees East of North.
mag_src : float (mag)
Brightness of the source.
b_sff : float
Source flux fraction = fluxS / (fluxS + fluxL1 + fluxL2 + fluxN)
"""
start = time.time()
if parallax:
psbl = model.PSBL_PhotAstrom_Par_Param1(mL1, mL2, t0, xS0_E, xS0_N,
beta, muL_E, muL_N, muS_E, muS_N, dL, dS,
sep, alpha, [b_sff], [mag_src],
raL=raL, decL=decL, root_tol=1e-8)
else:
psbl = model.PSBL_PhotAstrom_noPar_Param1(mL1, mL2, t0, xS0_E, xS0_N,
beta, muL_E, muL_N, muS_E, muS_N, dL, dS,
sep, alpha, [b_sff], [mag_src],
root_tol=1e-8)
# Simulate photometric and astrometric observations every day.
t_pho = np.array([], dtype=float)
t_ast = np.array([], dtype=float)
t_pho = np.arange(54000, 60000, 1)
t_ast = np.arange(54000, 60000, 1)
t_mod = np.arange(t_pho.min(), t_pho.max(), 1)
i_pho, A_pho = psbl.get_all_arrays(t_pho)
i_ast, A_ast = psbl.get_all_arrays(t_ast)
i_mod, A_mod = psbl.get_all_arrays(t_mod)
imag_pho = psbl.get_photometry(t_pho, amp_arr=A_pho)
imag_mod = psbl.get_photometry(t_mod, amp_arr=A_mod)
# Make the photometric observations.
# Assume 0.005 mag photometric errors at I=19.
# This means Signal = 40000 e- at I=19.
flux0 = 40000.0
imag0 = 19.0
flux_pho = flux0 * 10 ** ((imag_pho - imag0) / -2.5)
flux_pho_err = flux_pho ** 0.5
flux_pho += np.random.randn(len(t_pho)) * flux_pho_err
imag_pho = -2.5 * np.log10(flux_pho / flux0) + imag0
imag_pho_err = 1.087 / flux_pho_err
# Make the astrometric observations.
# Assume 0.15 milli-arcsec astrometric errors in each direction at all epochs.
# Q: Where does the 0.15 milliarcsec error come from?
lens_pos = psbl.get_lens_astrometry(t_mod)
lens1_pos, lens2_pos = psbl.get_resolved_lens_astrometry(t_mod)
srce_pos = psbl.get_astrometry_unlensed(t_mod)
srce_pos_lensed_res = psbl.get_resolved_astrometry(t_mod)
srce_pos_lensed_unres = psbl.get_astrometry(t_mod)
srce_pos_lensed_res = np.ma.masked_invalid(srce_pos_lensed_res)
stop = time.time()
fmt = 'It took {0:.2f} seconds to evaluate the model at {1:d} time steps'
print(fmt.format(stop - start, len(t_mod) + len(t_ast) + len(t_pho)))
##########
# Plot photometry
##########
plt.figure(1)
plt.clf()
plt.errorbar(t_pho, imag_pho, yerr=imag_pho_err, fmt='k.', label='Sim Obs',
alpha=0.2)
plt.plot(t_mod, imag_mod, color='red', label='Model')
plt.gca().invert_yaxis()
plt.xlabel('Time (MJD)')
plt.ylabel('I (mag)')
plt.legend()
##########
# Plot astrometry
##########
plt.figure(2)
plt.clf()
plt.plot(lens_pos[:, 0], lens_pos[:, 1],
c='gray', marker='.', linestyle='none', alpha=0.2,
label='lens system')
plt.plot(lens1_pos[:, 0], lens1_pos[:, 1],
c='black', linestyle='none', marker='o',
label='lens primary')
plt.plot(lens2_pos[:, 0], lens2_pos[:, 1],
c='black', linestyle='none', marker='o', mfc='none',
label='lens secondary')
plt.scatter(srce_pos[:, 0], srce_pos[:, 1],
c=t_mod, marker='.', s=2, alpha=0.2,
label='src unlensed')
colors = ['navy', 'blue', 'slateblue', 'darkslateblue', 'indigo']
for ii in range(srce_pos_lensed_res.shape[1]):
plt.plot(srce_pos_lensed_res[:, ii, 0], srce_pos_lensed_res[:, ii, 1],
c=colors[ii], linestyle='none', marker='.', markersize=1,
alpha=0.5,
label='src lensed img{0:d}'.format(ii + 1))
plt.plot(srce_pos_lensed_unres[:, 0], srce_pos_lensed_unres[:, 1],
c='red', linestyle='-',
label='src lensed unres')
pos_ast_tmp = psbl.get_astrometry(t_ast, image_arr=i_ast, amp_arr=A_ast)
pos_ast_err = np.ones((len(t_ast), 2), dtype=float) * 0.15 * 1e-3
pos_ast = pos_ast_tmp + pos_ast_err * np.random.randn(len(t_ast), 2)
plt.errorbar(pos_ast[:, 0], pos_ast[:, 1],
xerr=pos_ast_err[:, 0], yerr=pos_ast_err[:, 0],
fmt='k.', color='black', alpha=0.2)
plt.gca().invert_xaxis()
plt.xlabel(r'$\Delta \alpha^*$ (mas)')
plt.ylabel(r'$\Delta \delta$ (mas)')
plt.legend(fontsize=8)
plt.subplots_adjust(left=0.25, top=0.8)
p2 = plt.gca().get_position().get_points().flatten()
ax_cbar = plt.gcf().add_axes([p2[0], 0.82, p2[2] - p2[0], 0.05])
plt.colorbar(cax=ax_cbar, orientation='horizontal', label='Time (MJD)',
ticklocation='top')
data = {}
data['t_phot1'] = t_pho
data['mag1'] = imag_pho
data['mag_err1'] = imag_pho_err
data['phot_files'] = ['fake_data_parallax_phot1']
data['ast_files'] = ['fake_data_parallax_ast1']
data['t_ast1'] = t_ast
data['xpos1'] = pos_ast[:, 0]
data['ypos1'] = pos_ast[:, 1]
data['xpos_err1'] = pos_ast_err[:, 0]
data['ypos_err1'] = pos_ast_err[:, 1]
data['raL'] = raL
data['decL'] = decL
data['target'] = target
data['phot_data'] = 'sim'
data['ast_data'] = 'sim'
params = {}
params['mL1'] = mL1
params['mL2'] = mL2
params['sep'] = sep
params['alpha'] = alpha
params['t0'] = t0
params['xS0_E'] = xS0_E
params['xS0_N'] = xS0_N
params['beta'] = beta
params['muS_E'] = muS_E
params['muS_N'] = muS_N
params['muL_E'] = muL_E
params['muL_N'] = muL_N
params['dL'] = dL
params['dS'] = dS
params['b_sff'] = b_sff
params['mag_src'] = mag_src
out_name = outdir + outroot + '_movie.gif'
if animate:
ani = plot_models.animate_PSBL(psbl, outfile=out_name)
else:
ani = None
# np.savetxt('fake_data_continuous_tiny_err_PSBL_phot.dat', (data['t_phot1'], data['mag1'], data['mag_err1']))
return data, params, psbl, ani
def fake_data_PSBL_phot(outdir='', outroot='psbl',
raL=259.5, decL=-29.0,
t0=57000.0, u0_amp=0.8, tE=500.0,
piE_E=0.02, piE_N=0.02,
q=0.5, sep=5.0, phi=75.0, b_sff1=0.5, mag_src1=16.0,
parallax=True, target='Unknown', animate=False):
"""
Optional Inputs
---------------
outdir : str
The output directory where figures and data are saved.
outroot : str
The output file name root for a saved figure.
raL : float (deg)
The right ascension in degrees. Needed if parallax=True.
decL : float (deg)
The declination in degrees. Needed if parallax=True.
t0: float
Time of photometric peak, as seen from Earth [MJD]
u0_amp: float
Angular distance between the lens and source on the plane of the
sky at closest approach in units of thetaE. It can be
positive (u0_hat cross thetaE_hat pointing away from us) or
negative (u0_hat cross thetaE_hat pointing towards us).
tE: float
Einstein crossing time. [MJD]
piE_E: float
The microlensing parallax in the East direction in units of thetaE
piE_N: float
The microlensing parallax in the North direction in units of thetaE
q: float
Mass ratio (low-mass / high-mass)
sep: float
Angular separation of the two lenses in units of thetaE where
thetaE is defined with the total binary mass.
phi: float
Angle made between the binary axis and the relative proper motion vector,
measured in degrees.
b_sff: array or list
The ratio of the source flux to the total (source + neighbors + lens)
b_sff = f_S / (f_S + f_L + f_N). This must be passed in as a list or
array, with one entry for each photometric filter.
mag_src: array or list
Photometric magnitude of the source. This must be passed in as a
list or array, with one entry for each photometric filter.
"""
start = time.time()
if parallax:
psbl = model.PSBL_Phot_Par_Param1(t0, u0_amp, tE, piE_E, piE_N, q, sep, phi,
[b_sff1], [mag_src1],
raL=raL, decL=decL, root_tol=1e-8)
else:
psbl = model.PSBL_Phot_noPar_Param1(t0, u0_amp, tE, piE_E, piE_N, q, sep, phi,
[b_sff1], [mag_src1],
root_tol=1e-8)
# Simulate
# photometric observations every 1 day and
# for the bulge observing window. Observations missed
# for 125 days out of 365 days for photometry.
t_pho = np.array([], dtype=float)
for year_start in np.arange(54000, 60000, 365.25):
phot_win = 240.0
phot_start = (365.25 - phot_win) / 2.0
t_pho_new = np.arange(year_start + phot_start,
year_start + phot_start + phot_win, 1)
t_pho = np.concatenate([t_pho, t_pho_new])
t_mod = np.arange(t_pho.min(), t_pho.max(), 1)
i_pho, A_pho = psbl.get_all_arrays(t_pho)
i_mod, A_mod = psbl.get_all_arrays(t_mod)
imag_pho = psbl.get_photometry(t_pho, amp_arr=A_pho)
imag_mod = psbl.get_photometry(t_mod, amp_arr=A_mod)
# Make the photometric observations.
# Assume 0.05 mag photometric errors at I=19.
# This means Signal = 400 e- at I=19.
flux0 = 400.0
imag0 = 19.0
flux_pho = flux0 * 10 ** ((imag_pho - imag0) / -2.5)
flux_pho_err = flux_pho ** 0.5
flux_pho += np.random.randn(len(t_pho)) * flux_pho_err
imag_pho = -2.5 * np.log10(flux_pho / flux0) + imag0
imag_pho_err = 1.087 / flux_pho_err
stop = time.time()
fmt = 'It took {0:.2f} seconds to evaluate the model at {1:d} time steps'
print(fmt.format(stop - start, len(t_mod) + len(t_pho)))
##########
# Plot photometry
##########
plt.figure(1)
plt.clf()
plt.errorbar(t_pho, imag_pho, yerr=imag_pho_err, fmt='k.', label='Sim Obs',
alpha=0.2)
plt.plot(t_mod, imag_mod, color='red', label='Model')
plt.gca().invert_yaxis()
plt.xlabel('Time (MJD)')
plt.ylabel('I (mag)')
plt.legend()
data = {}
data['t_phot1'] = t_pho
data['mag1'] = imag_pho
data['mag_err1'] = imag_pho_err
data['phot_files'] = ['fake_data_parallax_phot1']
data['ast_files'] = ['fake_data_parallax_ast1']
data['target'] = target
data['phot_data'] = 'sim'
data['ast_data'] = 'sim'
data['raL'] = raL
data['decL'] = decL
params = {}
params['t0'] = t0
params['u0_amp'] = u0_amp
params['tE'] = tE
params['piE_E'] = piE_E
params['piE_N'] = | |
    protocol.
    Initialised by a call to the :class:`HttpClient.request` method.
    """
    _has_proxy = False       # NOTE(review): presumably set when a proxy tunnel is used -- confirm
    _data_sent = None
    _cookies = None          # server cookies, exposed via the ``cookies`` property
    _raw = None              # lazily-created HttpStream (see the ``raw`` property)
    content = None           # accumulated response body bytes (see ``on_body``)
    headers = None           # response headers, filled by ``on_header``
    parser = None            # HTTP parser, created in ``start_request``
    version = None           # HTTP version reported by the parser
    status_code = None       # integer status code, set in ``on_headers_complete``
    request_again = None
    ONE_TIME_EVENTS = ('pre_request', 'on_headers', 'post_request')
def __repr__(self):
    """Short form showing the status code, e.g. ``<Response [200]>``."""
    status = self.status_code or 'None'
    return '<Response [%s]>' % status
__str__ = __repr__
@property
def url(self):
    """The full url of the request, or ``None`` when no request is set."""
    req = self.request
    return req.url if req else None
@property
def history(self):
    """Redirect history of the request, oldest response first.

    A list of :class:`.HttpResponse` objects, or ``None`` when no
    request is attached.
    """
    req = self.request
    return req.history if req else None
@property
def ok(self):
    """Whether the exchange is (so far) successful.

    Once a status code is known, success is determined by the code;
    before that, the response counts as OK while the ``post_request``
    event has not fired yet.
    """
    if not self.status_code:
        return not self.event('post_request').fired()
    return is_succesful(self.status_code)
@property
def cookies(self):
    """Dictionary of cookies set by the server or ``None``.
    """
    return self._cookies
@property
def encoding(self):
    """Charset declared in the ``Content-Type`` header, or ``None``."""
    content_type = self.headers.get('content-type')
    if not content_type:
        return None
    _, options = parse_options_header(content_type)
    return options.get('charset')
@property
def raw(self):
    """The raw asynchronous HTTP response stream, created on first use."""
    stream = self._raw
    if stream is None:
        stream = self._raw = HttpStream(self)
    return stream
@property
def links(self):
    """Parsed ``Link`` header of the response as a dict (empty if absent).

    Each link is keyed by its ``rel`` attribute, falling back to its url.
    """
    headers = self.headers or {}
    raw_header = headers.get('link')
    parsed = {}
    if raw_header:
        for link in parse_header_links(raw_header):
            key = link.get('rel') or link.get('url')
            parsed[key] = link
    return parsed
@property
def reason(self):
    # Standard reason phrase for the status code (``None`` if unknown).
    return responses.get(self.status_code)
@property
def text(self):
    """The body decoded as a string (``''`` when there is no content).

    Uses the charset from the ``Content-Type`` header, defaulting to
    utf-8.
    """
    content = self.content
    if not content:
        return ''
    return content.decode(self.encoding or 'utf-8')
def json(self):
    """Decode content as a JSON object.
    """
    return _json.loads(self.text)
def decode_content(self):
    """Return the best possible representation of the response body.

    Chooses by the ``Content-Type`` header: parsed JSON, decoded text,
    a list of form fields, or the raw content bytes as a fallback.
    """
    raw_ct = self.headers.get('content-type')
    if not raw_ct:
        return self.content
    mimetype, options = parse_options_header(raw_ct)
    if mimetype in JSON_CONTENT_TYPES:
        return self.json()
    if mimetype.startswith('text/'):
        return self.text
    if mimetype == FORM_URL_ENCODED:
        return parse_qsl(self.content.decode(options.get('charset')),
                         keep_blank_values=True)
    return self.content
def raise_for_status(self):
    """Raise stored :class:`HTTPError` or :class:`URLError`, if occurred.

    Raises :class:`HttpConnectionError` when no status code was received
    at all, :class:`HttpRequestException` for 4xx/5xx responses, and
    returns silently when the response is OK.
    """
    if self.ok:
        return
    reason = self.reason or 'No response from %s' % self.url
    if not self.status_code:
        raise HttpConnectionError(reason, response=self)
    side = 'Client' if 400 <= self.status_code < 500 else 'Server'
    http_error_msg = '%s %s Error - %s - %s %s' % (
        self.status_code, side, reason, self.request.method, self.url)
    raise HttpRequestException(http_error_msg, response=self)
def info(self):
    """Required by python CookieJar.
    Return :attr:`headers` wrapped in an :class:`InfoHeaders` adapter.
    """
    return InfoHeaders(self.headers)
# #####################################################################
# # PROTOCOL CONSUMER IMPLEMENTATION
def start_request(self):
    """Encode and send the request headers on the transport.

    The body is written immediately afterwards unless the request
    carries an ``Expect: 100-continue`` header, in which case
    :meth:`write_body` is deferred; an empty header block also forces
    the body write.
    """
    request = self.request
    self.parser = request.new_parser(self)
    encoded = request.encode()
    self.connection.transport.write(encoded)
    waiting_for_continue = request.headers.get('expect') == '100-continue'
    if not encoded or not waiting_for_continue:
        self.write_body()
def feed_data(self, data):
    """Feed raw bytes from the transport into the HTTP parser.

    A parser upgrade notification (e.g. for a protocol switch) is
    deliberately swallowed here.
    """
    try:
        self.parser.feed_data(data)
    except http.HttpParserUpgrade:
        pass
def on_header(self, name, value):
    # Parser callback: decode one raw header pair and store it.
    self.headers.add(name.decode(CHARSET), value.decode(CHARSET))
def on_headers_complete(self):
    """Parser callback fired once all headers have been received.

    Records the status code and HTTP version, then fires the
    ``on_headers`` event.  A HEAD response carries no body, so the
    exchange is considered complete right away.
    """
    request = self.request
    self.status_code = self.parser.get_status_code()
    self.version = self.parser.get_http_version()
    self.event('on_headers').fire()
    if request.method == 'HEAD':
        self.event('post_request').fire()
def on_body(self, body):
    """Parser callback receiving one chunk of the response body.

    In streaming mode (requested via the request, or once a raw stream
    exists) the chunk is handed to the raw stream; otherwise chunks are
    accumulated on :attr:`content`.
    """
    if self.request.stream or self._raw:
        self.raw.feed_data(body)
        return
    self.content = body if self.content is None else self.content + body
def on_message_complete(self):
    # Parser callback: decompress the body if needed, then finish the
    # exchange by firing ``post_request``.
    self.producer.maybe_decompress(self)
    self.fire_event('post_request')
def write_body(self):
    # Delegate writing of the request body to the request object.
    self.request.write_body(self.connection)
class HttpClient(AbstractClient):
"""A client for HTTP/HTTPS servers.
It handles pool of asynchronous connections.
:param pool_size: set the :attr:`pool_size` attribute.
:param store_cookies: set the :attr:`store_cookies` attribute
.. attribute:: headers
Default headers for this :class:`HttpClient`.
Default: :attr:`DEFAULT_HTTP_HEADERS`.
.. attribute:: cookies
Default cookies for this :class:`HttpClient`.
.. attribute:: store_cookies
If ``True`` it remembers response cookies and sends them back to
servers.
Default: ``True``
.. attribute:: timeout
Default timeout for requests. If None or 0, no timeout on requests
.. attribute:: proxies
Dictionary of proxy servers for this client.
.. attribute:: pool_size
The size of a pool of connection for a given host.
.. attribute:: connection_pools
Dictionary of connection pools for different hosts
.. attribute:: DEFAULT_HTTP_HEADERS
Default headers for this :class:`HttpClient`
"""
max_redirects = 10
"""Maximum number of redirects.
It can be overwritten on :meth:`request`.
"""
connection_pool = Pool
"""Connection :class:`.Pool` factory
"""
client_version = pulsar.SERVER_SOFTWARE
"""String for the ``User-Agent`` header.
"""
version = 'HTTP/1.1'
"""Default HTTP request version for this :class:`HttpClient`.
It can be overwritten on :meth:`request`.
"""
DEFAULT_HTTP_HEADERS = (
('Connection', 'Keep-Alive'),
('Accept', '*/*'),
('Accept-Encoding', 'deflate'),
('Accept-Encoding', 'gzip')
)
DEFAULT_TUNNEL_HEADERS = (
('Connection', 'Keep-Alive'),
('Proxy-Connection', 'Keep-Alive')
)
request_parameters = (
'max_redirects',
'decompress',
'websocket_handler',
'version',
'verify',
'stream',
'cert'
)
# Default hosts not affected by proxy settings. This can be overwritten
# by specifying the "no" key in the proxies dictionary
no_proxy = set(('localhost', platform.node()))
def __init__(self, proxies=None, headers=None, verify=True,
             cookies=None, store_cookies=True, cert=None,
             max_redirects=10, decompress=True, version=None,
             websocket_handler=None, parser=None, trust_env=True,
             loop=None, client_version=None, timeout=None, stream=False,
             pool_size=10, frame_parser=None, logger=None,
             close_connections=False, keep_alive=None):
    """Set up per-client defaults, connection handling and request hooks.

    See the class docstring for the meaning of most parameters; the
    values given here become defaults which individual requests may
    override via the :attr:`request_parameters` names.
    """
    super().__init__(
        partial(Connection, HttpResponse),
        loop=loop,
        keep_alive=keep_alive or cfg_value('http_keep_alive')
    )
    self.logger = logger or LOGGER
    self.client_version = client_version or self.client_version
    self.connection_pools = {}
    self.pool_size = pool_size
    self.trust_env = trust_env
    self.timeout = timeout
    self.store_cookies = store_cookies
    self.max_redirects = max_redirects
    self.cookies = cookiejar_from_dict(cookies)
    self.decompress = decompress
    self.version = version or self.version
    # SSL Verification default
    self.verify = verify
    # SSL client certificate default, if String, path to ssl client
    # cert file (.pem). If Tuple, ('cert', 'key') pair
    self.cert = cert
    self.stream = stream
    self.close_connections = close_connections
    dheaders = CIMultiDict(self.DEFAULT_HTTP_HEADERS)
    dheaders['user-agent'] = self.client_version
    # override headers -- a None value removes a default header entirely
    if headers:
        for name, value in mapping_iterator(headers):
            if value is None:
                dheaders.pop(name, None)
            else:
                dheaders[name] = value
    self.headers = dheaders
    # explicit proxies win; otherwise fall back to environment proxies
    # (when trust_env is set), always keeping a "no proxy" entry
    self.proxies = dict(proxies or ())
    if not self.proxies and self.trust_env:
        self.proxies = get_environ_proxies()
        if 'no' not in self.proxies:
            self.proxies['no'] = ','.join(self.no_proxy)
    self.websocket_handler = websocket_handler
    self.http_parser = parser or http.HttpResponseParser
    self.frame_parser = frame_parser or websocket.frame_parser
    # Add hooks: cookie handling on headers, websocket upgrade before the
    # request, 100-continue and redirect handling after it
    self.event('on_headers').bind(handle_cookies)
    self.event('pre_request').bind(WebSocket())
    self.event('post_request').bind(Expect())
    self.event('post_request').bind(Redirect())
    self._decompressors = dict(
        gzip=GzipDecompress(),
        deflate=DeflateDecompress()
    )
# API
def connect(self, address):
    """Send a CONNECT request to ``address``.

    ``address`` may be a ``(host, port)`` tuple, which is joined into a
    single ``host:port`` string before the request is issued.
    """
    if isinstance(address, tuple):
        address = ':'.join('%s' % part for part in address)
    return self.request('CONNECT', address)
def get(self, url, **kwargs):
"""Sends a GET request and returns a :class:`.HttpResponse` object.
:params url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('GET', url, **kwargs)
def options(self, url, **kwargs):
"""Sends a OPTIONS request and returns a :class:`.HttpResponse` object.
:params url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('OPTIONS', url, **kwargs)
def head(self, url, **kwargs):
"""Sends a HEAD request and returns a :class:`.HttpResponse` object.
:params url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('HEAD', url, **kwargs)
def post(self, url, **kwargs):
"""Sends a POST request and returns a :class:`.HttpResponse` object.
:params url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('POST', url, **kwargs)
def put(self, url, **kwargs):
"""Sends a PUT request and returns a :class:`.HttpResponse` object.
:params url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('PUT', url, **kwargs)
def patch(self, url, **kwargs):
"""Sends a PATCH request and returns a :class:`.HttpResponse` object.
:params url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('PATCH', url, **kwargs)
def delete(self, url, **kwargs):
"""Sends a DELETE request and returns a :class:`.HttpResponse` object.
:params url: url for the new :class:`HttpRequest` object.
:param \*\*kwargs: Optional arguments for the :meth:`request` method.
"""
return self.request('DELETE', url, **kwargs)
def request(self, method, url, **params):
"""Constructs and sends a request to a remote server.
It returns a :class:`.Future` which results in a
:class:`.HttpResponse` object.
:param method: request method for the :class:`HttpRequest`.
:param url: URL for the :class:`HttpRequest`.
:param params: optional parameters for the :class:`HttpRequest`
initialisation.
:rtype: a coroutine
"""
response = self._request(method, url, **params)
if not self._loop.is_running():
return self._loop.run_until_complete(response)
else:
return response
def close(self):
"""Close all connections
"""
waiters = []
for p in self.connection_pools.values():
waiters.append(p.close())
self.connection_pools.clear()
return asyncio.gather(*waiters, loop=self._loop)
def maybe_decompress(self, response):
encoding = response.headers.get('content-encoding')
if encoding and response.request.decompress:
deco = self._decompressors.get(encoding)
if not deco:
self.logger.warning('Cannot decompress %s', encoding)
response.content = deco(response.content)
    async def __aenter__(self):
        # NOTE(review): closing all pooled connections on *enter* is
        # unusual -- async context managers normally only clean up in
        # __aexit__. Confirm this is intentional (fresh-state semantics)
        # rather than a copy-paste of __aexit__.
        await self.close()
        return self
    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Release all pooled connections on context exit. Nothing is
        # returned, so any in-flight exception propagates unsuppressed.
        await self.close()
# INTERNALS
async def _request(self, method, url, timeout=None, **params):
if timeout is None:
timeout = self.timeout
if method != 'HEAD':
params.setdefault('allow_redirects', True)
with async_timeout(self._loop, timeout):
nparams = params.copy()
nparams.update(((name, getattr(self, name)) for name in
self.request_parameters if name not in params))
request = HttpRequest(self, | |
{'error_message': "Subject " + subject_name + " Does Not Exist"}
return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
except Div.DoesNotExist:
response_data = {'error_message': "Division " + div + " Does Not Exist"}
return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
teacher = Teacher.objects.get(user=request.user)
if div.classteacher and div.classteacher.teacherID == teacher.teacherID:
lecs = Lecture.objects.filter(date=date, div=div, subject=subject)
else:
lecs = Lecture.objects.filter(date=date, teacher=teacher, div=div, subject=subject)
for attendance_object in attendance_list:
current_lecture = None
lecTime = attendance_object['time']
for lec in lecs:
if lec.getTimeString() == lecTime:
current_lecture = lec
for student_entry in attendance_object['attendance_list']:
student = Student.objects.get(sapID=student_entry['sapID'])
if int(student_entry['attendance']) == 1:
StudentLecture.objects.get_or_create(student=student, lecture=current_lecture)
else:
try:
sl = StudentLecture.objects.get(student=student, lecture=current_lecture)
sl.delete()
except StudentLecture.DoesNotExist:
pass
response_data = {'success_message': 'Successfully saved attendance data'}
return JsonResponse(response_data, status=status.HTTP_200_OK)
class DownloadCsv(generics.GenericAPIView):
    """Export a division's per-student attendance for one subject as CSV.

    URL kwargs: ``subject``, ``div`` (``"<yearname>_<division>"``) and
    optional ``date_from`` / ``date_to`` in ``d-m-Y`` format (each
    defaults to today when absent).
    """
    permission_classes = (IsAuthenticated,)

    def multiple_lectures(self, lecture):
        """Return how many lecture slots *lecture* spans (1-3) based on
        its duration in minutes; practicals always count as one slot.
        """
        if lecture.div.get_class_type() == 'Practical':
            return 1
        start = datetime.datetime.combine(lecture.date, lecture.startTime)
        end = datetime.datetime.combine(lecture.date, lecture.endTime)
        minutes = (end - start).total_seconds() / 60
        # NOTE(review): a lecture of exactly 210 minutes falls through to
        # 1 slot -- confirm whether the upper bound should be inclusive.
        if 90 < minutes <= 150:
            return 2
        elif 150 < minutes < 210:
            return 3
        return 1

    def get(self, request, *args, **kwargs):
        """Validate access, aggregate attendance and stream the CSV.

        Returns a 400 ``JsonResponse`` on any validation failure,
        otherwise a ``text/csv`` ``HttpResponse``.
        """
        subject_name = kwargs['subject']
        div = kwargs['div']
        try:
            date_from = kwargs['date_from']
            d, m, y = date_from.split('-')
            date_from = datetime.datetime(int(y), int(m), int(d)).date()
        except KeyError:
            date_from = datetime.date.today()
        try:
            date_to = kwargs['date_to']
            d, m, y = date_to.split('-')
            date_to = datetime.datetime(int(y), int(m), int(d)).date()
        except KeyError:
            date_to = datetime.date.today()
        teacher = Teacher.objects.get(user=request.user)
        yearname, division = div.split("_")
        year = Div.yearnameToYear(yearname)
        # Jan-May -> even semester, Jun-Dec -> odd semester.  Bug fix:
        # the original used ``> 6`` here, so June dates fell through to
        # the "not from the same semester" error; DownloadSAPSheet
        # already uses ``>= 6``.
        if date_from.month < 6 and date_to.month < 6:
            semester = year * 2
        elif date_from.month >= 6 and date_to.month >= 6:
            semester = year * 2 - 1
        else:
            response_data = {'error_message': "Dates are not from the same semester."}
            return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
        try:
            subject = Subject.objects.get(name=subject_name)
            div = Div.objects.get(division=division, semester=semester, calendar_year=datetime.date.today().year)
            if div.classteacher and div.classteacher.teacherID == teacher.teacherID:
                # Class teachers may export any subject their division has.
                if not SubjectTeacher.objects.filter(div=div, subject=subject).exists():
                    response_data = {'error_message': str(div) + " does not have subject " + subject_name}
                    return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
            else:
                # Other teachers must themselves teach the subject here.
                SubjectTeacher.objects.get(div=div, subject=subject, teacher=teacher)
        except Subject.DoesNotExist:
            response_data = {'error_message': "Subject " + subject_name + " Does Not Exist"}
            return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
        except Div.DoesNotExist:
            # ``div`` is still the raw URL kwarg (a str) on this path.
            response_data = {'error_message': "Division " + div + " Does Not Exist"}
            return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
        except SubjectTeacher.DoesNotExist:
            # Bug fix: ``div`` is a Div instance here, so the original
            # ``+ div +`` concatenation raised TypeError; wrap in str()
            # as DownloadSAPSheet does.
            response_data = {'error_message': "You do not have access to " + subject_name + " for " + str(div) + " data."}
            return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
        lecs = Lecture.objects.filter(date__lte=date_to, date__gte=date_from, div=div, subject=subject,
                                      attendanceTaken=True)
        # Total number of slots actually conducted in the range.
        total = sum(self.multiple_lectures(lec) for lec in lecs)
        student_list = Student.objects.filter(div=div).order_by('sapID')
        student_lectures = StudentLecture.objects.filter(lecture__in=lecs)
        attendance_list = []
        for student in student_list:
            relevant_student_lectures = student_lectures.filter(student=student)
            student_attended = sum(self.multiple_lectures(sl.lecture)
                                   for sl in relevant_student_lectures)
            student_json = StudentSerializer(student).data
            student_json["attendance_count"] = student_attended
            # No lectures in range -> report 100% instead of dividing by 0.
            if lecs:
                student_json["attendance_percentage"] = student_attended * 100 / total
            else:
                student_json["attendance_percentage"] = 100
            attendance_list.append(student_json)
        attendance_list.sort(key=lambda x: x["sapID"])
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'blob; filename="AttendanceData.csv"'
        csvwriter = csv.writer(response)
        csvwriter.writerow(["SAP ID", "Name", "Attendance Count (" + str(total) + ")", "Attendance Percentage"])
        for student in attendance_list:
            csvwriter.writerow([student["sapID"], student["name"], student["attendance_count"],
                                student["attendance_percentage"]])
        return response
class DownloadSAPSheet(generics.GenericAPIView):
permission_classes = (IsAuthenticated,)
def get(self, request, *args, **kwargs):
subject_name = kwargs['subject']
div = kwargs['div']
try:
date_from = kwargs['date_from']
d, m, y = date_from.split('-')
date_from = datetime.datetime(int(y), int(m), int(d)).date()
except KeyError:
date_from = datetime.date.today()
try:
date_to = kwargs['date_to']
d, m, y = date_to.split('-')
date_to = datetime.datetime(int(y), int(m), int(d)).date()
except KeyError:
date_to = datetime.date.today()
teacher = Teacher.objects.get(user=request.user)
yearname, division = div.split("_")
year = Div.yearnameToYear(yearname)
if date_from.month < 6 and date_to.month < 6:
semester = year * 2
elif date_from.month >= 6 and date_to.month >= 6:
semester = year * 2 - 1
else:
response_data = {'error_message': "Dates are not from the same semester."}
return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
try:
subject = Subject.objects.get(name=subject_name)
div = Div.objects.get(division=division, semester=semester, calendar_year=datetime.date.today().year)
if div.classteacher and div.classteacher.teacherID == teacher.teacherID:
if not SubjectTeacher.objects.filter(div=div, subject=subject).exists():
response_data = {'error_message': str(div) + " does not have subject " + subject_name}
return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
else:
SubjectTeacher.objects.get(div=div, subject=subject, teacher=teacher)
except Subject.DoesNotExist:
response_data = {'error_message': "Subject " + subject_name + " Does Not Exist"}
return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
except Div.DoesNotExist:
response_data = {'error_message': "Division " + division + " Does Not Exist"}
return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
except SubjectTeacher.DoesNotExist:
response_data = {
'error_message': "You do not have access to " + subject_name + " for " + str(div) + " data."
}
return JsonResponse(response_data, status=status.HTTP_400_BAD_REQUEST)
from docx import Document
from docx.shared import Inches, Pt
from docx.enum.text import WD_ALIGN_PARAGRAPH, WD_LINE_SPACING
from docx.enum.table import WD_TABLE_ALIGNMENT
from docx.enum.section import WD_SECTION
from docx.oxml.ns import qn
is_practical = div.get_class_type() == "Practical"
if is_practical:
divs = Div.objects.filter(
division__contains=div.division[0],
division__regex=r'[A,B][1-4]',
semester=semester,
calendar_year=datetime.date.today().year
)
lecs = Lecture.objects.filter(date__gte=date_from, date__lte=date_to, div__in=divs, subject=subject,
attendanceTaken=True).order_by('date')
student_list = Student.objects.filter(div__in=divs).order_by('sapID')
student_divs = StudentDivision.objects.filter(division__in=divs, student__in=student_list)
else:
lecs = Lecture.objects.filter(date__gte=date_from, date__lte=date_to, div=div, subject=subject,
attendanceTaken=True).order_by('date')
student_list = Student.objects.filter(div=div).order_by('sapID')
student_divs = StudentDivision.objects.filter(division=div, student__in=student_list)
student_lectures = StudentLecture.objects.filter(lecture__in=lecs)
attendance_sheet = []
for student in student_list:
student_row = [student.sapID, str(student).upper()]
for lec in lecs:
if student_lectures.filter(lecture=lec, student=student).exists():
student_row.append('P')
elif student_divs.filter(student=student, division=lec.div).exists():
student_row.append('Ab')
attendance_sheet.append(student_row)
response = HttpResponse(content_type='application/vnd.openxmlformats-officedocument.wordprocessingml.document')
response['Content-Disposition'] = 'blob; filename="WeeklyAttendance.docx"'
document = Document()
sections = document.sections
for section in sections:
section.top_margin = Inches(1)
section.bottom_margin = Inches(1)
section.left_margin = Inches(0.5)
section.right_margin = Inches(0.5)
paragraph_format = document.styles['Normal'].paragraph_format
paragraph_format.space_before = 0
paragraph_format.space_after = 2
style = document.styles['Table Grid']
font = style.font
font.name = 'Cambria'
font.size = Pt(11)
try:
document.add_picture('/home/wizdem/Attendance-System-Web/SAP/autonomous_header.jpeg', width=Inches(6))
except FileNotFoundError:
document.add_picture('SAP/autonomous_header.jpeg', width=Inches(6))
last_paragraph = document.paragraphs[-1]
last_paragraph.alignment = WD_ALIGN_PARAGRAPH.CENTER
digits = div.calendar_year % 100
if div.semester % 2 == 0:
academic_year = str(div.calendar_year - 1) + "-" + str(digits)
else:
academic_year = str(div.calendar_year) + "-" + str(digits + 1)
p = document.add_paragraph('Academic Year: ' + academic_year)
p.alignment = WD_ALIGN_PARAGRAPH.CENTER
p = document.add_paragraph('Report of Attendance Record of Lectures')
p.alignment = WD_ALIGN_PARAGRAPH.CENTER
table = document.add_table(rows=5, cols=4)
table.style = 'Table Grid'
table.alignment = WD_TABLE_ALIGNMENT.CENTER
table.cell(0, 0).text = "Week No.:"
table.cell(0, 1).text = "Date: "
table.cell(0, 2).text = "From: " + date_from.strftime("%d-%m-%Y")
table.cell(0, 3).text = "To:" + date_to.strftime("%d-%m-%Y")
table.cell(1, 0).merge(table.cell(1, 1)).text = "Teacher: " + (str(teacher) if not is_practical else "")
table.cell(1, 2).merge(table.cell(1, 3)).text = "Subject: " + subject.name
table.cell(2, 0).text = "Class: " + yearname
table.cell(2, 1).text = "Course/Branch: Computer"
table.cell(2, 2).text = "Semester: " + str(semester)
table.cell(2, 3).text = "Division: " + division[0]
table.cell(3, 0).merge(table.cell(3, 1)).text = "No. of Lectures Scheduled (S):"
table.cell(3, 2).merge(table.cell(3, 3)).text = "No. of Lectures Conducted (C):"
table.cell(4, 0).merge(table.cell(4, 3)).text = "Remark (In case S ≠ C):"
p = document.add_paragraph('')
if is_practical:
section = document.add_section(WD_SECTION.CONTINUOUS)
sectPr = section._sectPr
cols = sectPr.xpath('./w:cols')[0]
cols.set(qn('w:num'), str(len(divs)))
for i in range(1, 5):
prac_div = divs.get(division=division[0] + str(i))
prac_list = []
for row in attendance_sheet:
stu_list = student_list.filter(sapID=row[0])
if student_divs.filter(student__in=stu_list, division=prac_div).exists():
prac_list.append(row)
lec_list = lecs.filter(div=prac_div)
table = document.add_table(rows=4, cols=1 + (len(lec_list) if (len(lec_list) > 1) else 1))
table.style = 'Table Grid'
table.alignment = WD_TABLE_ALIGNMENT.CENTER
table.autofit = True
table.cell(0, 0).merge(table.cell(0, 1)).text = "Batch: " + str(prac_div)
table.cell(1, 0).merge(table.cell(3, 0)).text = "SAP ID"
if len(lec_list) <= 1:
table.cell(1, 1).text = "Date"
else:
table.cell(1, 1).merge(table.cell(1, len(lec_list))).text = "Date"
col_no = 1
for lec in lec_list:
table.cell(2, col_no).text = str(lec.date.strftime("%d/%m"))
table.cell(3, col_no).text = str(lec.getShortTimeString())
col_no += 1
for entry in prac_list:
row = table.add_row()
row.cells[0].text = str(entry[0])
for i, val in enumerate(entry[2:]):
row.cells[i + 1].text = str(val)
if i < 4:
document.add_section(WD_SECTION.NEW_COLUMN)
else:
if(len(lecs) > 6):
cols = 3 + len(lecs)
else:
cols = 9
table = document.add_table(rows=3 + len(attendance_sheet), cols=cols)
table.style = 'Table Grid'
table.alignment = WD_TABLE_ALIGNMENT.CENTER
table.autofit = True
for col in table.columns:
col.width = Inches(0.6)
for cell in col.cells:
cell.width = Inches(0.6)
col = table.columns[0]
col.width = Inches(0.4)
for cell in col.cells:
cell.width = Inches(0.4)
col = table.columns[1]
col.width = Inches(1.1)
for cell in col.cells:
cell.width = Inches(1.1)
col = table.columns[2]
col.width = Inches(2)
for cell in col.cells:
cell.width = Inches(2)
p = table.cell(0, 0).merge(table.cell(2, 0)).paragraphs[0]
p.text = "Sr.No."
p.alignment = WD_ALIGN_PARAGRAPH.CENTER
p = table.cell(0, 1).merge(table.cell(2, 1)).paragraphs[0]
p.text = "SAP ID"
p.alignment = WD_ALIGN_PARAGRAPH.CENTER
p = table.cell(0, 2).merge(table.cell(2, 2)).paragraphs[0]
p.text = "Name"
p.alignment = WD_ALIGN_PARAGRAPH.CENTER
if(len(lecs) > 6):
p = table.cell(0, 3).merge(table.cell(0, 2 + len(lecs))).paragraphs[0]
else:
p = table.cell(0, 3).merge(table.cell(0, 8)).paragraphs[0]
p.text = "Date & Time"
p.alignment = WD_ALIGN_PARAGRAPH.CENTER
col_no = 3
for lec in lecs:
table.cell(1, col_no).text = str(lec.date.strftime("%d/%m"))
table.cell(2, col_no).text = str(lec.getShortTimeString())
| |
1.63300865e-11,
# -2.81558165e-11, -1.84642248e-16, 3.30755613e-16, 9.65096318e-06,
# -7.81673553e-07, -3.06700108e-09, 3.63509506e-10], [-2.19201461e-07, -2.42721105e-12, -4.62657860e-15, 2.23721479e-11,
# -2.75661325e-11, -1.83185234e-16, 1.73944578e-16, 9.77685398e-06,
# -1.04302581e-06, -3.19356387e-09, 4.26767890e-10], [-3.02013758e-07, -2.06278591e-12, -4.62657860e-15, 2.23721479e-11,
# -3.25165105e-11, -1.54944717e-16, 2.08454805e-16, 1.22647642e-05,
# -8.09341117e-07, -3.49416113e-09, 3.63509506e-10], [-2.86805509e-07, -3.33589667e-12, -4.19538456e-15, 2.00000300e-11,
# -3.03394633e-11, -2.01320041e-16, 2.79173949e-16, 9.77685398e-06,
# -1.04302581e-06, -2.34790534e-09, 3.43373035e-10], [-2.64761668e-07, -2.93093043e-12, -4.62657860e-15, 2.23721479e-11,
# -2.75661325e-11, -2.23702269e-16, 1.89016179e-16, 1.02187396e-05,
# -1.27732445e-06, -3.36702276e-09, 3.63509506e-10], [-2.31437515e-07, -2.42721105e-12, -4.30896242e-15, 1.44568338e-11,
# -2.75661325e-11, -1.57667879e-16, 3.72467796e-16, 9.65096318e-06,
# -1.32796721e-06, -3.19356387e-09, 3.65875300e-10]],
# [[-2.64761668e-07, -2.95424344e-12, -3.58721127e-15, 2.15822546e-11,
# -2.40278613e-11, -1.57667879e-16, 3.34709788e-16, 9.65096318e-06,
# -1.14518487e-06, -2.74136051e-09, 3.65875300e-10], [-2.89466954e-07, -2.42721105e-12, -5.88785422e-15, 2.47235637e-11,
# -2.97755539e-11, -2.16595194e-16, 2.00124460e-16, 9.77685398e-06,
# -1.04302581e-06, -3.06700108e-09, 3.63509506e-10], [-2.64761668e-07, -2.76700116e-12, -4.22506109e-15, 1.88901875e-11,
# -2.07647763e-11, -1.57667879e-16, 2.62240001e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-2.89466954e-07, -1.88068413e-12, -4.69475923e-15, 2.47235637e-11,
# -2.97755539e-11, -1.88142673e-16, 2.43070279e-16, 9.76850383e-06,
# -8.46837805e-07, -3.06700108e-09, 3.63509506e-10], [-2.89466954e-07, -2.95424344e-12, -4.62657860e-15, 2.15822546e-11,
# -2.40278613e-11, -1.98809012e-16, 2.62240001e-16, 1.12865320e-05,
# -1.04302581e-06, -3.06700108e-09, 3.63509506e-10], [-2.64761668e-07, -2.42721105e-12, -4.62657860e-15, 2.47235637e-11,
# -2.97755539e-11, -1.93131557e-16, 2.43070279e-16, 8.73899486e-06,
# -1.04302581e-06, -3.03554115e-09, 3.51929228e-10], [-2.89466954e-07, -2.42721105e-12, -4.62657860e-15, 1.82672963e-11,
# -2.97755539e-11, -2.08597352e-16, 2.60644704e-16, 1.04919956e-05,
# -1.04302581e-06, -2.21467924e-09, 3.65875300e-10], [-2.89466954e-07, -2.42721105e-12, -4.62657860e-15, 2.47235637e-11,
# -2.97755539e-11, -1.83185234e-16, 2.47599052e-16, 1.20072090e-05,
# -1.04302581e-06, -2.64597642e-09, 3.65875300e-10], [-2.86805509e-07, -3.33589667e-12, -4.19538456e-15, 2.00000300e-11,
# -2.54094242e-11, -2.01320041e-16, 2.79173949e-16, 9.77685398e-06,
# -1.04302581e-06, -2.34790534e-09, 4.10281382e-10], [-2.86805509e-07, -3.94196570e-12, -5.15241862e-15, 2.00000300e-11,
# -2.20713706e-11, -1.41760987e-16, 2.79173949e-16, 9.77685398e-06,
# -1.04302581e-06, -2.34790534e-09, 3.86372307e-10]],
# [[-2.64761668e-07, -2.76700116e-12, -4.22506109e-15, 1.61162146e-11,
# -2.38339539e-11, -1.57667879e-16, 2.62240001e-16, 1.24546893e-05,
# -1.04302581e-06, -2.34790534e-09, 2.98042282e-10], [-3.16101743e-07, -3.94196570e-12, -5.15241862e-15, 2.00000300e-11,
# -2.20713706e-11, -1.41760987e-16, 2.79173949e-16, 1.21553203e-05,
# -1.11813367e-06, -2.08523017e-09, 3.78235738e-10], [-2.27804758e-07, -2.09000069e-12, -4.22506109e-15, 1.88901875e-11,
# -1.56017448e-11, -1.57667879e-16, 2.14135199e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-2.64761668e-07, -2.76700116e-12, -3.59203377e-15, 1.88901875e-11,
# -2.07647763e-11, -1.57667879e-16, 2.62240001e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-2.64761668e-07, -2.76700116e-12, -4.22506109e-15, 1.88901875e-11,
# -2.07647763e-11, -1.57667879e-16, 1.87636345e-16, 1.22783737e-05,
# -9.70572748e-07, -2.64597642e-09, 3.65875300e-10], [-3.47693987e-07, -2.42721105e-12, -4.62657860e-15, 2.47235637e-11,
# -2.97755539e-11, -1.83185234e-16, 2.47599052e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-2.86883575e-07, -2.08329166e-12, -4.55843509e-15, 2.47235637e-11,
# -2.97755539e-11, -2.27944017e-16, 2.00124460e-16, 9.77685398e-06,
# -1.04302581e-06, -3.06700108e-09, 2.76109376e-10], [-2.89466954e-07, -2.42721105e-12, -4.62657860e-15, 1.82672963e-11,
# -3.78090613e-11, -2.08597352e-16, 2.60644704e-16, 1.04919956e-05,
# -1.04302581e-06, -2.44303677e-09, 3.63509506e-10], [-2.64761668e-07, -2.76700116e-12, -6.34379640e-15, 3.02382046e-11,
# -2.40798570e-11, -2.24030316e-16, 2.00124460e-16, 9.77685398e-06,
# -1.04302581e-06, -3.06700108e-09, 3.63509506e-10], [-2.89466954e-07, -2.42721105e-12, -4.22506109e-15, 1.88901875e-11,
# -2.07647763e-11, -1.57667879e-16, 2.62240001e-16, 9.65096318e-06,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10]],
# [[-2.64761668e-07, -2.95044433e-12, -4.09608894e-15, 2.44727589e-11,
# -2.07647763e-11, -2.02990709e-16, 2.60644704e-16, 1.04919956e-05,
# -1.04302581e-06, -2.44303677e-09, 3.43605321e-10], [-2.90189081e-07, -2.42721105e-12, -4.74534271e-15, 1.82672963e-11,
# -3.78090613e-11, -1.18131870e-16, 2.62240001e-16, 7.04653398e-06,
# -8.62016280e-07, -2.56745427e-09, 3.78235738e-10], [-4.01100901e-07, -2.42721105e-12, -4.23200891e-15, 2.47235637e-11,
# -2.97755539e-11, -1.83185234e-16, 3.19729715e-16, 9.65096318e-06,
# -1.11813367e-06, -1.86113983e-09, 3.78235738e-10], [-2.89466954e-07, -2.42721105e-12, -4.22506109e-15, 1.51692359e-11,
# -2.07647763e-11, -1.57667879e-16, 2.47599052e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-3.76476244e-07, -3.35792918e-12, -4.60985249e-15, 2.00000300e-11,
# -2.20713706e-11, -1.57667879e-16, 3.05597050e-16, 9.65096318e-06,
# -1.30418622e-06, -1.76484665e-09, 3.78235738e-10], [-2.12109363e-07, -2.42721105e-12, -4.22506109e-15, 1.88901875e-11,
# -1.71109894e-11, -1.00448831e-16, 2.79173949e-16, 1.21553203e-05,
# -1.11813367e-06, -2.08523017e-09, 3.78235738e-10], [-2.27804758e-07, -2.09000069e-12, -4.22506109e-15, 1.88901875e-11,
# -1.56017448e-11, -1.57667879e-16, 2.14135199e-16, 9.65096318e-06,
# -1.11813367e-06, -2.60331801e-09, 3.63509506e-10], [-2.03924423e-07, -2.42721105e-12, -4.62657860e-15, 1.82672963e-11,
# -3.78090613e-11, -1.78226641e-16, 2.85271548e-16, 1.04919956e-05,
# -1.04302581e-06, -2.44303677e-09, 3.78235738e-10], [-2.89466954e-07, -2.42721105e-12, -4.22506109e-15, 1.88901875e-11,
# -2.24900829e-11, -2.16953648e-16, 2.47599052e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-3.47693987e-07, -1.97844779e-12, -4.62657860e-15, 2.47235637e-11,
# -2.97755539e-11, -1.58271143e-16, 2.62240001e-16, 9.65096318e-06,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10]],
# [[-3.47693987e-07, -1.97844779e-12, -4.62657860e-15, 2.47235637e-11,
# -2.97755539e-11, -1.58271143e-16, 2.62240001e-16, 1.12024368e-05,
# -9.60751580e-07, -2.11832390e-09, 3.78235738e-10], [-2.88154725e-07, -2.42721105e-12, -4.22506109e-15, 1.51692359e-11,
# -1.70109594e-11, -1.51100846e-16, 2.47599052e-16, 9.91778210e-06,
# -1.34227834e-06, -1.59059089e-09, 3.78235738e-10], [-2.64761668e-07, -2.62082059e-12, -3.36771405e-15, 2.47235637e-11,
# -2.97755539e-11, -1.85062714e-16, 2.62240001e-16, 1.15932129e-05,
# -1.11813367e-06, -2.00351239e-09, 3.78235738e-10], [-3.47693987e-07, -1.97844779e-12, -3.83226879e-15, 2.28725950e-11,
# -1.63583694e-11, -2.02990709e-16, 2.60644704e-16, 1.04919956e-05,
# -1.01843014e-06, -2.44303677e-09, 3.43605321e-10], [-2.64761668e-07, -2.95044433e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07647763e-11, -2.19426502e-16, 2.67914370e-16, 1.04919956e-05,
# -1.11813367e-06, -1.59059089e-09, 3.33187085e-10], [-3.47693987e-07, -1.86534963e-12, -5.97841243e-15, 2.12582752e-11,
# -2.97755539e-11, -2.03357384e-16, 3.27845996e-16, 9.65096318e-06,
# -1.04302581e-06, -2.44303677e-09, 2.59806247e-10], [-3.32785159e-07, -2.42721105e-12, -4.22506109e-15, 1.60716620e-11,
# -2.07647763e-11, -1.57667879e-16, 2.47599052e-16, 9.65096318e-06,
# -9.29340187e-07, -2.03675714e-09, 2.76999612e-10], [-2.89466954e-07, -2.42721105e-12, -4.22506109e-15, 1.51692359e-11,
# -2.07647763e-11, -1.57667879e-16, 2.47599052e-16, 9.65096318e-06,
# -7.89487802e-07, -2.03675714e-09, 2.97509810e-10], [-3.30931324e-07, -2.42721105e-12, -4.62657860e-15, 2.47235637e-11,
# -3.16252781e-11, -1.82830417e-16, 2.62240001e-16, 9.15055453e-06,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10], [-3.47693987e-07, -1.97844779e-12, -4.22506109e-15, 1.88901875e-11,
# -1.79391576e-11, -2.16953648e-16, 2.47599052e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10]],
# [[-3.47693987e-07, -1.97844779e-12, -4.22506109e-15, 1.88901875e-11,
# -1.79391576e-11, -2.16953648e-16, 2.67914370e-16, 1.36358151e-05,
# -1.11813367e-06, -1.59059089e-09, 3.84187598e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07647763e-11, -2.19426502e-16, 2.49410172e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-3.54595599e-07, -2.42721105e-12, -4.62657860e-15, 2.91851217e-11,
# -3.16252781e-11, -1.82830417e-16, 2.62240001e-16, 1.17359700e-05,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10], [-3.30931324e-07, -3.05538008e-12, -4.62657860e-15, 2.47235637e-11,
# -3.16252781e-11, -1.82830417e-16, 2.62240001e-16, 9.15055453e-06,
# -1.11813367e-06, -1.82069461e-09, 3.78235738e-10], [-2.89466954e-07, -2.42721105e-12, -4.22506109e-15, 1.45378896e-11,
# -2.07647763e-11, -1.57667879e-16, 2.62240001e-16, 9.15055453e-06,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10], [-3.30931324e-07, -2.42721105e-12, -4.62657860e-15, 2.47235637e-11,
# -3.16252781e-11, -1.82830417e-16, 2.47599052e-16, 9.65096318e-06,
# -7.89487802e-07, -2.03675714e-09, 3.25643564e-10], [-3.30931324e-07, -2.52052731e-12, -4.62657860e-15, 1.40147725e-11,
# -1.79391576e-11, -2.43327164e-16, 1.82532405e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-3.47693987e-07, -1.45214729e-12, -4.22506109e-15, 2.47235637e-11,
# -3.16252781e-11, -1.82830417e-16, 2.62240001e-16, 9.15055453e-06,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10], [-3.47693987e-07, -1.97844779e-12, -4.68467550e-15, 1.88901875e-11,
# -1.79391576e-11, -2.16953648e-16, 2.82477713e-16, 1.02350490e-05,
# -1.11813367e-06, -1.59059089e-09, 2.65395167e-10], [-3.30931324e-07, -2.42721105e-12, -4.62657860e-15, 2.47235637e-11,
# -3.16252781e-11, -1.82830417e-16, 2.62240001e-16, 9.15055453e-06,
# -1.11813367e-06, -2.31652552e-09, 3.78235738e-10]],
# [[-2.71973699e-07, -2.19181037e-12, -6.28767630e-15, 2.44727589e-11,
# -2.07647763e-11, -2.19426502e-16, 1.87204938e-16, 9.65096318e-06,
# -1.11813367e-06, -2.16737018e-09, 3.78235738e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07100012e-11, -2.19426502e-16, 2.49410172e-16, 1.14115142e-05,
# -1.11813367e-06, -2.03675714e-09, 3.78235738e-10], [-3.30931324e-07, -3.05538008e-12, -4.62657860e-15, 2.47235637e-11,
# -3.16252781e-11, -1.82830417e-16, 2.62240001e-16, 7.88085471e-06,
# -1.11813367e-06, -1.82069461e-09, 3.78235738e-10], [-3.30931324e-07, -3.05538008e-12, -4.62657860e-15, 2.47235637e-11,
# -2.62670321e-11, -1.82830417e-16, 2.62240001e-16, 8.71478290e-06,
# -1.11813367e-06, -1.77108226e-09, 4.12401366e-10], [-3.30931324e-07, -2.42721105e-12, -4.62657860e-15, 2.73591358e-11,
# -3.16252781e-11, -1.82830417e-16, 2.62240001e-16, 9.15055453e-06,
# -1.22039099e-06, -1.59059089e-09, 3.29571480e-10], [-3.39247997e-07, -1.97844779e-12, -4.68467550e-15, 1.81129097e-11,
# -2.00320367e-11, -2.16953648e-16, 2.82477713e-16, 9.73848338e-06,
# -1.41958341e-06, -2.31652552e-09, 3.78235738e-10], [-2.89466954e-07, -2.42721105e-12, -4.22506109e-15, 1.86945236e-11,
# -2.07647763e-11, -2.43327164e-16, 1.82532405e-16, 9.65096318e-06,
# -1.11813367e-06, -2.03675714e-09, 3.41465090e-10], [-3.38011841e-07, -2.52052731e-12, -5.46166986e-15, 1.04532875e-11,
# -1.79391576e-11, -1.57667879e-16, 3.03308934e-16, 1.07719905e-05,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10], [-3.64994795e-07, -3.05538008e-12, -5.48992675e-15, 2.47235637e-11,
# -3.16252781e-11, -1.82830417e-16, 2.49410172e-16, 9.65096318e-06,
# -1.11813367e-06, -1.85600946e-09, 3.65932134e-10], [-2.75640336e-07, -2.38657845e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07647763e-11, -2.19426502e-16, 2.62240001e-16, 8.84184760e-06,
# -1.15823105e-06, -1.82069461e-09, 3.46083758e-10]],
# [[-3.30114547e-07, -2.75774398e-12, -7.78123651e-15, 2.44727589e-11,
# -2.07647763e-11, -2.19426502e-16, 2.49410172e-16, 1.14115142e-05,
# -9.33366631e-07, -2.27216915e-09, 3.78235738e-10], [-1.65558132e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07100012e-11, -2.19426502e-16, 2.24428686e-16, 9.65096318e-06,
# -1.11813367e-06, -2.27833613e-09, 3.26719541e-10], [-2.13116158e-07, -2.19181037e-12, -7.66149733e-15, 2.47235637e-11,
# -3.16252781e-11, -2.21802051e-16, 2.62240001e-16, 7.88085471e-06,
# -1.11813367e-06, -2.11253510e-09, 3.78235738e-10], [-3.30931324e-07, -3.05538008e-12, -4.62657860e-15, 3.12172650e-11,
# -1.81796618e-11, -2.29676492e-16, 1.87204938e-16, 9.65096318e-06,
# -1.11813367e-06, -2.12967578e-09, 3.78235738e-10], [-2.75640336e-07, -2.38657845e-12, -4.94362236e-15, 2.44727589e-11,
# -2.13301418e-11, -1.57667879e-16, 3.03308934e-16, 1.07719905e-05,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10], [-3.38011841e-07, -2.52052731e-12, -5.46166986e-15, 1.04532875e-11,
# -1.74547091e-11, -2.19426502e-16, 2.62240001e-16, 8.84184760e-06,
# -8.30268475e-07, -2.17072912e-09, 3.46083758e-10], [-2.46496178e-07, -3.05538008e-12, -4.62657860e-15, 2.08639092e-11,
# -2.62670321e-11, -2.13914511e-16, 2.62240001e-16, 8.71478290e-06,
# -1.11813367e-06, -1.59059089e-09, 3.04731109e-10], [-3.38011841e-07, -2.52052731e-12, -4.48784760e-15, 1.04532875e-11,
# -1.98658521e-11, -1.57667879e-16, 3.03308934e-16, 1.07719905e-05,
# -1.11813367e-06, -1.77108226e-09, 4.12401366e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07100012e-11, -2.19426502e-16, 2.49410172e-16, 9.65096318e-06,
# -1.11813367e-06, -2.16737018e-09, 3.78235738e-10], [-2.84215795e-07, -2.19181037e-12, -6.94487895e-15, 3.12521789e-11,
# -2.07647763e-11, -2.19426502e-16, 1.87204938e-16, 1.14115142e-05,
# -1.32433900e-06, -2.03675714e-09, 3.78235738e-10]],
# [[-2.31929682e-07, -2.19181037e-12, -7.66149733e-15, 2.01327248e-11,
# -3.18870005e-11, -2.21802051e-16, 2.68909102e-16, 7.88085471e-06,
# -1.11813367e-06, -2.69215729e-09, 3.78235738e-10], [-1.90731230e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07100012e-11, -2.19426502e-16, 2.49410172e-16, 1.12138398e-05,
# -1.11813367e-06, -2.49836784e-09, 3.78235738e-10], [-2.13116158e-07, -1.82364824e-12, -4.94362236e-15, 2.21338354e-11,
# -1.62722580e-11, -1.57667879e-16, 3.03308934e-16, 1.07719905e-05,
# -1.11813367e-06, -1.59059089e-09, 3.78235738e-10], [-2.75640336e-07, -2.19181037e-12, -7.76934751e-15, 2.47235637e-11,
# -2.29249670e-11, -2.21802051e-16, 2.62240001e-16, 6.43415771e-06,
# -1.11813367e-06, -2.11253510e-09, 3.28449953e-10], [-2.38234690e-07, -3.05538008e-12, -4.62657860e-15, 3.12172650e-11,
# -1.81796618e-11, -2.29676492e-16, 2.49410172e-16, 9.65096318e-06,
# -1.11813367e-06, -2.16737018e-09, 3.78235738e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.07100012e-11, -2.27652048e-16, 1.94154198e-16, 9.65096318e-06,
# -1.11813367e-06, -2.12967578e-09, 3.79466480e-10], [-3.38118967e-07, -3.55963483e-12, -4.62657860e-15, 3.12172650e-11,
# -1.81796618e-11, -2.29676492e-16, 2.29395821e-16, 9.65096318e-06,
# -1.28846782e-06, -2.16737018e-09, 3.78235738e-10], [-2.30636816e-07, -2.27824114e-12, -5.76582636e-15, 1.91866915e-11,
# -2.07100012e-11, -2.19426502e-16, 2.12697664e-16, 9.65096318e-06,
# -1.11813367e-06, -1.94087589e-09, 3.78235738e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.42501768e-11, -2.19426502e-16, 1.98770994e-16, 9.65096318e-06,
# -1.11813367e-06, -1.65023161e-09, 3.78235738e-10], [-2.75640336e-07, -2.38657845e-12, -4.94362236e-15, 2.44727589e-11,
# -2.13301418e-11, -1.28819225e-16, 3.31800400e-16, 1.07719905e-05,
# -1.11813367e-06, -2.16737018e-09, 3.03385976e-10]],
# [[-1.95710919e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.87950459e-11, -2.19426502e-16, 1.98770994e-16, 6.43415771e-06,
# -1.11813367e-06, -2.11253510e-09, 3.28449953e-10], [-3.53566299e-07, -2.19181037e-12, -7.76934751e-15, 2.46393371e-11,
# -2.29249670e-11, -2.21802051e-16, 2.15422498e-16, 8.34874936e-06,
# -1.02788919e-06, -1.24052976e-09, 3.87768108e-10], [-2.30636816e-07, -3.05538008e-12, -4.62657860e-15, 2.97148984e-11,
# -1.81796618e-11, -2.29676492e-16, 2.49410172e-16, 9.65096318e-06,
# -1.11813367e-06, -2.16737018e-09, 4.62932861e-10], [-2.38234690e-07, -2.34956564e-12, -6.69428450e-15, 1.91866915e-11,
# -2.07100012e-11, -2.19426502e-16, 2.12697664e-16, 9.65096318e-06,
# -1.11813367e-06, -1.94087589e-09, 3.78235738e-10], [-1.90254264e-07, -1.82364824e-12, -4.94362236e-15, 2.21338354e-11,
# -1.62722580e-11, -1.57667879e-16, 3.03308934e-16, 9.65096318e-06,
# -1.11813367e-06, -1.89357558e-09, 3.78235738e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.42501768e-11, -2.19426502e-16, 1.46939759e-16, 1.07719905e-05,
# -1.40952142e-06, -1.59059089e-09, 3.78235738e-10], [-2.38234690e-07, -3.05538008e-12, -4.62657860e-15, 3.12172650e-11,
# -1.81796618e-11, -2.29676492e-16, 2.49410172e-16, 9.65096318e-06,
# -9.32591571e-07, -1.91293849e-09, 3.41579584e-10], [-2.30636816e-07, -2.27824114e-12, -5.76582636e-15, 1.91866915e-11,
# -2.07100012e-11, -2.03140420e-16, 1.82291954e-16, 1.17275249e-05,
# -1.11813367e-06, -1.94087589e-09, 3.78235738e-10], [-2.30636816e-07, -2.37119172e-12, -5.10921717e-15, 1.91866915e-11,
# -2.07100012e-11, -2.19426502e-16, 1.69760691e-16, 9.65096318e-06,
# -9.68521076e-07, -1.56681860e-09, 3.78235738e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.42501768e-11, -2.19426502e-16, 1.98770994e-16, 9.65096318e-06,
# -1.17180876e-06, -1.94087589e-09, 3.78235738e-10]],
# [[-2.38234690e-07, -2.34956564e-12, -7.16612789e-15, 2.29725021e-11,
# -2.07100012e-11, -2.19426502e-16, 2.12697664e-16, 9.65096318e-06,
# -8.65681077e-07, -1.94087589e-09, 3.78235738e-10], [-2.38234690e-07, -2.34956564e-12, -6.69428450e-15, 2.41175086e-11,
# -1.88916587e-11, -2.19426502e-16, 2.12697664e-16, 9.65096318e-06,
# -1.11813367e-06, -1.58371593e-09, 2.67916790e-10], [-1.90254264e-07, -1.82364824e-12, -4.94362236e-15, 2.73837927e-11,
# -1.62722580e-11, -1.57667879e-16, 3.03308934e-16, 9.65096318e-06,
# -1.11813367e-06, -1.85225514e-09, 3.02799754e-10], [-2.38234690e-07, -2.34956564e-12, -5.45954868e-15, 1.91866915e-11,
# -2.07100012e-11, -2.19426502e-16, 1.76687708e-16, 9.65096318e-06,
# -1.11813367e-06, -1.89357558e-09, 3.78235738e-10], [-1.90254264e-07, -1.91693654e-12, -4.94362236e-15, 2.21338354e-11,
# -1.53976341e-11, -1.13394627e-16, 3.03308934e-16, 9.65096318e-06,
# -9.68217802e-07, -1.94087589e-09, 3.78235738e-10], [-3.01720907e-07, -2.34956564e-12, -6.69428450e-15, 1.91866915e-11,
# -2.07100012e-11, -2.19426502e-16, 2.70743954e-16, 9.65096318e-06,
# -1.11813367e-06, -1.89357558e-09, 3.78235738e-10], [-1.90254264e-07, -1.82364824e-12, -4.94362236e-15, 2.21338354e-11,
# -1.62722580e-11, -1.44742977e-16, 2.45995034e-16, 9.65096318e-06,
# -1.17180876e-06, -1.43477271e-09, 3.78235738e-10], [-2.30636816e-07, -2.27824114e-12, -4.94362236e-15, 2.44727589e-11,
# -2.42501768e-11, -2.19426502e-16, 1.98770994e-16, 9.65096318e-06,
# -1.11813367e-06, -1.89357558e-09, 3.78235738e-10], [-2.38234690e-07, -2.34956564e-12, -5.24275621e-15, 1.91866915e-11,
# -2.07100012e-11, -2.19426502e-16, 1.81926852e-16, 9.65096318e-06,
# -1.39259640e-06, -1.89357558e-09, 3.78235738e-10], [-1.90254264e-07, -1.82364824e-12, -4.94362236e-15, 2.21338354e-11,
# -1.62722580e-11, -1.57667879e-16, 2.98247899e-16, 9.40527117e-06,
# -1.11813367e-06, -1.58467050e-09, 3.78235738e-10]],
# [[-3.01720907e-07, -2.13274935e-12, -6.69428450e-15, 1.91866915e-11,
# -2.42501768e-11, -1.59073185e-16, 1.98770994e-16, 9.65096318e-06,
# -8.33928520e-07, -1.89357558e-09, 3.78235738e-10], [-2.45393681e-07, -2.24112955e-12, -4.94362236e-15, 2.44727589e-11,
# -2.42366032e-11, -2.19426502e-16, 2.70279113e-16, 9.65096318e-06,
# -1.11813367e-06, -1.89357558e-09, 3.96106702e-10], [-2.38234690e-07, -2.34956564e-12, -8.11551999e-15, 2.21338354e-11,
# -1.62722580e-11, -1.23429218e-16, 3.28663609e-16, 9.40527117e-06,
# -1.20546600e-06, -1.97824088e-09, 3.78235738e-10], [-1.90254264e-07, -2.22452254e-12, -4.94362236e-15, 2.29725021e-11,
# -2.07100012e-11, -2.19426502e-16, 2.12697664e-16, 9.65096318e-06,
# -8.65681077e-07, -1.94087589e-09, 3.00282232e-10], [-1.90254264e-07, -1.60277947e-12, -4.94362236e-15, 2.21338354e-11,
# -1.59130195e-11, -1.44742977e-16, 2.45995034e-16, 7.22962360e-06,
# -1.17180876e-06, -1.47205710e-09, 3.78235738e-10], [-2.38234690e-07, -2.13570331e-12, -6.73183056e-15, 1.91866915e-11,
# -2.07100012e-11, -2.19426502e-16, 1.76687708e-16, 1.05631234e-05,
# -1.11813367e-06, -1.89357558e-09, 2.87675426e-10], [-2.42251909e-07, -1.67219450e-12, -4.69502818e-15, 2.21338354e-11,
# -1.96793589e-11, -1.57667879e-16, 2.98247899e-16, 9.65096318e-06,
# -1.17180876e-06, -1.59249365e-09, 4.35713874e-10], [-1.90254264e-07, -1.82364824e-12, -4.94362236e-15, 2.21338354e-11,
# -1.62722580e-11, -1.44742977e-16, 2.45995034e-16, 9.40527117e-06,
# -1.11813367e-06, -1.58467050e-09, 3.78235738e-10], [-1.90254264e-07, -1.82364824e-12, -4.27114911e-15, 2.21338354e-11,
# -1.62722580e-11, -1.53795905e-16, 2.45995034e-16, 9.65096318e-06,
# -1.17180876e-06, -1.43477271e-09, 3.78235738e-10], [-1.48307248e-07, -1.82364824e-12, -4.94362236e-15, 2.21338354e-11,
# -1.52485683e-11, -1.44742977e-16, 2.45995034e-16, 8.93661800e-06,
# -1.49457104e-06, -1.43477271e-09, 3.78235738e-10]],
# [[-2.45393681e-07, -2.24112955e-12, -4.94362236e-15, 2.44727589e-11,
# -2.42366032e-11, -2.19426502e-16, 2.70279113e-16, | |
# --- Do not remove these libs ---
from freqtrade.strategy.interface import IStrategy
from pandas import DataFrame
# --------------------------------
import talib.abstract as ta
import logging
import pandas_ta as pta
from pandas import DataFrame, Series
from datetime import datetime, timezone
from freqtrade.persistence import Trade
logger = logging.getLogger(__name__)
class UziChan2(IStrategy):
    """Price-channel mean-reversion strategy on 1m candles.

    Buys when price pierces below an EMA-smoothed lower channel band while the
    Chaikin A/D oscillator is rising; flags sells when price pokes above the
    upper band under the same oscillator condition, and force-exits via
    ``custom_sell`` once open profit exceeds 1%.
    """

    minimal_roi = {
        "0": 0.1
    }
    stoploss = -0.10
    timeframe = '1m'

    def custom_sell(self, pair: str, trade: 'Trade', current_time: 'datetime', current_rate: float, current_profit: float, **kwargs):
        """Force an exit once open profit exceeds 1%; otherwise keep holding.

        Returns the sell reason tag, or None to leave the trade open.
        """
        # NOTE(review): removed an unused self.dp.get_analyzed_dataframe() fetch here.
        # NOTE(review): tag says 1.2pc but the threshold below is 1% — confirm intent.
        if current_profit * 100 > 1:
            logger.info(f'custom sell for {pair} at {current_rate}')
            return 'sell_1.2pc'
        return None

    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        """Compute candle-range volatility, the price channel and volume flow."""
        # Candle range as a percentage of the low, plus its 3-period EMA.
        dataframe['perc'] = (dataframe['high'] - dataframe['low']) / dataframe['low'] * 100
        dataframe['avg3_perc'] = ta.EMA(dataframe['perc'], 3)
        # Min-max normalize the range over a 50-candle window
        # (rolling stats hoisted so they are computed once, not three times).
        rolling_min = dataframe['perc'].rolling(50).min()
        rolling_max = dataframe['perc'].rolling(50).max()
        dataframe['perc_norm'] = (dataframe['perc'] - rolling_min) / (rolling_max - rolling_min)
        # Uzirox's price channel: SSF-smoothed midline +/- a rolling stddev,
        # each band further smoothed with a 3-period EMA.
        periodo = 15
        dataframe['uc_mid'] = pta.ssf(dataframe['close'], 5)
        dataframe['uc_stdv'] = ta.STDDEV(dataframe['uc_mid'], periodo).round(5)
        dataframe['uc_low'] = ta.EMA(dataframe['uc_mid'] - dataframe['uc_stdv'], 3).round(5)
        dataframe['uc_up'] = ta.EMA(dataframe['uc_mid'] + dataframe['uc_stdv'], 3).round(5)
        # Chaikin A/D oscillator as a volume-flow confirmation signal.
        dataframe['co'] = ta.ADOSC(dataframe, fastperiod=30, slowperiod=100).round(3)
        return dataframe

    def populate_buy_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        """Flag a buy when close or open dips below the lower band while 'co' rises."""
        dataframe.loc[
            (
                ((dataframe['close'] < dataframe['uc_low']) | (dataframe['open'] < dataframe['uc_low'])) &
                (dataframe['co'] > dataframe['co'].shift())
            ),
            'buy'] = 1
        return dataframe

    def populate_sell_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        """Flag a sell when the high breaks the upper band while 'co' rises."""
        dataframe.loc[
            (
                (dataframe['high'] > dataframe['uc_up']) &
                (dataframe['co'] > dataframe['co'].shift())
            ),
            'sell'] = 1
        return dataframe
class UziChanTB2(UziChan2):
    """UziChan2 with virtual trailing-buy / trailing-sell order management layered on top."""
    process_only_new_candles = True
    # Per-pair trailing state: pair -> {'trailing_buy': {...}} / {'trailing_sell': {...}}.
    custom_info_trail_buy = dict()
    custom_info_trail_sell = dict()
    # Trailing buy parameters
    trailing_buy_order_enabled = True
    trailing_sell_order_enabled = True
    trailing_expire_seconds = 1800 #NOTE 5m timeframe
    #trailing_expire_seconds = 1800/5 #NOTE 1m timeframe
    #trailing_expire_seconds = 1800*3 #NOTE 15m timeframe
    # If the current candle goes above min_uptrend_trailing_profit % before trailing_expire_seconds_uptrend seconds, buy the coin
    trailing_buy_uptrend_enabled = True
    trailing_sell_uptrend_enabled = True
    trailing_expire_seconds_uptrend = 90
    min_uptrend_trailing_profit = 0.02
    # When True, trailing_buy_info()/trailing_sell_info() log a state snapshot.
    debug_mode = True
    trailing_buy_max_stop = 0.02  # stop trailing buy if current_price > starting_price * (1+trailing_buy_max_stop)
    trailing_buy_max_buy = 0.000  # buy if price between uplimit (=min of serie (current_price * (1 + trailing_buy_offset())) and (start_price * 1+trailing_buy_max_buy))
    trailing_sell_max_stop = 0.02  # stop trailing sell if current_price < starting_price * (1+trailing_buy_max_stop)
    trailing_sell_max_sell = 0.000  # sell if price between downlimit (=max of serie (current_price * (1 + trailing_sell_offset())) and (start_price * 1+trailing_sell_max_sell))
    abort_trailing_when_sell_signal_triggered = False
    # Template for fresh per-pair trailing-buy state (copied by trailing_buy()).
    init_trailing_buy_dict = {
        'trailing_buy_order_started': False,
        'trailing_buy_order_uplimit': 0,
        'start_trailing_price': 0,
        'buy_tag': None,
        'start_trailing_time': None,
        'offset': 0,
        'allow_trailing': False,
    }
    # Template for fresh per-pair trailing-sell state (copied by trailing_sell()).
    init_trailing_sell_dict = {
        'trailing_sell_order_started': False,
        'trailing_sell_order_downlimit': 0,
        'start_trailing_sell_price': 0,
        'sell_tag': None,
        'start_trailing_time': None,
        'offset': 0,
        'allow_sell_trailing': False,
    }
def trailing_buy(self, pair, reinit=False):
# returns trailing buy info for pair (init if necessary)
if not pair in self.custom_info_trail_buy:
self.custom_info_trail_buy[pair] = dict()
if (reinit or not 'trailing_buy' in self.custom_info_trail_buy[pair]):
self.custom_info_trail_buy[pair]['trailing_buy'] = self.init_trailing_buy_dict.copy()
return self.custom_info_trail_buy[pair]['trailing_buy']
def trailing_sell(self, pair, reinit=False):
# returns trailing sell info for pair (init if necessary)
if not pair in self.custom_info_trail_sell:
self.custom_info_trail_sell[pair] = dict()
if (reinit or not 'trailing_sell' in self.custom_info_trail_sell[pair]):
self.custom_info_trail_sell[pair]['trailing_sell'] = self.init_trailing_sell_dict.copy()
return self.custom_info_trail_sell[pair]['trailing_sell']
def trailing_buy_info(self, pair: str, current_price: float):
# current_time live, dry run
current_time = datetime.now(timezone.utc)
if not self.debug_mode:
return
trailing_buy = self.trailing_buy(pair)
duration = 0
try:
duration = (current_time - trailing_buy['start_trailing_time'])
except TypeError:
duration = 0
finally:
logger.info(
f"pair: {pair} : "
f"start: {trailing_buy['start_trailing_price']:.4f}, "
f"duration: {duration}, "
f"current: {current_price:.4f}, "
f"uplimit: {trailing_buy['trailing_buy_order_uplimit']:.4f}, "
f"profit: {self.current_trailing_buy_profit_ratio(pair, current_price)*100:.2f}%, "
f"offset: {trailing_buy['offset']}")
def trailing_sell_info(self, pair: str, current_price: float):
# current_time live, dry run
current_time = datetime.now(timezone.utc)
if not self.debug_mode:
return
trailing_sell = self.trailing_sell(pair)
duration = 0
try:
duration = (current_time - trailing_sell['start_trailing_time'])
except TypeError:
duration = 0
finally:
logger.info("'\033[36m'SELL: "
f"pair: {pair} : "
f"start: {trailing_sell['start_trailing_sell_price']:.4f}, "
f"duration: {duration}, "
f"current: {current_price:.4f}, "
f"downlimit: {trailing_sell['trailing_sell_order_downlimit']:.4f}, "
f"profit: {self.current_trailing_sell_profit_ratio(pair, current_price)*100:.2f}%, "
f"offset: {trailing_sell['offset']}")
def current_trailing_buy_profit_ratio(self, pair: str, current_price: float) -> float:
trailing_buy = self.trailing_buy(pair)
if trailing_buy['trailing_buy_order_started']:
return (trailing_buy['start_trailing_price'] - current_price) / trailing_buy['start_trailing_price']
else:
return 0
def current_trailing_sell_profit_ratio(self, pair: str, current_price: float) -> float:
trailing_sell = self.trailing_sell(pair)
if trailing_sell['trailing_sell_order_started']:
return (current_price - trailing_sell['start_trailing_sell_price'])/ trailing_sell['start_trailing_sell_price']
#return 0-((trailing_sell['start_trailing_sell_price'] - current_price) / trailing_sell['start_trailing_sell_price'])
else:
return 0
    def trailing_buy_offset(self, dataframe, pair: str, current_price: float):
        """Return the rebound limit before a buy, as a fraction of the initial price.

        Returns None to stop trailing (it restarts at the next buy signal), or
        the string 'forcebuy' to buy immediately.
        Example with 0.5%: initial price 100 (uplimit 100.5); 2nd price 99
        (no buy, uplimit -> 99.5); 3rd price 98 (no buy, uplimit -> 98.5);
        4th price 99 -> BUY.
        """
        current_trailing_profit_ratio = self.current_trailing_buy_profit_ratio(pair, current_price)
        last_candle = dataframe.iloc[-1]
        # Scale the offset by normalized volatility of the last candle.
        adapt = (last_candle['perc_norm']).round(5)
        default_offset = 0.0045 * (1 + adapt) #NOTE: default_offset 0.0045 <--> 0.009
        trailing_buy = self.trailing_buy(pair)
        if not trailing_buy['trailing_buy_order_started']:
            return default_offset
        # example with duration and indicators
        # dry run, live only
        last_candle = dataframe.iloc[-1]
        current_time = datetime.now(timezone.utc)
        trailing_duration = current_time - trailing_buy['start_trailing_time']
        if trailing_duration.total_seconds() > self.trailing_expire_seconds:
            if ((current_trailing_profit_ratio > 0) and (last_candle['buy'] == 1)):
                # trailing expired, price under first signal, buy signal still active -> buy
                return 'forcebuy'
            else:
                # wait for next signal
                return None
        elif (self.trailing_buy_uptrend_enabled and (trailing_duration.total_seconds() < self.trailing_expire_seconds_uptrend) and (current_trailing_profit_ratio < (-1 * self.min_uptrend_trailing_profit))):
            # within the uptrend window and price rose past the uptrend threshold -> buy now
            return 'forcebuy'
        if current_trailing_profit_ratio < 0:
            # current price is higher than initial price
            return default_offset
        # Dict iterates in insertion order: the first (largest) threshold under
        # the current ratio decides the offset.
        trailing_buy_offset = {
            0.06: 0.02,
            0.03: 0.01,
            0: default_offset,
        }
        for key in trailing_buy_offset:
            if current_trailing_profit_ratio > key:
                return trailing_buy_offset[key]
        return default_offset
    def trailing_sell_offset(self, dataframe, pair: str, current_price: float):
        """Return the pullback limit before a sell, as a fraction of the initial price.

        Mirror of trailing_buy_offset for exits: returns None to stop trailing
        (it restarts at the next sell signal), or the string 'forcesell' to
        sell immediately.
        """
        current_trailing_sell_profit_ratio = self.current_trailing_sell_profit_ratio(pair, current_price)
        last_candle = dataframe.iloc[-1]
        # Scale the offset by normalized volatility of the last candle.
        adapt = (last_candle['perc_norm']).round(5)
        default_offset = 0.003 * (1 + adapt) #NOTE: default_offset 0.003 <--> 0.006
        trailing_sell = self.trailing_sell(pair)
        if not trailing_sell['trailing_sell_order_started']:
            return default_offset
        # example with duration and indicators
        # dry run, live only
        last_candle = dataframe.iloc[-1]
        current_time = datetime.now(timezone.utc)
        trailing_duration = current_time - trailing_sell['start_trailing_time']
        if trailing_duration.total_seconds() > self.trailing_expire_seconds:
            if ((current_trailing_sell_profit_ratio > 0) and (last_candle['sell'] != 0)):
                # trailing expired, price over first signal, sell signal still active -> sell
                return 'forcesell'
            else:
                # wait for next signal
                return None
        elif (self.trailing_sell_uptrend_enabled and (trailing_duration.total_seconds() < self.trailing_expire_seconds_uptrend) and (current_trailing_sell_profit_ratio < (-1 * self.min_uptrend_trailing_profit))):
            # within the uptrend window and the ratio fell past the threshold -> sell now
            return 'forcesell'
        if current_trailing_sell_profit_ratio > 0:
            # current price is above the trailing start price -> keep default offset
            return default_offset
        # Only one active threshold remains (others commented out); since the
        # ratio is <= 0 here, the 0.1 bucket always matches.
        trailing_sell_offset = {
            # 0.06: 0.02,
            # 0.03: 0.01,
            0.1: default_offset,
        }
        for key in trailing_sell_offset:
            if current_trailing_sell_profit_ratio < key:
                return trailing_sell_offset[key]
        return default_offset
# end of trailing sell parameters
# -----------------------------------------------------
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
dataframe = super().populate_indicators(dataframe, metadata)
self.trailing_buy(metadata['pair'])
self.trailing_sell(metadata['pair'])
return dataframe
def confirm_trade_entry(self, pair: str, order_type: str, amount: float, rate: float, time_in_force: str, **kwargs) -> bool:
val = super().confirm_trade_entry(pair, order_type, amount, rate, time_in_force, **kwargs)
if val:
if self.trailing_buy_order_enabled and self.config['runmode'].value in ('live', 'dry_run'):
val = False
dataframe, _ = self.dp.get_analyzed_dataframe(pair, self.timeframe)
if(len(dataframe) >= 1):
last_candle = dataframe.iloc[-1].squeeze()
current_price = rate
trailing_buy = self.trailing_buy(pair)
trailing_buy_offset = self.trailing_buy_offset(dataframe, pair, current_price)
if trailing_buy['allow_trailing']:
if (not trailing_buy['trailing_buy_order_started'] and (last_candle['buy'] == 1)):
# start trailing buy
trailing_buy['trailing_buy_order_started'] = True
trailing_buy['trailing_buy_order_uplimit'] = last_candle['close']
trailing_buy['start_trailing_price'] = last_candle['close']
trailing_buy['buy_tag'] = last_candle['buy_tag']
trailing_buy['start_trailing_time'] = datetime.now(timezone.utc)
trailing_buy['offset'] = 0
self.trailing_buy_info(pair, current_price)
logger.info(f'start trailing buy for {pair} at {last_candle["close"]}')
elif trailing_buy['trailing_buy_order_started']:
if trailing_buy_offset == 'forcebuy':
# buy in custom conditions
val = True
ratio = "%.2f" % ((self.current_trailing_buy_profit_ratio(pair, current_price)) * 100)
self.trailing_buy_info(pair, current_price)
logger.info(f"price OK for {pair} ({ratio} %, {current_price}), order may not be triggered if all slots are full")
elif trailing_buy_offset is None:
# stop trailing buy custom conditions
self.trailing_buy(pair, reinit=True)
logger.info(f'STOP trailing buy for {pair} because "trailing buy offset" returned None')
elif current_price < trailing_buy['trailing_buy_order_uplimit']:
# update uplimit
old_uplimit = trailing_buy["trailing_buy_order_uplimit"]
self.custom_info_trail_buy[pair]['trailing_buy']['trailing_buy_order_uplimit'] = min(current_price * (1 + trailing_buy_offset), self.custom_info_trail_buy[pair]['trailing_buy']['trailing_buy_order_uplimit'])
self.custom_info_trail_buy[pair]['trailing_buy']['offset'] = trailing_buy_offset
self.trailing_buy_info(pair, current_price)
logger.info(f'update trailing | |
# greykite/tests/algo/changepoint/adalasso/test_changepoints_utils.py
from datetime import datetime as dt
import numpy as np
import pandas as pd
import pytest
from greykite.algo.changepoint.adalasso.changepoints_utils import adaptive_lasso_cv
from greykite.algo.changepoint.adalasso.changepoints_utils import build_seasonality_feature_df_from_detection_result
from greykite.algo.changepoint.adalasso.changepoints_utils import build_seasonality_feature_df_with_changes
from greykite.algo.changepoint.adalasso.changepoints_utils import build_trend_feature_df_with_changes
from greykite.algo.changepoint.adalasso.changepoints_utils import check_freq_unit_at_most_day
from greykite.algo.changepoint.adalasso.changepoints_utils import combine_detected_and_custom_trend_changepoints
from greykite.algo.changepoint.adalasso.changepoints_utils import compute_fitted_components
from greykite.algo.changepoint.adalasso.changepoints_utils import compute_min_changepoint_index_distance
from greykite.algo.changepoint.adalasso.changepoints_utils import estimate_seasonality_with_detected_changepoints
from greykite.algo.changepoint.adalasso.changepoints_utils import estimate_trend_with_detected_changepoints
from greykite.algo.changepoint.adalasso.changepoints_utils import filter_changepoints
from greykite.algo.changepoint.adalasso.changepoints_utils import find_neighbor_changepoints
from greykite.algo.changepoint.adalasso.changepoints_utils import get_changes_from_beta
from greykite.algo.changepoint.adalasso.changepoints_utils import get_seasonality_changepoint_df_cols
from greykite.algo.changepoint.adalasso.changepoints_utils import get_seasonality_changes_from_adaptive_lasso
from greykite.algo.changepoint.adalasso.changepoints_utils import get_trend_changepoint_dates_from_cols
from greykite.algo.changepoint.adalasso.changepoints_utils import get_trend_changes_from_adaptive_lasso
from greykite.algo.changepoint.adalasso.changepoints_utils import get_yearly_seasonality_changepoint_dates_from_freq
from greykite.algo.changepoint.adalasso.changepoints_utils import plot_change
from greykite.common.testing_utils import generate_df_for_tests
@pytest.fixture
def hourly_data():
    """Generate 100 days (24 * 100 hourly points) of data for tests."""
    return generate_df_for_tests(freq="H", periods=24 * 100)
def test_check_freq_unit_at_most_day():
    """Sub-day frequency units pass; 'M' (and above) raise ValueError."""
    # valid frequencies return without raising
    for freq in ("D", "50H"):
        check_freq_unit_at_most_day(freq, "name")
    # units of a month or longer are rejected with a descriptive message
    expected_msg = (
        "In name, the maximal unit is 'D', "
        "i.e., you may use units no more than 'D' such as"
        "'10D', '5H', '100T', '200S'. The reason is that 'W', 'M' "
        "or higher has either cycles or indefinite number of days, "
        "thus is not parsable by pandas as timedelta.")
    with pytest.raises(ValueError, match=expected_msg):
        check_freq_unit_at_most_day("2M", "name")
def test_build_trend_feature_df_with_changes(hourly_data):
    """build_trend_feature_df_with_changes yields one column per trend changepoint.

    Covers the default changepoint count, a custom uniform grid, the
    zero-changepoint edge case, and exact feature values on a tiny daily series.
    """
    # test default parameters
    df = hourly_data["df"]
    df_trend = build_trend_feature_df_with_changes(
        df=df,
        time_col="ts"
    )
    assert df_trend.shape[0] == df.shape[0]
    assert df_trend.shape[1] == 101  # default value
    # test given parameters: 50 changepoints -> 51 columns
    df_trend = build_trend_feature_df_with_changes(
        df=df,
        time_col="ts",
        changepoints_dict={
            "method": "uniform",
            "n_changepoints": 50
        }
    )
    assert df_trend.shape[0] == df.shape[0]
    assert df_trend.shape[1] == 51
    # test no change point: only the base growth column remains
    df_trend = build_trend_feature_df_with_changes(
        df=df,
        time_col="ts",
        changepoints_dict={
            "method": "uniform",
            "n_changepoints": 0
        }
    )
    assert df_trend.shape[0] == df.shape[0]
    assert df_trend.shape[1] == 1
    # test result values
    df = pd.DataFrame(
        {
            "ts": [dt(2020, 1, 1),
                   dt(2020, 1, 2),
                   dt(2020, 1, 3),
                   dt(2020, 1, 4),
                   dt(2020, 1, 5),
                   dt(2020, 1, 6)],
            "y": [1, 2, 3, 4, 5, 6]
        }
    )
    df_trend = build_trend_feature_df_with_changes(
        df=df,
        time_col="ts",
        changepoints_dict={
            "method": "uniform",
            "n_changepoints": 2
        }
    )
    # Each column stays 0 until its changepoint date, then grows by 1/366 per day.
    expected_df_trend = pd.DataFrame(
        {
            "changepoint0_2020_01_01_00": [0, 1 / 366, 2 / 366, 3 / 366, 4 / 366, 5 / 366],
            "changepoint1_2020_01_03_00": [0, 0, 0, 1 / 366, 2 / 366, 3 / 366],
            "changepoint2_2020_01_05_00": [0, 0, 0, 0, 0, 1 / 366]
        },
        index=[dt(2020, 1, 1),
               dt(2020, 1, 2),
               dt(2020, 1, 3),
               dt(2020, 1, 4),
               dt(2020, 1, 5),
               dt(2020, 1, 6)]
    )
    assert df_trend.round(3).equals(expected_df_trend.round(3))
def test_build_seasonality_feature_df_with_changes(hourly_data):
    """Seasonality feature matrix grows one Fourier block per changepoint.

    With n changepoints the column count is block_width * (n + 1): 22 columns
    per block with the default components (assumed default orders — TODO
    confirm), 16 per block for a single yearly component of order 8
    (2 sin/cos columns per harmonic).
    """
    # test default parameters
    df = hourly_data["df"]
    df_seasonality = build_seasonality_feature_df_with_changes(
        df=df,
        time_col="ts"
    )
    assert df_seasonality.shape[0] == df.shape[0]
    assert df_seasonality.shape[1] == 22  # default value
    # test given parameters: 10 changepoints + original block = 11 blocks
    df_seasonality = build_seasonality_feature_df_with_changes(
        df=df,
        time_col="ts",
        changepoints_dict={
            "method": "uniform",
            "n_changepoints": 10
        }
    )
    assert df_seasonality.shape[0] == df.shape[0]
    assert df_seasonality.shape[1] == 22 * 11
    # test given parameters: yearly-only component of order 8 -> 16 columns/block
    df_seasonality = build_seasonality_feature_df_with_changes(
        df=df,
        time_col="ts",
        changepoints_dict={
            "method": "uniform",
            "n_changepoints": 10
        },
        fs_components_df=pd.DataFrame({
            "name": ["conti_year"],
            "period": [1.0],
            "order": [8],
            "seas_names": ["yearly"]})
    )
    assert df_seasonality.shape[0] == df.shape[0]
    assert df_seasonality.shape[1] == 16 * 11
    # test no change point
    df_seasonality = build_seasonality_feature_df_with_changes(
        df=df,
        time_col="ts",
        changepoints_dict={
            "method": "uniform",
            "n_changepoints": 0
        }
    )
    assert df_seasonality.shape[0] == df.shape[0]
    assert df_seasonality.shape[1] == 22
    # test result values
    df = pd.DataFrame(
        {
            "ts": [dt(2020, 1, 1),
                   dt(2020, 1, 2),
                   dt(2020, 1, 3),
                   dt(2020, 1, 4),
                   dt(2020, 1, 5)],
            "y": [1, 2, 3, 4, 5]
        }
    )
    df_seasonality = build_seasonality_feature_df_with_changes(
        df=df,
        time_col="ts",
        changepoints_dict={
            "method": "uniform",
            "n_changepoints": 1
        },
        fs_components_df=pd.DataFrame({
            "name": ["conti_year"],
            "period": [1.0],
            "order": [1],
            "seas_names": ["yearly"]})
    )
    freq = 2 * np.pi / 366
    # The changepoint block is zeroed before 2020-01-03, then mirrors the
    # original yearly sin/cos features.
    expected_df_seasonality = pd.DataFrame(
        {
            "sin1_conti_year_yearly": [0, np.sin(freq * 1), np.sin(freq * 2), np.sin(freq * 3), np.sin(freq * 4)],
            "cos1_conti_year_yearly": [1, np.cos(freq * 1), np.cos(freq * 2), np.cos(freq * 3), np.cos(freq * 4)],
            "sin1_conti_year_yearly_2020_01_03_00": [0, 0, np.sin(freq * 2), np.sin(freq * 3), np.sin(freq * 4)],
            "cos1_conti_year_yearly_2020_01_03_00": [0, 0, np.cos(freq * 2), np.cos(freq * 3), np.cos(freq * 4)],
        },
        index=[dt(2020, 1, 1),
               dt(2020, 1, 2),
               dt(2020, 1, 3),
               dt(2020, 1, 4),
               dt(2020, 1, 5)]
    )
    pd.testing.assert_frame_equal(df_seasonality, expected_df_seasonality, check_names=False)
def test_build_seasonality_feature_df_with_detection_result():
    """Feature matrix built from detected seasonality changepoints.

    Checks three configurations: with the original (pre-changepoint) Fourier
    block, without it, and filtered down to the weekly component only (which
    warns about dropped yearly changepoints).
    """
    df = pd.DataFrame({
        "ts": pd.date_range(start="2020-01-06", end="2020-01-12", freq="D"),
        "y": list(range(7))
    })
    seasonality_changepoints = {
        "weekly": list(pd.to_datetime(["2020-01-08", "2020-01-11"])),
        "yearly": list(pd.to_datetime(["2020-01-09"]))
    }
    seasonality_components_df = pd.DataFrame({
        "name": ["tow", "conti_year"],
        "period": [7.0, 1.0],
        "order": [1, 2],
        "seas_names": ["weekly", "yearly"]
    })
    # we only assert values equal, since indices may differ
    # with overall block
    seasonality_df = build_seasonality_feature_df_from_detection_result(
        df=df,
        time_col="ts",
        seasonality_changepoints=seasonality_changepoints,
        seasonality_components_df=seasonality_components_df,
        include_original_block=True
    )
    # date index for changepoints
    week_cp1 = 1  # dates start from the 6th, first weekly cp is the 8th, last index before cp is 1 (from Monday)
    week_cp2 = 4  # dates start from the 6th, second weekly cp is the 11th, last index before cp is 4 (from Monday)
    year_cp1 = 7  # dates start from the 6th, first yearly cp is the 9th, last index before cp is 7 (from the 1st)
    # Post-changepoint columns repeat the Fourier terms but evaluate to zero
    # before the changepoint (sin(0) = 0, cos(pi/2) = 0).
    expected_df = pd.DataFrame({
        "sin1_tow_weekly": np.sin([2 * np.pi * i / 7 for i in range(7)]),
        "cos1_tow_weekly": np.cos([2 * np.pi * i / 7 for i in range(7)]),
        "sin1_tow_weekly_2020_01_08_00": np.sin([2 * np.pi * i / 7 if i > week_cp1 else 0 for i in range(7)]),
        "cos1_tow_weekly_2020_01_08_00": np.cos([2 * np.pi * i / 7 if i > week_cp1 else np.pi / 2 for i in range(7)]),
        "sin1_tow_weekly_2020_01_11_00": np.sin([2 * np.pi * i / 7 if i > week_cp2 else 0 for i in range(7)]),
        "cos1_tow_weekly_2020_01_11_00": np.cos([2 * np.pi * i / 7 if i > week_cp2 else np.pi / 2 for i in range(7)]),
        # start date the 6th is 5 days from the 1st, and end date the 12th is 11 days from the 1st
        "sin1_conti_year_yearly": np.sin([2 * np.pi * i / 366 for i in range(5, 12)]),
        "cos1_conti_year_yearly": np.cos([2 * np.pi * i / 366 for i in range(5, 12)]),
        "sin2_conti_year_yearly": np.sin([2 * np.pi * i / 366 * 2 for i in range(5, 12)]),
        "cos2_conti_year_yearly": np.cos([2 * np.pi * i / 366 * 2 for i in range(5, 12)]),
        "sin1_conti_year_yearly_2020_01_09_00": np.sin(
            [2 * np.pi * i / 366 if i > year_cp1 else 0 for i in range(5, 12)]),
        "cos1_conti_year_yearly_2020_01_09_00": np.cos(
            [2 * np.pi * i / 366 if i > year_cp1 else np.pi / 2 for i in range(5, 12)]),
        "sin2_conti_year_yearly_2020_01_09_00": np.sin(
            [2 * np.pi * i / 366 * 2 if i > year_cp1 else 0 for i in range(5, 12)]),
        "cos2_conti_year_yearly_2020_01_09_00": np.cos(
            [2 * np.pi * i / 366 * 2 if i > year_cp1 else np.pi / 2 for i in range(5, 12)])
    },
        index=df["ts"])
    pd.testing.assert_frame_equal(seasonality_df, expected_df, check_names=False)
    # without overall block
    seasonality_df = build_seasonality_feature_df_from_detection_result(
        df=df,
        time_col="ts",
        seasonality_changepoints=seasonality_changepoints,
        seasonality_components_df=seasonality_components_df,
        include_original_block=False
    )
    expected_df = expected_df[[
        "sin1_tow_weekly_2020_01_08_00",
        "cos1_tow_weekly_2020_01_08_00",
        "sin1_tow_weekly_2020_01_11_00",
        "cos1_tow_weekly_2020_01_11_00",
        "sin1_conti_year_yearly_2020_01_09_00",
        "cos1_conti_year_yearly_2020_01_09_00",
        "sin2_conti_year_yearly_2020_01_09_00",
        "cos2_conti_year_yearly_2020_01_09_00"
    ]]
    pd.testing.assert_frame_equal(seasonality_df, expected_df, check_names=False)
    # with weekly only: yearly changepoints are dropped with a warning
    with pytest.warns(UserWarning) as record:
        seasonality_df = build_seasonality_feature_df_from_detection_result(
            df=df,
            time_col="ts",
            seasonality_changepoints=seasonality_changepoints,
            seasonality_components_df=seasonality_components_df,
            include_original_block=False,
            include_components=["weekly"]
        )
        assert (f"The following seasonality components have detected seasonality changepoints"
                f" but these changepoints are not included in the model,"
                f" because the seasonality component is not included in the model. {['yearly']}") in record[0].message.args[0]
    expected_df = expected_df[[
        "sin1_tow_weekly_2020_01_08_00",
        "cos1_tow_weekly_2020_01_08_00",
        "sin1_tow_weekly_2020_01_11_00",
        "cos1_tow_weekly_2020_01_11_00"
    ]]
    pd.testing.assert_frame_equal(seasonality_df, expected_df, check_names=False)
def test_compute_fitted_components():
    """compute_fitted_components sums coefficient-weighted columns picked by regex."""
    # One shared design matrix: trend columns plus yearly Fourier blocks.
    feature_df = pd.DataFrame(
        {
            "changepoint0": [1, 1, 1, 1],
            "changepoint1": [0, 2, 2, 2],
            "sin1_conti_year_yearly_cp0": [np.sin(1), np.sin(2), np.sin(3), np.sin(4)],
            "cos1_conti_year_yearly_cp0": [np.cos(1), np.cos(2), np.cos(3), np.cos(4)],
            "sin1_conti_year_yearly_cp1": [0, 0, np.sin(3), np.sin(4)],
            "cos1_conti_year_yearly_cp1": [0, 0, np.cos(3), np.cos(4)]
        }
    )
    # Trend: only the "^changepoint" columns contribute, plus the intercept.
    trend = compute_fitted_components(
        x=feature_df,
        coef=np.array([1, 2, 3, 4, 5, 6]),
        regex="^changepoint",
        include_intercept=True,
        intercept=1
    )
    expected_trend = pd.Series(
        [1 * 1 + 2 * 0 + 1] + [1 * 1 + 2 * 2 + 1] * 3
    )
    assert trend.equals(expected_trend)
    # Yearly seasonality: every column matching "yearly", no intercept.
    unit_coef = np.array([1, 1, 1, 1, 1, 1])
    seasonality = compute_fitted_components(
        x=feature_df,
        coef=unit_coef,
        regex="^.*yearly.*$",
        include_intercept=False
    )
    expected_seasonality = pd.Series(
        [
            np.sin(1) + np.cos(1),
            np.sin(2) + np.cos(2),
            np.sin(3) + np.cos(3) + np.sin(3) + np.cos(3),
            np.sin(4) + np.cos(4) + np.sin(4) + np.cos(4)
        ]
    )
    assert seasonality.equals(expected_seasonality)
    # Asking for the intercept without supplying one must raise.
    with pytest.raises(
            ValueError,
            match="``intercept`` must be provided when ``include_intercept`` is True."):
        compute_fitted_components(
            x=feature_df,
            coef=unit_coef,
            regex="^.*yearly.*$",
            include_intercept=True
        )
def test_plot_change():
observations = pd.Series(
| |
(InstructionTextTokenType.RegisterToken, 'R15'),
(InstructionTextTokenType.TextToken, '+'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x0039, 0xf0ff),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'movrt',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'movrt'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x0029, 0xf0ff),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'movt',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'movt'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x0068, 0xffff),
'm': (0x0, 0x0),
'n': (0x0, 0x0),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'nott',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'nott'),
(InstructionTextTokenType.TextToken, ' ')
],
},
{
'opmask': (0x6008, 0xf00f),
'm': (0xf0, 0x4),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'swap.b',
'width': 1,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.REG, 'R{m}', False, False, 0, 0),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'swap.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.RegisterToken, 'R{m}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x6009, 0xf00f),
'm': (0xf0, 0x4),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'swap.w',
'width': 2,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.REG, 'R{m}', False, False, 0, 0),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'swap.w'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.RegisterToken, 'R{m}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x200d, 0xf00f),
'm': (0xf0, 0x4),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'xtrct',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.REG, 'R{m}', False, False, 0, 0),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'xtrct'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.RegisterToken, 'R{m}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x30094000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'band.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, False, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'band.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@'),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x3009c000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bandnot.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bandnot.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x30090000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bclr.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bclr.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x8600, 0xff0f),
'm': (0x0, 0x0),
'n': (0xf0, 0x4),
'imm': (0x7, 0x0),
'disp': 0x0,
'cmd': 'bclr',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bclr'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x30093000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bld.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bld.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x8708, 0xff0f),
'm': (0x0, 0x0),
'n': (0xf0, 0x4),
'imm': (0x7, 0x0),
'disp': 0x0,
'cmd': 'bld',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bld'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x3009b000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bldnot.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bldnot.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x30095000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bor.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bor.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x3009d000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bornot.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bornot.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x30091000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bset.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bset.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x8608, 0xff0f),
'm': (0x0, 0x0),
'n': (0xf0, 0x4),
'imm': (0x7, 0x0),
'disp': 0x0,
'cmd': 'bset',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bset'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x30092000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bst.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bst.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x8700, 0xff0f),
'm': (0x0, 0x0),
'n': (0xf0, 0x4),
'imm': (0x7, 0x0),
'disp': 0x0,
'cmd': 'bst',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bst'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}')
],
},
{
'opmask': (0x30096000, 0xf0fff000),
'm': (0x0, 0x0),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'bxor.b',
'width': 1,
'size': 4,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.IMM, '0x{imm:x}', False, False, 0, 1),
Oper(OpType.DISP, '0x{disp:x}', True, True, 0, 2),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': [
(InstructionTextTokenType.InstructionToken, 'bxor.b'),
(InstructionTextTokenType.TextToken, ' '),
(InstructionTextTokenType.IntegerToken, '0x{imm:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.TextToken, '@('),
(InstructionTextTokenType.PossibleAddressToken, '0x{disp:x}'),
(InstructionTextTokenType.OperandSeparatorToken, ', '),
(InstructionTextTokenType.RegisterToken, 'R{n}'),
(InstructionTextTokenType.TextToken, ')')
],
},
{
'opmask': (0x300c, 0xf00f),
'm': (0xf0, 0x4),
'n': (0xf00, 0x8),
'imm': (0x0, 0x0),
'disp': 0x0,
'cmd': 'add',
'width': 0,
'size': 2,
'is_label': False,
'is_delay': False,
'args': [
Oper(OpType.REG, 'R{m}', False, False, 0, 0),
Oper(OpType.REG, 'R{n}', False, False, 0, 0)
],
'tokens': | |
data.
Exactly one of *source_file* and *source_data* must be given.
If *source_file* is given:
- If *topdir* is too, ``projects`` is rooted there.
- Otherwise, *topdir* is found starting at *source_file*.
If *source_data* is given:
- If *topdir* is too, ``projects`` is rooted there.
- Otherwise, there is no root: ``projects[i].abspath`` and
other absolute path attributes are ``None``.
- If ``source_data['manifest']['self']['path']`` is unset,
*manifest_path* is used as a fallback.
The *importer* kwarg, if given, is a callable. It is called
when *source_file* requires importing manifest data that
aren't found locally. It will be called as:
``importer(project, file)``
where ``project`` is a `Project` and ``file`` is the missing
file. The file's contents at refs/heads/manifest-rev should
usually be returned, potentially after fetching the project's
revision from its remote URL and updating that ref.
The return value should be a string containing manifest data,
or a list of strings if ``file`` is a directory containing
YAML files. A return value of None will cause the import to be
ignored.
Exceptions raised:
- `MalformedManifest`: if the manifest data is invalid
- `ManifestImportFailed`: if the manifest could not be
resolved due to import errors
- `ManifestVersionError`: if this version of west is too
old to parse the manifest
- `WestNotFound`: if *topdir* was needed and not found
- ``ValueError``: for other invalid arguments
:param source_file: YAML file containing manifest data
:param source_data: parsed YAML data as a Python object, or a
string containing unparsed YAML data
:param manifest_path: fallback `ManifestProject` ``path``
attribute
:param topdir: used as the west workspace top level
directory
:param importer: callback to resolve missing manifest import
data
:param import_flags: bit mask, controls import resolution
'''
if source_file and source_data:
raise ValueError('both source_file and source_data were given')
if not _flags_ok(import_flags):
raise ValueError(f'bad import_flags {import_flags:x}')
self.path: Optional[str] = None
'''Path to the file containing the manifest, or None if
created from data rather than the file system.
'''
if source_file:
source_file = Path(source_file)
source_data = source_file.read_text()
self.path = os.path.abspath(source_file)
if not source_data:
self._malformed('manifest contains no data')
if isinstance(source_data, str):
source_data = _load(source_data)
# Validate the manifest. Wrap a couple of the exceptions with
# extra context about the problematic file in case of errors,
# to help debugging.
try:
validate(source_data)
except ManifestVersionError as mv:
raise ManifestVersionError(mv.version, file=source_file) from mv
except MalformedManifest as mm:
self._malformed(mm.args[0], parent=mm)
except TypeError as te:
self._malformed(te.args[0], parent=te)
# The above validate() and exception handling block's job is
# to ensure this, but pacify the type checker in a way that
# crashes if something goes wrong with that.
assert isinstance(source_data, dict)
self._projects: List[Project] = []
'''Sequence of `Project` objects representing manifest
projects.
Index 0 (`MANIFEST_PROJECT_INDEX`) contains a
`ManifestProject` representing the manifest repository. The
rest of the sequence contains projects in manifest file order
(or resolution order if the manifest contains imports).
'''
self.topdir: Optional[str] = None
'''The west workspace's top level directory, or None.'''
if topdir:
self.topdir = os.fspath(topdir)
self.has_imports: bool = False
# This will be overwritten in _load() as needed.
self.group_filter: GroupFilterType = []
# Private state which backs self.group_filter. This also
# gets overwritten as needed.
self._disabled_groups: Set[str] = set()
# Stash the importer and flags in instance attributes. These
# don't change as we recurse, so they don't belong in _import_ctx.
self._importer: ImporterType = importer or _default_importer
self._import_flags = import_flags
ctx: Optional[_import_ctx] = \
kwargs.get('import-context') # type: ignore
if ctx is None:
ctx = _import_ctx(projects={},
group_filter=[],
imap_filter=None,
path_prefix=Path('.'))
else:
assert isinstance(ctx, _import_ctx)
if manifest_path:
mpath: Optional[Path] = Path(manifest_path)
else:
mpath = None
self._load(source_data['manifest'], mpath, ctx)
def get_projects(self,
# any str name is also a PathType
project_ids: Iterable[PathType],
allow_paths: bool = True,
only_cloned: bool = False) -> List[Project]:
'''Get a list of `Project` objects in the manifest from
*project_ids*.
If *project_ids* is empty, a copy of ``self.projects``
attribute is returned as a list. Otherwise, the returned list
has projects in the same order as *project_ids*.
``ValueError`` is raised if:
- *project_ids* contains unknown project IDs
- (with *only_cloned*) an uncloned project was found
The ``ValueError`` *args* attribute is a 2-tuple with a list
of unknown *project_ids* at index 0, and a list of uncloned
`Project` objects at index 1.
:param project_ids: a sequence of projects, identified by name
or (absolute or relative) path. Names are matched first; path
checking can be disabled with *allow_paths*.
:param allow_paths: if false, *project_ids* is assumed to contain
names only, not paths
:param only_cloned: raise an exception for uncloned projects
'''
projects = list(self.projects)
unknown: List[PathType] = [] # project_ids with no Projects
uncloned: List[Project] = [] # if only_cloned, the uncloned Projects
ret: List[Project] = [] # result list of resolved Projects
# If no project_ids are specified, use all projects.
if not project_ids:
if only_cloned:
uncloned = [p for p in projects if not p.is_cloned()]
if uncloned:
raise ValueError(unknown, uncloned)
return projects
# Otherwise, resolve each of the project_ids to a project,
# returning the result or raising ValueError.
for pid in project_ids:
project: Optional[Project] = None
if isinstance(pid, str):
project = self._projects_by_name.get(pid)
if project is None and allow_paths:
project = self._projects_by_rpath.get(Path(pid).resolve())
if project is None:
unknown.append(pid)
continue
ret.append(project)
if only_cloned and not project.is_cloned():
uncloned.append(project)
if unknown or (only_cloned and uncloned):
raise ValueError(unknown, uncloned)
return ret
def _as_dict_helper(
self, pdict: Optional[Callable[[Project], Dict]] = None) \
-> Dict:
# pdict: returns a Project's dict representation.
# By default, it's Project.as_dict.
if pdict is None:
pdict = Project.as_dict
projects = list(self.projects)
del projects[MANIFEST_PROJECT_INDEX]
project_dicts = [pdict(p) for p in projects]
# This relies on insertion-ordered dictionaries for
# predictability, which is a CPython 3.6 implementation detail
# and Python 3.7+ guarantee.
r: Dict[str, Any] = {}
r['manifest'] = {}
if self.group_filter:
r['manifest']['group-filter'] = self.group_filter
r['manifest']['projects'] = project_dicts
r['manifest']['self'] = self.projects[MANIFEST_PROJECT_INDEX].as_dict()
return r
def as_dict(self) -> Dict:
'''Returns a dict representing self, fully resolved.
The value is "resolved" in that the result is as if all
projects had been defined in a single manifest without any
import attributes.
'''
return self._as_dict_helper()
def as_frozen_dict(self) -> Dict:
'''Returns a dict representing self, but frozen.
The value is "frozen" in that all project revisions are the
full SHAs pointed to by `QUAL_MANIFEST_REV_BRANCH` references.
Raises ``RuntimeError`` if a project SHA can't be resolved.
'''
def pdict(p):
if not p.is_cloned():
raise RuntimeError(f'cannot freeze; project {p.name} '
'is uncloned')
try:
sha = p.sha(QUAL_MANIFEST_REV_BRANCH)
except subprocess.CalledProcessError as e:
raise RuntimeError(f'cannot freeze; project {p.name} '
f'ref {QUAL_MANIFEST_REV_BRANCH} '
'cannot be resolved to a SHA') from e
d = p.as_dict()
d['revision'] = sha
return d
return self._as_dict_helper(pdict=pdict)
def as_yaml(self, **kwargs) -> str:
'''Returns a YAML representation for self, fully resolved.
The value is "resolved" in that the result is as if all
projects had been defined in a single manifest without any
import attributes.
:param kwargs: passed to yaml.safe_dump()
'''
return yaml.safe_dump(self.as_dict(), **kwargs)
def as_frozen_yaml(self, **kwargs) -> str:
'''Returns a YAML representation for self, but frozen.
The value is "frozen" in that all project revisions are the
full SHAs pointed to by `QUAL_MANIFEST_REV_BRANCH` references.
Raises ``RuntimeError`` if a project SHA can't be resolved.
:param kwargs: passed to yaml.safe_dump()
'''
return yaml.safe_dump(self.as_frozen_dict(), **kwargs)
    @property
    def projects(self) -> List[Project]:
        '''Read-only view of the manifest's `Project` list (backed by
        the private ``_projects`` attribute).'''
        return self._projects
def is_active(self, project: Project,
extra_filter: Optional[Iterable[str]] = None) -> bool:
'''Is a project active?
Projects with empty 'project.groups' lists are always active.
Otherwise, if any group in 'project.groups' is enabled by this
manifest's 'group-filter:' list (and the
'manifest.group-filter' local configuration option, if we have
a workspace), returns True.
Otherwise, i.e. if all of the project's groups are disabled,
this returns False.
"Inactive" projects should generally be considered absent from
the workspace for purposes like updating it, listing projects,
etc.
:param project: project to check
:param extra_filter: an optional additional group filter
'''
if not project.groups:
# Projects without any groups are always active, so just
# exit early. Note that this happens | |
##
# Copyright (c) 2006-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Access logs.
"""
__all__ = [
"DirectoryLogWrapperResource",
"RotatingFileAccessLoggingObserver",
"AMPCommonAccessLoggingObserver",
"AMPLoggingFactory",
]
import collections
import datetime
import json
import os
try:
import psutil
except ImportError:
psutil = None
from sys import platform
import time
from calendarserver.logAnalysis import getAdjustedMethodName
from twext.python.log import Logger
from twext.who.idirectory import RecordType
from txweb2 import iweb
from txweb2.log import BaseCommonAccessLoggingObserver
from txweb2.log import LogWrapperResource
from twisted.internet import protocol, task
from twisted.protocols import amp
from twistedcaldav.config import config
log = Logger()
class DirectoryLogWrapperResource(LogWrapperResource):
    """
    A L{LogWrapperResource} that also carries the directory service it
    was constructed with, retrievable via L{getDirectory}.
    """

    def __init__(self, resource, directory):
        super(DirectoryLogWrapperResource, self).__init__(resource)
        self.directory = directory

    def getDirectory(self):
        """
        Return the directory service associated with this resource.
        """
        return self.directory
class CommonAccessLoggingObserverExtensions(BaseCommonAccessLoggingObserver):
    """
    A base class for our extension to the L{BaseCommonAccessLoggingObserver}
    """

    def emit(self, eventDict):
        """
        Build an Apache "combined"-style access log line from a log event
        and pass it (with its format) to C{self.logStats}.

        Two event shapes are handled: normal HTTP requests (the event's
        "interface" is L{iweb.IRequest}) and "overloaded" connection
        events, which are logged as synthetic 503 entries.  Events of any
        other shape are ignored.
        """
        format = None
        formatArgs = None
        if eventDict.get("interface") is iweb.IRequest:
            request = eventDict["request"]
            response = eventDict["response"]
            loginfo = eventDict["loginfo"]

            # Try to determine authentication and authorization identifiers
            uid = "-"
            if getattr(request, "authnUser", None) is not None:
                def convertPrincipaltoShortName(principal):
                    if principal.record.recordType == RecordType.user:
                        return principal.record.shortNames[0]
                    else:
                        return "({rtype}){name}".format(rtype=principal.record.recordType, name=principal.record.shortNames[0],)

                uidn = convertPrincipaltoShortName(request.authnUser)
                uidz = convertPrincipaltoShortName(request.authzUser)
                # When authenticating as one user but acting as another,
                # log both identities.
                if uidn != uidz:
                    uid = '"{authn} as {authz}"'.format(authn=uidn, authz=uidz,)
                else:
                    uid = uidn

            #
            # For some methods which basically allow you to tunnel a
            # custom request (eg. REPORT, POST), the method name
            # itself doesn't tell you much about what action is being
            # requested. This allows a method to tack a submethod
            # attribute to the request, so we can provide a little
            # more detail here.
            #
            if config.EnableExtendedAccessLog and hasattr(request, "submethod"):
                method = "%s(%s)" % (request.method, request.submethod)
            else:
                method = request.method

            # Standard Apache access log fields
            format = (
                '%(host)s - %(uid)s [%(date)s]'
                ' "%(method)s %(uri)s HTTP/%(protocolVersion)s"'
                ' %(statusCode)s %(bytesSent)d'
                ' "%(referer)s" "%(userAgent)s"'
            )
            formatArgs = {
                "host": request.remoteAddr.host,
                "uid": uid,
                "date": self.logDateString(response.headers.getHeader("date", 0)),
                "method": method,
                "uri": request.uri.replace('"', "%22"),
                "protocolVersion": ".".join(str(x) for x in request.clientproto),
                "statusCode": response.code,
                "bytesSent": loginfo.bytesSent,
                "referer": request.headers.getHeader("referer", "-"),
                "userAgent": request.headers.getHeader("user-agent", "-"),
            }

            # Add extended items to format and formatArgs
            if config.EnableExtendedAccessLog:
                format += ' i=%(serverInstance)s'
                formatArgs["serverInstance"] = config.LogID if config.LogID else "0"

                if request.chanRequest:  # This can be None during tests
                    format += ' or=%(outstandingRequests)s'
                    formatArgs["outstandingRequests"] = request.chanRequest.channel.factory.outstandingRequests

                # Tags for time stamps collected along the way - the first one in the list is the initial
                # time for request creation - we use that to track the entire request/response time
                nowtime = time.time()
                if config.EnableExtendedTimingAccessLog:
                    basetime = request.timeStamps[0][1]
                    request.timeStamps[0] = ("t", time.time(),)
                    for tag, timestamp in request.timeStamps:
                        format += " %s=%%(%s).1f" % (tag, tag,)
                        formatArgs[tag] = (timestamp - basetime) * 1000
                        if tag != "t":
                            basetime = timestamp
                    if len(request.timeStamps) > 1:
                        format += " t-log=%(t-log).1f"
                        # BUGFIX: t-log measures the time from the last
                        # recorded stamp to log emission.  The previous
                        # code used (timestamp - basetime), but after the
                        # loop basetime has just been set to the last
                        # timestamp, so t-log was always ~0.
                        formatArgs["t-log"] = (nowtime - basetime) * 1000
                else:
                    format += " t=%(t).1f"
                    formatArgs["t"] = (nowtime - request.timeStamps[0][1]) * 1000

                if hasattr(request, "extendedLogItems"):
                    for k, v in sorted(request.extendedLogItems.iteritems(), key=lambda x: x[0]):
                        # Escape quotes and wrap values containing spaces.
                        k = str(k).replace('"', "%22")
                        v = str(v).replace('"', "%22")
                        if " " in v:
                            v = '"%s"' % (v,)
                        format += " %s=%%(%s)s" % (k, k,)
                        formatArgs[k] = v

                # Add the name of the XML error element for debugging purposes
                if hasattr(response, "error"):
                    format += " err=%(err)s"
                    formatArgs["err"] = response.error.qname()[1]

                fwdHeaders = request.headers.getRawHeaders("x-forwarded-for", "")
                if fwdHeaders:
                    # Limit each x-forwarded-header to 50 in case someone is
                    # trying to overwhelm the logs
                    forwardedFor = ",".join([hdr[:50] for hdr in fwdHeaders])
                    forwardedFor = forwardedFor.replace(" ", "")
                    format += " fwd=%(fwd)s"
                    formatArgs["fwd"] = forwardedFor

            # A 0.0.0.0 host indicates a connection over a unix socket;
            # recover the real client from x-forwarded-for if present.
            if formatArgs["host"] == "0.0.0.0":
                fwdHeaders = request.headers.getRawHeaders("x-forwarded-for", "")
                if fwdHeaders:
                    formatArgs["host"] = fwdHeaders[-1].split(",")[-1].strip()
                    format += " unix=%(unix)s"
                    formatArgs["unix"] = "true"

        elif "overloaded" in eventDict:
            overloaded = eventDict.get("overloaded")

            format = (
                '%(host)s - %(uid)s [%(date)s]'
                ' "%(method)s"'
                ' %(statusCode)s %(bytesSent)d'
                ' "%(referer)s" "%(userAgent)s"'
            )
            formatArgs = {
                "host": overloaded.transport.hostname,
                "uid": "-",
                "date": self.logDateString(time.time()),
                "method": "???",
                "uri": "",
                "protocolVersion": "",
                "statusCode": 503,
                "bytesSent": 0,
                "referer": "-",
                "userAgent": "-",
            }

            if config.EnableExtendedAccessLog:
                format += ' p=%(serverPort)s'
                formatArgs["serverPort"] = overloaded.transport.server.port
                format += ' or=%(outstandingRequests)s'
                formatArgs["outstandingRequests"] = overloaded.outstandingRequests

        # Write anything we got to the log and stats
        if format is not None:
            # sanitize output to mitigate log injection
            for k, v in formatArgs.items():
                if not isinstance(v, basestring):
                    continue
                v = v.replace("\r", "\\r")
                v = v.replace("\n", "\\n")
                v = v.replace("\"", "\\\"")
                formatArgs[k] = v

            formatArgs["type"] = "access-log"
            formatArgs["log-format"] = format
            self.logStats(formatArgs)
class RotatingFileAccessLoggingObserver(CommonAccessLoggingObserverExtensions):
"""
Class to do "apache" style access logging to a rotating log file. The log
file is rotated after midnight each day.
This class also currently handles the collection of system and log statistics.
"""
def __init__(self, logpath):
self.logpath = logpath
self.systemStats = None
self.statsByMinute = []
self.stats1m = None
self.stats5m = None
self.stats1h = None
def accessLog(self, message, allowrotate=True):
"""
Log a message to the file and possibly rotate if date has changed.
@param message: C{str} for the message to log.
@param allowrotate: C{True} if log rotate allowed, C{False} to log to current file
without testing for rotation.
"""
if self.shouldRotate() and allowrotate:
self.flush()
self.rotate()
if isinstance(message, unicode):
message = message.encode("utf-8")
self.f.write(message + "\n")
    def start(self):
        """
        Start logging. Open the log file and log an "open" message.
        """
        # Let the base observer hook into the logging system first,
        # then open our file and record the start.
        super(RotatingFileAccessLoggingObserver, self).start()
        self._open()
        self.accessLog("Log opened - server start: [%s]." % (datetime.datetime.now().ctime(),))
    def stop(self):
        """
        Stop logging. Log a "close" message and close the log file.
        """
        # Write the close marker before detaching; rotation is disabled
        # (allowrotate=False) for this final write.
        self.accessLog("Log closed - server stop: [%s]." % (datetime.datetime.now().ctime(),), False)
        super(RotatingFileAccessLoggingObserver, self).stop()
        self._close()
        # Also shut down the system stats monitor if one was started.
        if self.systemStats is not None:
            self.systemStats.stop()
    def _open(self):
        """
        Open the log file.
        """
        # Line-buffered append so each entry reaches the file promptly.
        self.f = open(self.logpath, "a", 1)
        # stat field 8 is st_mtime; remember its (year, month, day) so
        # shouldRotate() can detect when the date changes.
        self.lastDate = self.toDate(os.stat(self.logpath)[8])
    def _close(self):
        """
        Close the log file opened by L{_open}.
        """
        self.f.close()
    def flush(self):
        """
        Flush any buffered output to the log file.
        """
        self.f.flush()
def shouldRotate(self):
"""
Rotate when the date has changed since last write
"""
if config.RotateAccessLog:
return self.toDate() > self.lastDate
else:
return False
def toDate(self, *args):
"""
Convert a unixtime to (year, month, day) localtime tuple,
or return the current (year, month, day) localtime tuple.
This function primarily exists so you may overload it with
gmtime, or some cruft to make unit testing possible.
"""
# primarily so this can be unit tested easily
return time.localtime(*args)[:3]
def suffix(self, tupledate):
    """
    Return the log-file suffix ("YYYY_M_D") for a (year, month, day)
    tuple, or for a unixtime (which is converted via toDate() first).
    """
    try:
        return "_".join(map(str, tupledate))
    except TypeError:
        # tupledate was not iterable -- assume a float/int unixtime.
        # (Was a bare "except:", which would also have swallowed
        # KeyboardInterrupt and genuine bugs.)
        return "_".join(map(str, self.toDate(tupledate)))
def rotate(self):
    """
    Rotate the file and create a new one.
    If it's not possible to open new logfile, this will fail silently,
    and continue logging to old logfile.
    """
    # target name carries the date of the last write, e.g. "access.log.2012_7_4"
    newpath = "%s.%s" % (self.logpath, self.suffix(self.lastDate))
    if os.path.exists(newpath):
        log.info("Cannot rotate log file to '{path}' because it already exists.", path=newpath)
        return
    # allowrotate=False prevents accessLog() from recursing back into rotate()
    self.accessLog("Log closed - rotating: [%s]." % (datetime.datetime.now().ctime(),), False)
    log.info("Rotating log file to: '{path}'", path=newpath, system="Logging")
    # close, rename, then reopen -- this order ensures no writes go to the
    # renamed file and lastDate is refreshed by _open()
    self.f.close()
    os.rename(self.logpath, newpath)
    self._open()
    self.accessLog("Log opened - rotated: [%s]." % (datetime.datetime.now().ctime(),), False)
def logStats(self, stats):
    """
    Update stats and, for access-log entries, write the formatted line.

    @param stats: C{dict} of statistics; entries with C{stats["type"] ==
        "access-log"} also carry a C{"log-format"} template string.
    """
    # Only use the L{SystemMonitor} when stats socket is in use
    if config.Stats.EnableUnixStatsSocket or config.Stats.EnableTCPStatsSocket:
        # Initialize a L{SystemMonitor} on the first call
        if self.systemStats is None:
            self.systemStats = SystemMonitor()
        # Currently only storing stats for access log type
        if stats.get("type") != "access-log":
            return
        currentStats = self.ensureSequentialStats()
        self.updateStats(currentStats, stats)
    # .get() so a missing "type" key cannot raise KeyError when the stats
    # sockets are disabled (the early return above is then never reached)
    if stats.get("type") == "access-log":
        self.accessLog(stats["log-format"] % stats)
def getStats(self):
"""
Return the stats
"""
# Only use the L{SystemMonitor} when stats socket is in use
if not config.Stats.EnableUnixStatsSocket and not config.Stats.EnableTCPStatsSocket:
return {}
# Initialize a L{SystemMonitor} on the first call
if self.systemStats is None:
self.systemStats = SystemMonitor()
# The current stats
currentStats = self.ensureSequentialStats()
# Get previous minute details
if self.stats1m is None:
index = min(2, len(self.statsByMinute))
if index > 0:
self.stats1m = self.statsByMinute[-index][1]
else:
self.stats1m = self.initStats()
# Do five minute aggregate
if self.stats5m is None:
| |
<gh_stars>0
# Princeton University licenses this file to You under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy of the License at:
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
# ******************************************* LearningMechanism *******************************************************
"""
Contents
--------
* `LearningMechanism_Overview`
* `LearningMechanism_Creation`
- `LearningMechanism_Automatic_Creation`
- `LearningMechanism_Explicit_Creation`
* `LearningMechanism_Structure`
- `LearningMechanism_InputPorts`
- `LearningMechanism_Function`
- `LearningMechanism_OutputPorts`
- `LearningMechanism_Additional_Attributes`
- `LearningMechanism_Learning_Configurations`
* `LearningMechanism_Execution`
* `LearningMechanism_Class_Reference`
.. _LearningMechanism_Overview:
Overview
--------
A LearningMechanism is a `ModulatoryMechanism <ModulatoryMechanism>` that modifies the `matrix <MappingProjection.matrix>`
parameter of one or more `MappingProjections <MappingProjection>`. Its function takes one or more `error_signals
<LearningMechanism_Input_Error_Signal>` (usually the output of a `ComparatorMechanism` or one or more other
`LearningMechanisms <LearningMechanism>`), as well as information about the `MappingProjection(s) and activity
<LearningMechanism_Additional_Attributes>` that generated the error(s), and calculates a `learning_signal
<LearningMechanism.learning_signal>` that is used to modify the MappingProjection(s) by way of its
`LearningProjection(s) <LearningProjection>`. Typically, a LearningMechanism is used to "train" a single
MappingProjection (its `primary_learned_projection`), using the output of the Mechanism to which that
MappingProjection projects (its `output_source`) as the source of the error it attempts to reduce. A
LearningMechanism can be used to train multiple MappingProjections, by assigning it `additional LearningProjections
<LearningMechanism_Multiple_LearningSignals>`; however, these will all use the same `learning_signal
<LearningMechanism.learning_signal>`, generated by the `primary_learned_projection` and its associated `output_source`.
All of the MappingProjection(s) modified by a LearningMechanism must project from one `ProcessingMechanism
<ProcessingMechanism>` to another in the same `Composition`. The learning components of a Composition can be
displayed using the System's `show_graph <System.show_graph>` method with its **show_learning** argument assigned
`True` or *ALL*.
.. _LearningMechanism_Note:
*A Note about the Implementation of Learning*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The implementation of learning in PsyNeuLink was designed for exposition rather than efficiency. Unlike its
implementation in most other environments -- where the learning algorithm is tightly integrated with the
elements of processing that it modifies -- PsyNeuLink separates it into three constituent components: An
`ObjectiveMechanism` used to evaluate the most proximal source of error, a `LearningMechanism` that uses that error
(or one derived from it by another LearningMechanism) to calculate a learning signal; and `LearningProjection(s)
<LearningProjection>` that use that learning signal to modify the weight `matrix <MappingProjection.matrix>` of the
`MappingProjection(s) <MappingProjection>` being learned. This has the advantage of isolating and exposing the
constituent computations, making it clearer to students what these are and how they operate, and also making each
individually accessible for reconfiguration. However, it comes at the cost of efficiency. For efficient execution of
supervised forms of learning (e.g., reinforcement learning and backpropagation), the `AutodiffComposition` can be used,
which allows the model to be specified using PsyNeuLink, but actually executes learning using `PyTorch
<https://pytorch.org>`_.
.. _LearningMechanism_Creation:
Creating a LearningMechanism
----------------------------
A LearningMechanism can be created in any of the ways used to `create Mechanisms <Mechanism_Creation>`.
More commonly, however, LearningMechanisms are created automatically.
.. _LearningMechanism_Automatic_Creation:
*Automatic Creation*
~~~~~~~~~~~~~~~~~~~~
A LearningMechanism is created automatically when a Composition's `learning method <Composition_Learning_Methods>` is
called. In that case, a `LearningSignal`, `LearningProjection`, a `ComparatorMechanism` (in the case of `supervised
learning <>`), and any additional Projections required to implement learning that do not already exist are also
instantiated. This is described below, under `Learning Configurations <LearningMechanism_Learning_Configurations>`.
A LearningMechanism is also created automatically when either the `tuple specification
<MappingProjection_Learning_Tuple_Specification>` is used to specify learning for a MappingProjection, or a
`LearningProjection` is created without specifying its `sender <LearningProjection.sender>` attribute. However, this
is not advised, and should only be used in special circumstances, as properly configuring learning generally requires
the instantiation of several other closely related Components, as described below.
.. _LearningMechanism_Explicit_Creation:
*Explicit Creation*
~~~~~~~~~~~~~~~~~~~
If a LearningMechanism is created explicitly (using its constructor), then its **variable** and **error_sources**
arguments must be specified. The **variable** must have at least three items that are compatible (in number and type)
with the `value <InputPort.value>` of the LearningMechanism's `InputPorts <LearningMechanism_InputPorts>`. Each
item in **error_sources** must be one of the following: a `ComparatorMechanism`, for `single layer learning
<LearningMechanism_Single_Layer_Learning>` or for the last `MappingProjection` in a `learning Pathway
<Composition_Learning_Pathway>` for `multilayer learning <LearningMechanism_Multilayer_Learning>`; or a
`LearningMechanism`.
.. _LearningMechanism_Learning_Signals:
When a LearningMechanism is created explicitly, it can also be assigned existing LearningSignals and/or specified to
create these, as well as `LearningProjections <LearningProjection>` from these to specified MappingProjections. These
are specified in the **learning_signals** argument of the LearningMechanism's constructor, using any of the forms
allowed for `specifying a LearningSignal <LearningSignal_Specification>`.
.. _LearningMechanism_Structure:
Structure
---------
A LearningMechanism has three types of `InputPorts <InputPort>`, a learning `function <LearningMechanism.function>`,
and two types of `OutputPorts <OutputPort>`. These are used, respectively, to receive, compute, and transmit the
information needed to modify the MappingProjection(s) for which the LearningMechanism is responsible. In addition,
it has several attributes that govern and provide access to its operation. These are described below.
.. _LearningMechanism_InputPorts:
*InputPorts*
~~~~~~~~~~~~~
These receive the information required by the LearningMechanism's `function <LearningMechanism.function>`. They are
listed in the LearningMechanism's `input_ports <LearningMechanism.input_ports>` attribute. They have the following
names and roles (shown in the `figure <LearningMechanism_Single_Layer_Learning_Figure>` below):
.. _LearningMechanism_Activation_Input:
* *ACTIVATION_INPUT* - receives the value of the input to the `primary_learned_projection`; that is, the `value
<Projection_Base.value>` of that MappingProjection's `sender <MappingProjection.sender>`. The value is assigned
as the first item of the LearningMechanism's `variable <LearningMechanism.variable>` attribute.
.. _LearningMechanism_Activation_Output:
* *ACTIVATION_OUTPUT* - receives the value of the LearningMechanism's `output_source <LearningMechanism.output_source>`;
that is, the `value <OutputPort.value>` of the `OutputPort` of the *ProcessingMechanism* to which the
`primary_learned_projection` projects. By default, the `output_source <LearningMechanism.output_source>`'s
`primary OutputPort <OutputPort_Primary>` is used. However, a different OutputPort can be designated in
the constructor for the `output_source <LearningMechanism.output_source>`, by assigning a `parameter specification
dictionary <ParameterPort_Specification>` to the **params** argument of its constructor, with an entry that uses
*MONITOR_FOR_LEARNING* as its key and a list containing the desired OutputPort(s) as its value. The `value
<InputPort.value>` of the *ACTIVATION_OUTPUT* InputPort is assigned as the second item of the LearningMechanism's
`variable <LearningMechanism.variable>` attribute.
.. _LearningMechanism_Input_Error_Signal:
* *ERROR_SIGNAL* - this receives the `value <OutputPort.value>` from the *OUTCOME* `OutputPort
<ComparatorMechanism_Structure>` of a `ComparatorMechanism`, or of the *ERROR_SIGNAL* OutputPort of another
`LearningMechanisms <LearningMechanism_Output_Error_Signal>`. If the `primary_learned_projection` projects
to the `TERMINAL` Mechanism of the `Composition` to which it belongs, or is not part of a `multilayer learning
sequence <LearningMechanism_Multilayer_Learning>`, then the LearningMechanism has a single *ERROR_SIGNAL* InputPort,
that receives its input from a ComparatorMechanism. If the `primary_learned_projection` is part of a `multilayer
learning pathway <LearningMechanism_Multilayer_Learning>`, then the LearningMechanism will have one or more
*ERROR_SIGNAL* InputPorts, that receive their input from the next LearningMechanism(s) in the sequence; that is,
the one(s) associated with the `efferents <Port.efferents>` (outgoing Projections) of its `output_source`,
with one *ERROR_SIGNAL* InputPort for each of those Projections. The `value <InputPort.value>`\\s of the
*ERROR_SIGNAL* InputPorts are summed by the LearningMechanism's `function <LearningMechanism.function>` to
calculate the `learning_signal <LearningMechanism.learning_signal>` (see `below <LearningMechanism_Function>`);
note that the value of the *ERROR_SIGNAL* InputPort may not be the same as that of the LearningMechanism's
`error_signal <LearningMechanism.error_signal>` attribute or *ERROR_SIGNAL* `OutputPort
<LearningMechanism_Output_Error_Signal>` (see `note <LearningMechanism_Error_Signal>` below). If a LearningMechanism
has more than one *ERROR_SIGNAL* InputPort, their names are suffixed with a hyphenated index, that is incremented for
each additional InputPort (e.g., ``error_signal-1``, ``error_signal-2``, etc.). These are listed in the
LearningMechanism's `error_signal_input_ports` attribute, and the `value <InputPort.value>` of each is assigned
as an item of the LearningMechanism's `variable <LearningMechanism.variable>` attribute, beginning with its third
item (i.e., following the `value <InputPort.value>` of the *ACTIVATION_INPUT* and *ACTIVATION_VALUE* InputPorts).
The Mechanisms from the which the `value <InputPort.values>`\\s above are received are listed in the
LearningMechanism's `input_source <LearningMechanism.input_source>`, `output_source <LearningMechanism.output_source>`,
and `error_sources <LearningMechanism.error_sources>` attributes, respectively (see
`LearningMechanism_Additional_Attributes` for additional details).
.. _LearningMechanism_Function:
*Learning Function*
~~~~~~~~~~~~~~~~~~~
The `function <LearningMechanism.function>` of a LearningMechanism uses the values received by the Mechanism's
InputPorts (described `above <LearningMechanism_InputPorts>`) to calculate the value of its `learning_signal
<LearningMechanism.learning_signal>` and `error_signal <LearningMechanism.error_signal>` attributes.
.. _LearningMechanism_Learning_Signal:
* `learning_signal` - the set of changes to the `matrix <MappingProjection.matrix>` parameter of the
`MappingProjections <MappingProjection>` being learned, calculated to reduce the summed value of the
LearningMechanism's *ERROR_SIGNAL* `InputPort(s) <LearningMechanism_Input_Error_Signal>`.
.. _LearningMechanism_Error_Signal:
* `error_signal <LearningMechanism.error_signal>` - the contribution made by the `primary_learned_projection` to the
error_signal(s) received by the LearningMechanism's *ERROR_SIGNAL* `InputPort(s)
<LearningMechanism_Input_Error_Signal>`. It is used by the LearningMechanism's `function <LearningMechanism.function>`
to calculate the `learning_signal <LearningMechanism.learning_signal>`. Depending upon the context and specific
`LearningFunction <LearningFunctions>` used, it may also take | |
"""linear and non-linear dynamic models for different animals"""
import numpy
import warnings
import re
DEFAULT_MODEL = "mamarama, units: mm"
def _get_decreasing_accel_model(dt=None):
"""get linear dynamical system matrices A and C
dt is the time-step in seconds
"""
# distance units are in m
# time units are in sec
# thus, velocity is (m/sec)
ss = 9 # length of state vector (state size)
os = 3 # length of observation vector (observation size)
half_dt2 = 0.5 * dt ** 2
ad = 0.1 # acceleration decay
# state vector describes a particle in brownian motion
# [ x y z xvel yvel zvel xaccel yaccel zaccel]
# process update matrix (time evolution update matrix)
A = numpy.array(
[
[1.0, 0.0, 0.0, dt, 0.0, 0.0, half_dt2, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0, dt, 0.0, 0.0, half_dt2, 0.0],
[0.0, 0.0, 1.0, 0.0, 0.0, dt, 0.0, 0.0, half_dt2],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, dt, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, dt, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, dt],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ad, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ad, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, ad],
]
)
A_model_name = "decreasing_accel"
# measurement prediction matrix
C = numpy.zeros((os, ss))
C[:os, :os] = numpy.eye(os) # directly measure x,y,z positions
### process covariance
##Q = numpy.zeros((ss,ss))
##for i in range(6,9):
## Q[i,i] = 10.0 # acceleration noise (near (3.16m*sec**-2)**2)
### measurement noise covariance matrix
##R = 1e-6*numpy.eye(os) # (1mm)**2 = (0.001m)**2
model = {
"A": A,
"A_model_name": A_model_name,
"C": C,
"ss": ss,
"os": os,
"dt": dt,
}
return model
def _get_fixed_vel_model(dt=None):
"""get linear dynamical system matrices A and C
dt is the time-step in seconds
"""
# distance units are in m
# time units are in sec
# thus, velocity is (m/sec)
ss = 6 # length of state vector (state size)
os = 3 # length of observation vector (observation size)
# state vector describes a particle in brownian motion
# [ x y z xvel yvel zvel]
# process update matrix (time evolution update matrix)
A = numpy.array(
[
[1.0, 0.0, 0.0, dt, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0, dt, 0.0],
[0.0, 0.0, 1.0, 0.0, 0.0, dt],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
]
)
A_model_name = "fixed_vel"
# measurement prediction matrix
C = numpy.zeros((os, ss))
C[:os, :os] = numpy.eye(os) # directly measure x,y,z positions
model = {
"A": A,
"A_model_name": A_model_name,
"C": C,
"ss": ss,
"os": os,
"dt": dt,
}
return model
def get_dynamic_model_dict(*args, **kw):
    """Deprecated alias for create_dynamic_model_dict().

    Callers should migrate to get_kalman_model().
    """
    # Emit a real DeprecationWarning (the category was previously embedded in
    # the message text, so the warning was raised as a plain UserWarning);
    # stacklevel=2 points the warning at the caller, matching the style of
    # create_dynamic_model_dict().
    warnings.warn(
        'call "get_kalman_model()", not old "get_dynamic_model_dict()"',
        DeprecationWarning,
        stacklevel=2,
    )
    return create_dynamic_model_dict(*args, **kw)
def _white_noise_accel_Q(dt):
    """Continuous-white-noise-acceleration process covariance (6x6).

    This form is after Shimkin's lecture notes in Estimation and
    Identification in Dynamical Systems,
    http://webee.technion.ac.il/people/shimkin/Estimation09/ch8_target.pdf,
    scaled by the historical overall covariance scale of 0.01.
    """
    T33 = dt ** 3 / 3.0
    T22 = dt ** 2 / 2.0
    T = dt
    Q = numpy.array(
        [
            [T33, 0, 0, T22, 0, 0],
            [0, T33, 0, 0, T22, 0],
            [0, 0, T33, 0, 0, T22],
            [T22, 0, 0, T, 0, 0],
            [0, T22, 0, 0, T, 0],
            [0, 0, T22, 0, 0, T],
        ]
    )
    # the scale of the covariance
    return 0.01 * Q


def create_dynamic_model_dict(dt=None, disable_warning=False):
    """get linear dynamical system matrices for all known animal models

    dt is the time-step in seconds.  Returns a dict keyed by model name
    ('hbird, units: mm', 'mamarama, units: mm', 'fishbowl40',
    'hydra, units: m'), each value holding the Kalman-filter matrices
    (A, C, Q, R) plus data-association parameters.

    Deprecated: use get_kalman_model() instead.
    """
    if not disable_warning:
        warnings.warn(
            'using deprecated function "create_dynamic_model_dict()". Use "get_kalman_model()"',
            DeprecationWarning,
            stacklevel=2,
        )
    dynamic_models = {}

    ######################################
    # 'hbird, units: mm': diagonal process covariance
    # WARNING: these parameters haven't been tested since the
    # consolidation of the flydra calibration stuff in July-August 2012.
    base_model_dict = _get_fixed_vel_model(dt)
    ss = base_model_dict["ss"]
    os = base_model_dict["os"]
    Q = numpy.zeros((ss, ss))
    for i in range(0, 3):
        Q[i, i] = (0.04) ** 2  # position process noise
    for i in range(3, 6):
        Q[i, i] = (0.4) ** 2  # velocity process noise
    # measurement noise covariance matrix
    R = 1e-2 * numpy.eye(os)
    newdict = dict(
        # data association parameters
        # birth model
        hypothesis_test_max_acceptable_error=50.0,
        min_dist_to_believe_new_meters=0.2,  # 20 cm
        min_dist_to_believe_new_sigma=3.0,
        initial_position_covariance_estimate=1e-2,
        initial_velocity_covariance_estimate=10,
        # death model
        max_variance_dist_meters=0.08,
        max_frames_skipped=10,
        # kalman filter parameters
        Q=Q,
        R=R,
    )
    newdict.update(base_model_dict)
    dynamic_models["hbird, units: mm"] = newdict

    ######################################
    # 'mamarama, units: mm': continuous-white-noise process covariance
    base_model_dict = _get_fixed_vel_model(dt)
    assert base_model_dict["ss"] == 6
    Q = _white_noise_accel_Q(dt)
    # measurement noise covariance matrix
    R = 1e-7 * numpy.eye(base_model_dict["os"])
    newdict = dict(
        # data association parameters
        # birth model
        hypothesis_test_max_acceptable_error=5.0,
        min_dist_to_believe_new_meters=0.02,
        min_dist_to_believe_new_sigma=10.0,
        initial_position_covariance_estimate=1e-3,
        initial_velocity_covariance_estimate=10,
        # death model
        max_variance_dist_meters=0.25,
        max_frames_skipped=10,
        # kalman filter parameters
        Q=Q,
        R=R,
    )
    newdict.update(base_model_dict)
    dynamic_models["mamarama, units: mm"] = newdict

    ######################################
    # 'fishbowl40': same process/measurement noise as mamarama,
    # different data-association parameters
    base_model_dict = _get_fixed_vel_model(dt)
    assert base_model_dict["ss"] == 6
    Q = _white_noise_accel_Q(dt)
    # measurement noise covariance matrix
    R = 1e-7 * numpy.eye(base_model_dict["os"])
    newdict = dict(
        # data association parameters
        # birth model
        hypothesis_test_max_acceptable_error=5.0,  # Big Fish 5 Smallfish 5
        min_dist_to_believe_new_meters=0.05,  # Big Fish 0.05 Smallfish 0.02
        min_dist_to_believe_new_sigma=10.0,
        initial_position_covariance_estimate=1e-3,
        initial_velocity_covariance_estimate=10,
        # death model
        max_variance_dist_meters=0.125,
        max_frames_skipped=30,
        # kalman filter parameters
        Q=Q,
        R=R,
    )
    newdict.update(base_model_dict)
    dynamic_models["fishbowl40"] = newdict

    ######################################
    # 'hydra, units: m': diagonal process covariance
    # WARNING: these parameters haven't been tested since the
    # consolidation of the flydra calibration stuff in July-August 2012.
    base_model_dict = _get_fixed_vel_model(dt)
    ss = base_model_dict["ss"]
    os = base_model_dict["os"]
    Q = numpy.zeros((ss, ss))
    for i in range(0, 3):
        Q[i, i] = (0.01) ** 2
    for i in range(3, 6):
        Q[i, i] = (0.5) ** 2
    Q = Q * 1000 ** 2  # convert to meters
    # measurement noise covariance matrix
    R = 1e-3 * numpy.eye(os)
    newdict = dict(
        # data association parameters
        # birth model
        hypothesis_test_max_acceptable_error=50.0,
        min_dist_to_believe_new_meters=0.08,  # 8 cm
        min_dist_to_believe_new_sigma=3.0,
        initial_position_covariance_estimate=1e-6,
        initial_velocity_covariance_estimate=1,
        # death model
        max_variance_dist_meters=0.08,
        max_frames_skipped=10,
        # kalman filter parameters
        Q=Q,
        R=R,
    )
    newdict.update(base_model_dict)
    dynamic_models["hydra, units: m"] = newdict

    return dynamic_models
class EKFAllParams(dict):
    """Abstract base class holding all parameters for data association
    and EK filtering."""

    def __init__(self):
        dict.__init__(self)
        # every EKF parameter set shares a 6-element state and the EKF flag
        self.update({"ss": 6, "isEKF": True})
class MamaramaMMEKFAllParams(EKFAllParams):
    """Drosophila non-linear dynamic model for EKF"""

    # parameters copied verbatim from the corresponding linear model
    _LINEAR_KEYS = (
        "initial_position_covariance_estimate",
        "max_frames_skipped",
        "A",
        "Q",
        "dt",
        "hypothesis_test_max_acceptable_error",
        "min_dist_to_believe_new_meters",
        "min_dist_to_believe_new_sigma",
        "max_variance_dist_meters",
    )

    def __init__(self, dt=None):
        super(MamaramaMMEKFAllParams, self).__init__()
        assert dt is not None
        linear_dict = get_kalman_model(name="mamarama, units: mm", dt=dt)
        # update some parameters from linear model
        self.update((key, linear_dict[key]) for key in self._LINEAR_KEYS)
        self["ekf_observation_covariance_pixels"] = numpy.array(
            [[1.0, 0.0], [0.0, 1.0]], dtype=numpy.float64
        )
        # distance in the raw image plane (i.e. before radial undistortion)
        self["distorted_pixel_euclidian_distance_accept"] = 20.0
class Fishbowl40EKFAllParams(EKFAllParams):
    """Fishbowl non-linear dynamic model for EKF"""

    # parameters copied verbatim from the corresponding linear model
    _LINEAR_KEYS = (
        "initial_position_covariance_estimate",
        "max_frames_skipped",
        "A",
        "Q",
        "dt",
        "hypothesis_test_max_acceptable_error",
        "min_dist_to_believe_new_meters",
        "min_dist_to_believe_new_sigma",
        "max_variance_dist_meters",
    )

    def __init__(self, dt=None):
        super(Fishbowl40EKFAllParams, self).__init__()
        assert dt is not None
        linear_dict = get_kalman_model(name="fishbowl40", dt=dt)
        # update some parameters from linear model
        self.update((key, linear_dict[key]) for key in self._LINEAR_KEYS)
        self["ekf_observation_covariance_pixels"] = numpy.array(
            [[1.0, 0.0], [0.0, 1.0]], dtype=numpy.float64
        )
        # distance in the raw image plane (i.e. before radial undistortion)
        self["distorted_pixel_euclidian_distance_accept"] = 20.0
class HydraMEKFAllParams(EKFAllParams):
# WARNING: these parameters haven't been tested since the
# consolidation of the flydra calibration stuff in July-August
# 2012.
def __init__(self, dt=None):
super(HydraMEKFAllParams, self).__init__()
assert dt is not None
linear_dict = get_kalman_model(name="hydra, units: m", dt=dt)
# update some parameters from linear model
for key in [
"initial_position_covariance_estimate",
"max_frames_skipped",
"A",
"Q",
"dt",
]:
self[key] = linear_dict[key]
self["ekf_observation_covariance_pixels"] = numpy.array(
[[1.0, 0.0], [0.0, 1.0]], dtype=numpy.float64
)
self["Q"] = self["Q"] / (1000 ** 2)
self["min_dist_to_believe_new_meters"] = 0.2
self["min_dist_to_believe_new_sigma"] = 10.0
self[
"distorted_pixel_euclidian_distance_accept"
] = 20.0 # distance in the raw image plane (i.e. before radial undistortion)
if 0:
# restrictive (better for e.g. making new calibration)
self["max_variance_dist_meters"] = 0.25
| |
cons29,
cons5,
cons1918,
cons1252,
cons248,
)
# NOTE: machine-generated RUBI (rule-based integration) definitions.  Each
# Pattern() pairs an Integral template -- built from wildcards (WC), the
# integration variable x_, and symbolic coefficients -- with a sequence of
# constraint objects (cons...) that gate when the rule applies; each
# ReplacementRule then binds a pattern to its replacement function.
# Generated from the RUBI rule set; do not edit by hand.
rule6518 = ReplacementRule(pattern6518, replacement6518)
# Integral of (c + d*x)**p * (e + f*x)**m * exp(n*atanh(a*x))
pattern6519 = Pattern(
    Integral(
        (c_ + x_ * WC("d", S(1))) ** WC("p", S(1))
        * (x_ * WC("f", S(1)) + WC("e", S(0))) ** WC("m", S(1))
        * exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
        x_,
    ),
    cons2,
    cons8,
    cons29,
    cons50,
    cons127,
    cons19,
    cons5,
    cons1918,
    cons1252,
    cons1919,
    cons248,
)
rule6519 = ReplacementRule(pattern6519, replacement6519)
# Integral of (c + d*x)**p * u * exp(n*atanh(a*x))
pattern6520 = Pattern(
    Integral(
        (c_ + x_ * WC("d", S(1))) ** WC("p", S(1))
        * WC("u", S(1))
        * exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
        x_,
    ),
    cons2,
    cons8,
    cons29,
    cons4,
    cons5,
    cons1920,
    cons1804,
)
rule6520 = ReplacementRule(pattern6520, replacement6520)
pattern6521 = Pattern(
Integral(
(c_ + x_ * WC("d", S(1))) ** WC("p", S(1))
* WC("u", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1920,
cons1805,
)
rule6521 = ReplacementRule(pattern6521, replacement6521)
pattern6522 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_) ** WC("p", S(1))
* WC("u", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1921,
cons40,
)
rule6522 = ReplacementRule(pattern6522, replacement6522)
pattern6523 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_) ** p_
* WC("u", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons5,
cons1921,
cons149,
cons745,
cons179,
)
rule6523 = ReplacementRule(pattern6523, replacement6523)
pattern6524 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_) ** p_
* WC("u", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons5,
cons1921,
cons149,
cons745,
cons119,
)
rule6524 = ReplacementRule(pattern6524, replacement6524)
pattern6525 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_) ** p_
* WC("u", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1921,
cons149,
)
rule6525 = ReplacementRule(pattern6525, replacement6525)
pattern6526 = Pattern(
Integral(
exp(n_ * atanh(x_ * WC("a", S(1))))
/ (c_ + x_ ** S(2) * WC("d", S(1))) ** (S(3) / 2),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1922,
cons25,
)
rule6526 = ReplacementRule(pattern6526, replacement6526)
pattern6527 = Pattern(
Integral(
(c_ + x_ ** S(2) * WC("d", S(1))) ** p_
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1922,
cons13,
cons139,
cons25,
cons1923,
cons248,
)
rule6527 = ReplacementRule(pattern6527, replacement6527)
pattern6528 = Pattern(
Integral(
exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1))))
/ (c_ + x_ ** S(2) * WC("d", S(1))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1922,
cons1924,
)
rule6528 = ReplacementRule(pattern6528, replacement6528)
pattern6529 = Pattern(
Integral(
(c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons5,
cons1922,
cons40,
cons1925,
cons1926,
)
rule6529 = ReplacementRule(pattern6529, replacement6529)
pattern6530 = Pattern(
Integral(
(c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons5,
cons1922,
cons40,
cons1927,
cons1926,
)
rule6530 = ReplacementRule(pattern6530, replacement6530)
pattern6531 = Pattern(
Integral(
(c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1922,
cons1804,
)
rule6531 = ReplacementRule(pattern6531, replacement6531)
pattern6532 = Pattern(
Integral(
(c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons5,
cons1922,
cons1805,
cons676,
)
rule6532 = ReplacementRule(pattern6532, replacement6532)
pattern6533 = Pattern(
Integral(
(c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons5,
cons1922,
cons1805,
cons1928,
)
rule6533 = ReplacementRule(pattern6533, replacement6533)
pattern6534 = Pattern(
Integral(
(c_ + x_ ** S(2) * WC("d", S(1))) ** p_
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1922,
cons1805,
)
rule6534 = ReplacementRule(pattern6534, replacement6534)
pattern6535 = Pattern(
Integral(
x_
* exp(n_ * atanh(x_ * WC("a", S(1))))
/ (c_ + x_ ** S(2) * WC("d", S(1))) ** (S(3) / 2),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1922,
cons25,
)
rule6535 = ReplacementRule(pattern6535, replacement6535)
pattern6536 = Pattern(
Integral(
x_
* (c_ + x_ ** S(2) * WC("d", S(1))) ** p_
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1922,
cons13,
cons139,
cons25,
cons248,
)
rule6536 = ReplacementRule(pattern6536, replacement6536)
pattern6537 = Pattern(
Integral(
x_ ** S(2)
* (c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1922,
cons1929,
cons25,
)
rule6537 = ReplacementRule(pattern6537, replacement6537)
pattern6538 = Pattern(
Integral(
x_ ** S(2)
* (c_ + x_ ** S(2) * WC("d", S(1))) ** p_
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1922,
cons13,
cons139,
cons25,
cons1923,
cons248,
)
rule6538 = ReplacementRule(pattern6538, replacement6538)
pattern6539 = Pattern(
Integral(
x_ ** WC("m", S(1))
* (c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons19,
cons5,
cons1922,
cons1804,
cons1925,
cons1926,
)
rule6539 = ReplacementRule(pattern6539, replacement6539)
pattern6540 = Pattern(
Integral(
x_ ** WC("m", S(1))
* (c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons19,
cons5,
cons1922,
cons1804,
cons1927,
cons1926,
)
rule6540 = ReplacementRule(pattern6540, replacement6540)
pattern6541 = Pattern(
Integral(
x_ ** WC("m", S(1))
* (c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons19,
cons4,
cons5,
cons1922,
cons1804,
)
rule6541 = ReplacementRule(pattern6541, replacement6541)
pattern6542 = Pattern(
Integral(
x_ ** WC("m", S(1))
* (c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons19,
cons5,
cons1922,
cons1805,
cons676,
)
rule6542 = ReplacementRule(pattern6542, replacement6542)
pattern6543 = Pattern(
Integral(
x_ ** WC("m", S(1))
* (c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons19,
cons5,
cons1922,
cons1805,
cons1928,
)
rule6543 = ReplacementRule(pattern6543, replacement6543)
pattern6544 = Pattern(
Integral(
x_ ** WC("m", S(1))
* (c_ + x_ ** S(2) * WC("d", S(1))) ** p_
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons19,
cons4,
cons5,
cons1922,
cons1805,
cons1924,
)
rule6544 = ReplacementRule(pattern6544, replacement6544)
pattern6545 = Pattern(
Integral(
u_
* (c_ + x_ ** S(2) * WC("d", S(1))) ** WC("p", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1922,
cons1804,
)
rule6545 = ReplacementRule(pattern6545, replacement6545)
pattern6546 = Pattern(
Integral(
u_
* (c_ + x_ ** S(2) * WC("d", S(1))) ** p_
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1922,
cons1805,
cons745,
)
rule6546 = ReplacementRule(pattern6546, replacement6546)
pattern6547 = Pattern(
Integral(
u_
* (c_ + x_ ** S(2) * WC("d", S(1))) ** p_
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1922,
cons1805,
cons1924,
)
rule6547 = ReplacementRule(pattern6547, replacement6547)
pattern6548 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_ ** S(2)) ** WC("p", S(1))
* WC("u", S(1))
* exp(WC("n", S(1)) * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons1930,
cons40,
)
rule6548 = ReplacementRule(pattern6548, replacement6548)
pattern6549 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_ ** S(2)) ** p_
* WC("u", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons5,
cons1930,
cons149,
cons745,
cons179,
)
rule6549 = ReplacementRule(pattern6549, replacement6549)
pattern6550 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_ ** S(2)) ** p_
* WC("u", S(1))
* exp(n_ * atanh(x_ * WC("a", S(1)))),
x_,
),
cons2,
cons8,
cons29,
cons4,
cons5,
cons1930,
cons149,
cons745,
cons119,
)
rule6550 = ReplacementRule(pattern6550, replacement6550)
pattern6551 = Pattern(
Integral(
(c_ + WC("d", S(1)) / x_ ** S(2)) ** p_
* WC("u", S(1))
* exp(WC("n", | |
linewidths=b_widths,
antialiaseds=(1,), colors=b_colors,
transOffset=ax.transData)
if b_nums is not None:
b_collection.set_array(np.asarray(b_nums))
b_collection.set_cmap(b_cmap)
b_collection.autoscale()
ax.add_collection(b_collection)
b_collection.set_zorder(3)
branch_collections.append(b_collection)
if boundaries is None:
ax.autoscale()
return (bus_collection,) + tuple(branch_collections) + tuple(arrow_collections)
def as_branch_series(ser, arg, c, n):
    """Align *ser* with the index of branch component *c*.

    Scalars are broadcast to every entry of the component index.  Raises an
    AssertionError when any entry ends up unspecified (NaN).
    """
    aligned = pd.Series(ser, index=n.df(c).index)
    missing = aligned.isnull()
    assert not missing.any(), (f'{c}_{arg}s does not specify all '
        f'entries. Missing values for {c}: {list(aligned[missing].index)}')
    return aligned
def get_projection_from_crs(crs):
    """Return a cartopy projection for the EPSG code *crs*.

    EPSG 4326 (plain lat/lon) maps straight to PlateCarree; any other code
    is resolved through ``ccrs.epsg`` (which contacts http://epsg.io/), and
    on failure we fall back to PlateCarree with a warning.
    """
    if crs == 4326:
        return ccrs.PlateCarree()
    try:
        return ccrs.epsg(crs)
    except (requests.RequestException, ValueError) as err:
        if isinstance(err, requests.RequestException):
            logger.warning("A connection to http://epsg.io/ is "
                           "required for a projected coordinate reference system. "
                           "Falling back to latlong.")
        else:
            logger.warning("'{crs}' does not define a projected coordinate system. "
                           "Falling back to latlong.".format(crs=crs))
    return ccrs.PlateCarree()
def compute_bbox_with_margins(margin, x, y):
    """Return ((xmin, ymin), (xmax, ymax)) of points (x, y), padded by
    *margin* times the extent along each axis."""
    coords = np.asarray((x, y))
    lower = coords.min(axis=1)
    upper = coords.max(axis=1)
    span = upper - lower
    return tuple(lower - margin * span), tuple(upper + margin * span)
def projected_area_factor(ax, original_crs=4326):
    """
    Helper function to get the area scale of the current projection in
    reference to the default projection. The default 'original crs' is assumed
    to be 4326, which translates to the cartopy default cartopy.crs.PlateCarree()
    """
    # Unprojected axes (or already lat/lon) need no scaling.
    if not hasattr(ax, 'projection') or isinstance(ax.projection,
                                                   ccrs.PlateCarree):
        return 1
    x1, x2, y1, y2 = ax.get_extent()
    # Transform the two extent corners back into the original CRS;
    # transform_points returns an (n, 3) array of (x, y, z) rows.
    transformed = get_projection_from_crs(original_crs).transform_points(
        ax.projection, np.array([x1, x2]), np.array([y1, y2]))
    area_projected = abs((x2 - x1) * (y2 - y1))
    area_original = abs((transformed[0] - transformed[1])[:2].prod())
    return np.sqrt(area_projected / area_original)
def draw_map_cartopy(ax, geomap=True, color_geomap=None):
    """Draw coastlines, country borders and optional land/ocean shading.

    ``geomap`` may be True (implies '50m') or a Natural-Earth resolution
    string ('10m'/'50m'/'110m').  ``color_geomap`` may be a dict mapping
    'land'/'ocean' to facecolors; any other truthy value selects default
    colors, while a falsy value disables the fill entirely.
    """
    if isinstance(geomap, bool):
        resolution = '50m'
    else:
        resolution = geomap
    assert resolution in ['10m', '50m', '110m'], (
        "Resolution has to be one of '10m', '50m', '110m'")

    if not color_geomap:
        color_geomap = {}
    elif not isinstance(color_geomap, dict):
        color_geomap = {'ocean': 'lightblue', 'land': 'whitesmoke'}

    features = {'land': cartopy.feature.LAND, 'ocean': cartopy.feature.OCEAN}
    for feature_name in ('land', 'ocean'):
        if feature_name in color_geomap:
            ax.add_feature(features[feature_name].with_scale(resolution),
                           facecolor=color_geomap[feature_name])

    ax.coastlines(linewidth=0.4, zorder=2, resolution=resolution)
    ax.add_feature(cartopy.feature.BORDERS.with_scale(resolution),
                   linewidth=0.3)
def _flow_ds_from_arg(flow, n, branch_components):
if isinstance(flow, pd.Series):
if not isinstance(flow.index, pd.MultiIndex):
raise ValueError("Argument 'flow' is a pandas.Series without "
"a MultiIndex. Please provide a multiindexed series, with "
"the first level being a subset of 'branch_components'.")
return flow
if flow in n.snapshots:
return (pd.concat([n.pnl(c).p0.loc[flow]
for c in branch_components],
keys=branch_components, sort=True))
elif isinstance(flow, str) or callable(flow):
return (pd.concat([n.pnl(c).p0 for c in branch_components],
axis=1, keys=branch_components, sort=True)
.agg(flow, axis=0))
def directed_flow(coords, flow, color, area_factor=1, cmap=None):
    """
    Helper function to generate arrows from flow data.

    Builds a matplotlib PatchCollection of FancyArrow patches, one per
    branch, sized by sqrt(|flow|) and pointing along the flow direction.
    ``coords`` must provide endpoint columns x1/y1/x2/y2 per branch.
    NOTE(review): the ``cmap`` parameter is accepted but unused here.
    """
    # Arrow head size scales with sqrt(|flow|); the clip avoids zero-size
    # arrows that matplotlib cannot draw.
    data = pd.DataFrame(
        {'arrowsize': flow.abs().pipe(np.sqrt).clip(lower=1e-8),
         'direction': np.sign(flow),
         'linelength': (np.sqrt((coords.x1 - coords.x2)**2. +
                                (coords.y1 - coords.y2)**2))})
    data = data.join(coords)
    if area_factor:
        data['arrowsize']= data['arrowsize'].mul(area_factor)
    # Flag arrows whose head would be longer than the branch itself.
    data['arrowtolarge'] = (1.5 * data.arrowsize > data.linelength)
    # swap coords for negative directions
    data.loc[data.direction == -1., ['x1', 'x2', 'y1', 'y2']] = \
        data.loc[data.direction == -1., ['x2', 'x1', 'y2', 'y1']].values
    # Normal-sized arrows: head placed at 60% of the branch, pulled back by
    # 75% of the head size so the tip stays on the line.
    if ((data.linelength > 0.) & (~data.arrowtolarge)).any():
        data['arrows'] = (
            data[(data.linelength > 0.) & (~data.arrowtolarge)]
            .apply(lambda ds:
                   FancyArrow(ds.x1, ds.y1,
                              0.6*(ds.x2 - ds.x1) - ds.arrowsize
                              * 0.75 * (ds.x2 - ds.x1) / ds.linelength,
                              0.6 * (ds.y2 - ds.y1) - ds.arrowsize
                              * 0.75 * (ds.y2 - ds.y1)/ds.linelength,
                              head_width=ds.arrowsize), axis=1))
    # Oversized arrows: draw a near-zero-length arrow (effectively just the
    # head) at the branch start.
    data.loc[(data.linelength > 0.) & (data.arrowtolarge), 'arrows'] = \
        (data[(data.linelength > 0.) & (data.arrowtolarge)]
         .apply(lambda ds:
                FancyArrow(ds.x1, ds.y1,
                           0.001*(ds.x2 - ds.x1),
                           0.001*(ds.y2 - ds.y1),
                           head_width=ds.arrowsize), axis=1))
    # Zero-length branches never received a patch; drop them.
    data = data.dropna(subset=['arrows'])
    arrowcol = PatchCollection(data.arrows,
                               color=color,
                               edgecolors='k',
                               linewidths=0.,
                               zorder=4, alpha=1)
    return arrowcol
def autogenerate_coordinates(n, assign=False, layouter=None):
    """
    Automatically generate bus coordinates for the network graph using a
    layouting function from `networkx <https://networkx.github.io/>`_.

    Parameters
    ----------
    n : pypsa.Network
    assign : bool, default False
        If True, write the generated coordinates back to
        ``n.buses[['x', 'y']]``.
    layouter : networkx.drawing.layout function, default None
        When omitted, a planar layout is used if the network graph is
        planar, otherwise a Kamada-Kawai layout.

    Returns
    -------
    coordinates : pd.DataFrame
        Generated coordinates with buses as index and ['x', 'y'] as columns.

    Examples
    --------
    >>> autogenerate_coordinates(network)
    >>> autogenerate_coordinates(network, assign=True, layouter=nx.circle_layout)
    """
    graph = n.graph()
    if layouter is None:
        is_planar, _ = nx.check_planarity(graph)
        layouter = nx.planar_layout if is_planar else nx.kamada_kawai_layout
    coordinates = pd.DataFrame(layouter(graph)).T.rename({0: 'x', 1: 'y'},
                                                         axis=1)
    if assign:
        n.buses[['x', 'y']] = coordinates
    return coordinates
def _get_coordinates(n, layouter=None):
if layouter is not None or n.buses[['x', 'y']].isin([np.nan, 0]).all().all():
coordinates = autogenerate_coordinates(n, layouter=layouter)
return coordinates["x"], coordinates["y"]
else:
return n.buses["x"], n.buses["y"]
# Mapbox base-map styles that require a Mapbox API access token.
_token_required_mb_styles = ['basic', 'streets', 'outdoors', 'light', 'dark',
                             'satellite', 'satellite-streets']
# Base-map styles usable without any access token.
_open__mb_styles = ['open-street-map', 'white-bg', 'carto-positron',
                    'carto-darkmatter', 'stamen-terrain', 'stamen-toner',
                    'stamen-watercolor']
#This function was borne out of a breakout group at the October 2017
#Munich Open Energy Modelling Initiative Workshop to hack together a
#working example of plotly for networks, see:
#https://forum.openmod-initiative.org/t/breakout-group-on-visualising-networks-with-plotly/384/7
#We thank <NAME> for holding the tutorial on plotly which
#inspired the breakout group and for contributing ideas to the iplot
#function below.
def iplot(n, fig=None, bus_colors='cadetblue', bus_alpha=1, bus_sizes=10,
bus_cmap=None, bus_colorbar=None, bus_text=None,
line_colors='rosybrown', link_colors='darkseagreen',
transformer_colors='orange', line_widths=3, link_widths=3,
transformer_widths=3, line_text=None, link_text=None,
transformer_text=None, layouter=None, title="", size=None,
branch_components=None, iplot=True, jitter=None, mapbox=False,
mapbox_style='open-street-map', mapbox_token="",mapbox_parameters={}):
"""
Plot the network buses and lines interactively using plotly.
Parameters
----------
fig : dict, default None
If not None, figure is built upon this fig.
bus_colors : dict/pandas.Series
Colors for the buses, defaults to "cadetblue". If bus_sizes is a
pandas.Series with a Multiindex, bus_colors defaults to the
n.carriers['color'] column.
bus_alpha : float
Adds alpha channel to buses, defaults to 1.
bus_sizes : float/pandas.Series
Sizes of bus points, defaults to 10.
bus_cmap : plt.cm.ColorMap/str
If bus_colors are floats, this color map will assign the colors
bus_colorbar : dict
Plotly colorbar, e.g. {'title' : 'my colorbar'}
bus_text : pandas.Series
Text for each bus, defaults to bus names
line_colors : str/pandas.Series
Colors for the lines, defaults to 'rosybrown'.
link_colors : str/pandas.Series
Colors for the links, defaults to 'darkseagreen'.
    transformer_colors : str/pandas.Series
        Colors for the transformers, defaults to 'orange'.
line_widths : dict/pandas.Series
Widths of lines, defaults to 1.5
link_widths : dict/pandas.Series
Widths of links, defaults to 1.5
transformer_widths : dict/pandas.Series
Widths of transformer, defaults to 1.5
line_text : pandas.Series
Text for lines, defaults to line names.
link_text : pandas.Series
Text for links, defaults to link names.
    transformer_text : pandas.Series
Text for transformers, defaults to transformer names.
layouter : networkx.drawing.layout function, default None
Layouting function from `networkx <https://networkx.github.io/>`_ which
overrules coordinates given in ``n.buses[['x','y']]``. See
`list <https://networkx.github.io/documentation/stable/reference/drawing.html#module-networkx.drawing.layout>`_
of available options.
title : string
Graph title
size : None|tuple
        Tuple specifying width and height of figure; e.g. (width, height).
branch_components : list of str
Branch components to be plotted, defaults to Line and Link.
iplot : bool, default True
Automatically do an interactive plot of the figure.
jitter : None|float
Amount of random noise to add to bus positions to distinguish
overlapping buses
mapbox : bool, default False
Switch to use Mapbox.
mapbox_style : str, default 'open-street-map'
Define the mapbox layout style of the interactive plot. If this is set
to a mapbox layout, the argument ``mapbox_token`` must be a valid Mapbox
API access token.
Valid open layouts are:
open-street-map, white-bg, carto-positron, carto-darkmatter,
stamen-terrain, stamen-toner, stamen-watercolor
Valid mapbox layouts are:
basic, streets, outdoors, light, dark, satellite, satellite-streets
mapbox_token : string
        Mapbox API access token. Obtain from https://www.mapbox.com.
Can also be included in mapbox_parameters as `accesstoken=mapbox_token`.
mapbox_parameters : dict
Configuration parameters of the Mapbox layout.
E.g. {"bearing": 5, "pitch": 10, "zoom": 1, "style": 'dark'}.
Returns
-------
fig: dictionary for plotly figure
"""
if fig is None:
fig = dict(data=[],layout={})
if bus_text is None:
bus_text = 'Bus ' + n.buses.index
x, y = _get_coordinates(n, layouter=layouter)
if jitter is not None:
x = x + np.random.uniform(low=-jitter, high=jitter, size=len(x))
y = y + np.random.uniform(low=-jitter, high=jitter, size=len(y))
bus_trace = dict(x=x, y=y,
text=bus_text,
type="scatter",
mode="markers",
hoverinfo="text",
opacity=bus_alpha,
marker=dict(color=bus_colors,
size=bus_sizes))
if bus_cmap is not None:
bus_trace['marker']['colorscale'] = bus_cmap
if bus_colorbar is not None:
bus_trace['marker']['colorbar'] = bus_colorbar
# Plot branches:
if isinstance(line_widths, pd.Series):
if isinstance(line_widths.index, pd.MultiIndex):
raise TypeError("Index of argument 'line_widths' is a Multiindex, "
"this is not support since pypsa v0.17. "
"Set differing widths with arguments 'line_widths', "
"'link_widths' and 'transformer_widths'.")
if isinstance(line_colors, pd.Series):
if isinstance(line_colors.index, pd.MultiIndex):
raise TypeError("Index of argument 'line_colors' is a Multiindex, "
"this is not support since pypsa v0.17. | |
# repo: renovate-bot/python-hpedockerplugin
import abc
import json
import re
from collections import OrderedDict
from oslo_log import log as logging
import hpedockerplugin.exception as exception
from hpedockerplugin.hpe import share
LOG = logging.getLogger(__name__)
class RequestContextBuilderFactory(object):
    """Factory selecting the request-context builder for enabled drivers.

    Only the file (share) driver is wired up here; when no 'file'
    configuration section is present, a null builder is returned that
    rejects every request with an explanatory error.

    NOTE: the block/volume driver wiring that used to live here is
    intentionally disabled.
    """

    def __init__(self, all_configs):
        self._all_configs = all_configs
        if 'file' in all_configs:
            # all_configs['file'] is a tuple; element 1 carries the
            # per-backend configuration map.
            backend_configs = all_configs['file'][1]
            self._file_req_ctxt_builder = FileRequestContextBuilder(
                backend_configs)
        else:
            self._file_req_ctxt_builder = NullRequestContextBuilder(
                "ERROR: File driver not enabled. Please provide hpe_file.conf "
                "file to enable it")

    def get_request_context_builder(self):
        """Return the builder chosen at construction time."""
        return self._file_req_ctxt_builder
class NullRequestContextBuilder(object):
    """Placeholder builder used when a driver is not configured.

    Every attempt to build a request context fails with the message
    supplied at construction time.
    """

    def __init__(self, msg):
        self._msg = msg

    def build_request_context(self, contents, def_backend_name):
        """Always raise InvalidInput carrying the configured message."""
        raise exception.InvalidInput(self._msg)
class RequestContextBuilder(object):
    """Base class turning a Docker plugin request into an operation context.

    A request arrives as a decoded JSON dictionary (``contents``) with a
    ``Name`` and an optional ``Opts`` mapping.  A concrete subclass supplies
    a map of comma separated option-name combinations to context-creator
    methods via :meth:`_get_build_req_ctxt_map`; the first entry whose
    options are all present in the request wins.
    """

    def __init__(self, backend_configs):
        self._backend_configs = backend_configs

    def build_request_context(self, contents, def_backend_name):
        """Build and return the operation context for a plugin request.

        :param contents: decoded request body; must contain 'Name' and may
            contain 'Opts'.
        :param def_backend_name: backend used when none is specified.
        :raises exception.InvalidInput: for malformed names or options.
        """
        LOG.info("build_request_context: Entering...")
        self._validate_name(contents['Name'])
        req_ctxt_map = self._get_build_req_ctxt_map()
        if 'Opts' in contents and contents['Opts']:
            # self._validate_mutually_exclusive_ops(contents)
            self._validate_dependent_opts(contents)
            for op_name, req_ctxt_creator in req_ctxt_map.items():
                # Map keys are comma separated option combinations; a
                # creator applies when ALL of its options were supplied.
                op_name = op_name.split(',')
                found = not (set(op_name) - set(contents['Opts'].keys()))
                if found:
                    return req_ctxt_creator(contents, def_backend_name)
        return self._default_req_ctxt_creator(contents)

    @staticmethod
    def _validate_name(vol_name):
        """Reject names that are not alphanumeric with '-'/'_' separators.

        NOTE(review): the regex also requires at least two characters --
        presumably intentional; confirm before relaxing.
        """
        is_valid_name = re.match("^[A-Za-z0-9]+[A-Za-z0-9_-]+$", vol_name)
        if not is_valid_name:
            msg = 'Invalid volume name: %s is passed.' % vol_name
            raise exception.InvalidInput(reason=msg)

    @staticmethod
    def _get_int_option(options, option_name, default_val):
        """Return *option_name* from *options* as int, or *default_val*
        when absent/empty.

        :raises exception.InvalidInput: when the value is not an integer.
        """
        opt = options.get(option_name)
        if opt and opt != '':
            try:
                opt = int(opt)
            except ValueError:
                msg = "ERROR: Invalid value '%s' specified for '%s' option. " \
                      "Please specify an integer value." % (opt, option_name)
                LOG.error(msg)
                raise exception.InvalidInput(msg)
        else:
            opt = default_val
        return opt

    # Resolution order:
    # 1. Option present with a non-empty value -> validated value.
    # 2. Otherwise -> default_val when provided.
    # 3. Otherwise, when value_unset_exception is set -> a JSON error
    #    document naming the missing option (the Docker plugin protocol
    #    reports errors as {"Err": ...} payloads).
    # 4. Otherwise -> default_val (i.e. None).
    @staticmethod
    def _get_str_option(options, option_name, default_val, valid_values=None,
                        value_unset_exception=False):
        """Return string value of *option_name*, optionally validated
        against *valid_values* (compared case-insensitively)."""
        opt = options.get(option_name)
        if opt:
            # A truthy value can never be '' -- the old nested check was
            # redundant.
            opt = str(opt)
            if valid_values and opt.lower() not in valid_values:
                # BUG FIX: added the missing space before 'option' in the
                # error message.
                msg = "ERROR: Invalid value '%s' specified for '%s' " \
                      "option. Valid values are: %s" %\
                      (opt, option_name, valid_values)
                LOG.error(msg)
                raise exception.InvalidInput(msg)
            return opt
        if default_val:
            return default_val
        if value_unset_exception:
            # BUG FIX: report the *option name*; previously the unset value
            # itself was formatted in, yielding "option: None".
            return json.dumps({
                'Err': "Value not set for option: %s" % option_name
            })
        return default_val

    def _validate_dependent_opts(self, contents):
        """Hook for subclasses to validate inter-dependent options."""
        pass

    # To be implemented by derived class
    @abc.abstractmethod
    def _get_build_req_ctxt_map(self):
        """Return mapping of comma separated option combos -> creators."""
        pass

    def _default_req_ctxt_creator(self, contents):
        """Hook: context creator used when no option combination matched."""
        pass

    @staticmethod
    def _validate_mutually_exclusive_ops(contents):
        """Ensure at most one of the mutually exclusive operations is set."""
        mutually_exclusive_ops = ['virtualCopyOf', 'cloneOf', 'importVol',
                                  'replicationGroup']
        if 'Opts' in contents and contents['Opts']:
            received_opts = contents.get('Opts').keys()
            diff = set(mutually_exclusive_ops) - set(received_opts)
            # More than one exclusive op is present iff fewer than N-1 of
            # them are missing from the request.
            if len(diff) < len(mutually_exclusive_ops) - 1:
                mutually_exclusive_ops.sort()
                msg = "Operations %s are mutually exclusive and cannot be " \
                      "specified together. Please check help for usage." % \
                      mutually_exclusive_ops
                raise exception.InvalidInput(reason=msg)

    @staticmethod
    def _check_valid_fsMode_string(value):
        """Validate one ACE of an fsMode ACL string ('type:flags:perms')."""
        valid_type = ['A', 'D', 'U', 'L']
        valid_flag = ['f', 'd', 'p', 'i', 'S', 'F', 'g']
        valid_perm1 = ['r', 'w', 'a', 'x', 'd', 'D', 't', 'T']
        valid_perm2 = ['n', 'N', 'c', 'C', 'o', 'y']
        valid_perm = valid_perm1 + valid_perm2
        type_flag_perm = value.split(':')
        if len(type_flag_perm) != 3:
            msg = "Incorrect value passed , please check correct "\
                  "format and values to be passed in help"
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        vtype = type_flag_perm[0]
        if vtype not in valid_type:
            msg = "Incorrect value passed for type of a mode, please check "\
                  "correct format and values to be passed."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        # Flags must be unique and drawn from the valid set.
        passed_vflag_len = len(list(type_flag_perm[1]))
        vflag = list(set(list(type_flag_perm[1])))
        if len(vflag) < passed_vflag_len:
            msg = "Duplicate characters for given flag are passed. "\
                  "Please correct the passed flag characters for fsMode."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        if set(vflag) - set(valid_flag):
            msg = "Invalid flag passed for the fsMode. Please "\
                  "pass the correct flag characters"
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        # Permissions must likewise be unique and valid.
        passed_vperm_len = len(list(type_flag_perm[2]))
        vperm = list(set(list(type_flag_perm[2])))
        if len(vperm) < passed_vperm_len:
            msg = "Duplicate characters for given permission are passed. "\
                  "Please correct the passed permissions for fsMode."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        if set(vperm) - set(valid_perm):
            msg = "Invalid characters for the permissions of fsMode are "\
                  "passed. Please remove the invalid characters."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        return True

    def _check_is_valid_acl_string(self, fsMode):
        """Validate an fsMode ACL string of exactly three comma separated
        ACEs, each of the form 'type:flags:permissions'."""
        fsMode_list = fsMode.split(',')
        if len(fsMode_list) != 3:
            msg = "Passed acl string is not valid. "\
                  "Pass correct acl string."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        for value in fsMode_list:
            self._check_valid_fsMode_string(value)
        return True

    @staticmethod
    def _is_valid_octal_num(fsMode):
        """Match a 4-digit octal permission string like '0755'."""
        return re.match('^0[0-7]{3}$', fsMode)

    def _validate_fsMode(self, fsMode):
        """Accept either an ACL string (contains ':') or an octal mode."""
        is_valid_fs_mode = True
        if ':' in fsMode:
            is_valid_fs_mode = self._check_is_valid_acl_string(fsMode)
        else:
            is_valid_fs_mode = self._is_valid_octal_num(fsMode)
        if not is_valid_fs_mode:
            msg = "Invalid value passed for the fsMode."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)

    @staticmethod
    def _validate_fsOwner(fsOwner):
        """Validate fsOwner as 'uid:gid' with both parts integers."""
        fsOwner_list = fsOwner.split(':')
        if len(fsOwner_list) != 2:
            msg = "Invalid value specified for fsOwner Option."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)
        try:
            for val in fsOwner_list:
                int(val)
        except ValueError:
            # Typo fix in user-facing message ("inforamtion").
            msg = "Please provide correct fsowner information. You have "\
                  "passed non integer values."
            LOG.error(msg)
            raise exception.InvalidInput(reason=msg)

    @staticmethod
    def _validate_opts(operation, contents, valid_opts, mandatory_opts=None):
        """Check the request options for *operation*.

        :raises exception.InvalidInput: when a mandatory option is missing
            or an unknown option was supplied.
        """
        LOG.info("Validating options for operation '%s'" % operation)
        if 'Opts' in contents and contents['Opts']:
            received_opts = contents.get('Opts').keys()
            if mandatory_opts:
                diff = set(mandatory_opts) - set(received_opts)
                if diff:
                    # Print options in sorted manner
                    mandatory_opts.sort()
                    msg = "One or more mandatory options %s are missing " \
                          "for operation %s" % (mandatory_opts, operation)
                    LOG.error(msg)
                    raise exception.InvalidInput(reason=msg)
            diff = set(received_opts) - set(valid_opts)
            if diff:
                diff = list(diff)
                diff.sort()
                msg = "Invalid option(s) %s specified for operation %s. " \
                      "Please check help for usage." % \
                      (diff, operation)
                LOG.error(msg)
                raise exception.InvalidInput(reason=msg)
class FileRequestContextBuilder(RequestContextBuilder):
    def __init__(self, backend_configs):
        """Initialize with the map of file-persona backend configurations."""
        super(FileRequestContextBuilder, self).__init__(backend_configs)
def _get_build_req_ctxt_map(self):
build_req_ctxt_map = OrderedDict()
# If share-dir is specified, file-store MUST be specified
build_req_ctxt_map['filePersona,help'] = self._create_help_req_ctxt
build_req_ctxt_map['filePersona'] = \
self._create_share_req_ctxt
# build_req_ctxt_map['persona,cpg'] = \
# self._create_share_req_ctxt
# build_req_ctxt_map['persona,cpg,size'] = \
# self._create_share_req_ctxt
# build_req_ctxt_map['persona,cpg,size,fpg_name'] = \
# self._create_share_req_ctxt
# build_req_ctxt_map['virtualCopyOf,shareName'] = \
# self._create_snap_req_ctxt
# build_req_ctxt_map['updateShare'] = \
# self._create_update_req_ctxt
return build_req_ctxt_map
def _create_share_req_params(self, name, options, def_backend_name):
LOG.info("_create_share_req_params: Entering...")
# import pdb
# pdb.set_trace()
backend = self._get_str_option(options, 'backend', def_backend_name)
if backend == 'DEFAULT_BLOCK':
msg = 'Backend DEFAULT_BLOCK is reserved for Block ' \
'operations. Cannot specify it for File operations'
LOG.error(msg)
raise exception.InvalidInput(msg)
config = self._backend_configs.get(backend)
if not config:
raise exception.InvalidInput(
'ERROR: Backend %s is not configured for File Persona'
% backend
)
cpg = self._get_str_option(
options, 'cpg',
config.hpe3par_cpg[0] if config.hpe3par_cpg else None)
if not cpg:
raise exception.InvalidInput(
"ERROR: CPG is not configured in hpe.conf. Please specify"
"name of an existing CPG in hpe.conf and restart plugin")
fpg = self._get_str_option(options, 'fpg', None)
fsMode = self._get_str_option(options, 'fsMode', None)
fsOwner = self._get_str_option(options, 'fsOwner', None)
if fsMode:
self._validate_fsMode(fsMode)
if fsOwner:
self._validate_fsOwner(fsOwner)
if fsMode:
if fsOwner is None:
raise exception.InvalidInput(
" ERROR: If mode bits or directory permissions"
" needs to be changed then, providing fsOwner"
" is mandetory")
size_gib = self._get_int_option(options, 'size', 1024)
# Default share size or quota in MiB which is 1TiB
size = size_gib * 1024
fpg_size_gib = int(config.hpe3par_default_fpg_size) * 1024
if size_gib > fpg_size_gib:
raise exception.InvalidInput(
"ERROR: Share size cannot be greater than the FPG size. "
"Either specify hpe3par_default_fpg_size >= %s GiB or "
"specify option '-o size' < %s GiB"
% (size_gib, fpg_size_gib))
# TODO: This check would be required when VFS needs to be created.
# NOT HERE
# if not ip_subnet and not config.hpe3par_ip_pool:
# raise exception.InvalidInput(
# "ERROR: Unable to create share as neither 'ipSubnet' "
# "option specified not IP address pool hpe3par_ip_pool "
# "configured in configuration file specified")
readonly_str = self._get_str_option(options, 'readonly', 'false')
readonly = str.lower(readonly_str)
if readonly == 'true':
readonly = True
elif readonly == 'false':
readonly = False
else:
raise exception.InvalidInput(
'ERROR: Invalid value "%s" supplied for "readonly" option. '
'Valid values are case insensitive ["true", "false"]'
% readonly_str)
nfs_options = self._get_str_option(options, 'nfsOptions', None)
comment = self._get_str_option(options, 'comment', None)
share_details = share.create_metadata(backend, cpg, fpg, name, size,
readonly=readonly,
nfs_options=nfs_options,
comment=comment, fsMode=fsMode,
fsOwner=fsOwner)
LOG.info("_create_share_req_params: %s" % share_details)
return share_details
def _create_share_req_ctxt(self, contents, def_backend_name):
LOG.info("_create_share_req_ctxt: Entering...")
valid_opts = ('backend', 'filePersona', 'cpg', 'fpg',
'size', 'mountConflictDelay', 'fsMode', 'fsOwner')
mandatory_opts = ('filePersona',)
self._validate_opts("create share", contents, valid_opts,
mandatory_opts)
share_args = self._create_share_req_params(contents['Name'],
contents['Opts'],
def_backend_name)
ctxt = {'orchestrator': 'file',
'operation': 'create_share',
'kwargs': share_args}
LOG.info("_create_share_req_ctxt: Exiting: %s" % ctxt)
return ctxt
def | |
in_args_section = False
last_section = None
for lineno, line in enumerate(lines[1:], start=2):
line_indent_len = len(line) - len(line.lstrip(' '))
margs = {
'offset': lineno,
'line': line,
}
l = line.strip()
# Catch semi-common javadoc style.
if l.startswith('@param') or l.startswith('@return'):
self.add_message('C9007', node=node, line=node.fromlineno, args=margs)
# See if we can detect incorrect behavior.
section = l.split(':', 1)[0]
# Remember whether we're currently in the Args: section so we don't treat
# named arguments as sections (e.g. a function has a "returns" arg). Use
# the indentation level to detect the start of the next section.
if in_args_section:
in_args_section = (indent_len < line_indent_len)
if not in_args_section:
# We only parse known invalid & valid sections here. This avoids
# picking up things that look like sections but aren't (e.g. "Note:"
# lines), and avoids running checks on sections we don't yet support.
if section.lower() in invalid_sections:
self.add_message('C9007', node=node, line=node.fromlineno, args=margs)
elif section in self.VALID_SECTIONS:
if section in sections:
# We got the same section more than once?
margs_copy = margs.copy()
margs_copy.update({
'line_old': sections[section].lineno,
'section': section,
})
self.add_message('C9017', node=node, line=node.fromlineno,
args=margs_copy)
else:
# Gather the order of the sections.
sections[section] = last_section = DocStringSectionDetails(
name=section, header=line, lineno=lineno)
# Detect whether we're in the Args section once we've processed the Args
# section itself.
in_args_section = (section == 'Args')
if l == '' and last_section:
last_section.lines = lines[last_section.lineno:lineno - 1]
last_section = None
return sections
def _check_section_lines(self, node, lines, sections):
"""Verify each section (Args/Returns/Yields/Raises) is sane"""
indent_len = self._docstring_indent(node)
# Make sure the sections are in the right order.
found_sections = [x for x in self.VALID_SECTIONS if x in sections]
if found_sections != sections.keys():
self.add_message('C9008', node=node, line=node.fromlineno)
for section in sections.values():
# We're going to check the section line itself.
lineno = section.lineno
line = section.header
want_indent = indent_len + self._indent_len
line_indent_len = len(line) - len(line.lstrip(' '))
margs = {
'offset': lineno,
'line': line,
'want_indent': want_indent,
'curr_indent': line_indent_len,
}
# Make sure it has some number of leading whitespace.
if not line.startswith(' '):
self.add_message('C9004', node=node, line=node.fromlineno, args=margs)
# Make sure it has a single trailing colon.
if line.strip() != '%s:' % section.name:
self.add_message('C9007', node=node, line=node.fromlineno, args=margs)
# Verify blank line before it. We use -2 because lineno counts from one,
# but lines is a zero-based list.
if lines[lineno - 2] != '':
self.add_message('C9006', node=node, line=node.fromlineno, args=margs)
# Check the indentation level on the section header (e.g. Args:).
if line_indent_len != indent_len:
self.add_message('C9015', node=node, line=node.fromlineno, args=margs)
# Now check the indentation of subtext in each section.
saw_exact = False
for i, line in enumerate(section.lines, start=1):
# Every line should be indented at least the minimum.
# Always update margs so that if we drop through below, it has
# reasonable values when generating the message.
line_indent_len = len(line) - len(line.lstrip(' '))
margs.update({
'line': line,
'offset': lineno + i,
'curr_indent': line_indent_len,
})
if line_indent_len == want_indent:
saw_exact = True
elif line_indent_len < want_indent:
self.add_message('C9015', node=node, line=node.fromlineno, args=margs)
# If none of the lines were indented at the exact level, then something
# is amiss like they're all incorrectly offset.
if not saw_exact:
self.add_message('C9015', node=node, line=node.fromlineno, args=margs)
  def _check_all_args_in_doc(self, node, _lines, sections):
    """All function arguments are mentioned in doc"""
    if not hasattr(node, 'argnames'):
      return

    # If they don't have an Args section, then give it a pass.
    section = sections.get('Args')
    if section is None:
      return

    # Now verify all args exist.
    # TODO: Should we verify arg order matches doc order ?
    # TODO: Should we check indentation of wrapped docs ?
    missing_args = []
    for arg in node.args.args:
      # Ignore class related args.
      if arg.name in ('cls', 'self'):
        continue
      # Ignore ignored args.
      if arg.name.startswith('_'):
        continue

      # Valid arguments may look like `<arg>:` or `<arg> (<type>):`.
      arg_re = re.compile(r'%s( \([^)]+\))?:' % re.escape(arg.name))
      for l in section.lines:
        aline = l.lstrip()
        m = arg_re.match(aline)
        if m:
          # Require exactly one space between the colon and the help text;
          # an empty remainder (description on the next line) is accepted.
          amsg = aline[m.end():]
          if len(amsg) and len(amsg) - len(amsg.lstrip()) != 1:
            margs = {'arg': l}
            self.add_message('C9012', node=node, line=node.fromlineno,
                             args=margs)
          break
      else:
        # for/else: no line in the Args section documented this argument.
        missing_args.append(arg.name)

    if missing_args:
      # NOTE(review): '|, |' renders the list as "a|, |b" -- presumably to
      # delimit names in the C9010 message template; confirm before changing.
      margs = {'arg': '|, |'.join(missing_args)}
      self.add_message('C9010', node=node, line=node.fromlineno, args=margs)
def _check_func_signature(self, node):
"""Require *args to be named args, and **kwargs kwargs"""
vararg = node.args.vararg
if vararg and vararg != 'args' and vararg != '_args':
margs = {'arg': vararg}
self.add_message('C9011', node=node, line=node.fromlineno, args=margs)
kwarg = node.args.kwarg
if kwarg and kwarg != 'kwargs' and kwarg != '_kwargs':
margs = {'arg': kwarg}
self.add_message('C9011', node=node, line=node.fromlineno, args=margs)
class Py3kCompatChecker(BaseChecker):
  """Make sure we enforce py3k compatible features"""

  __implements__ = IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9100(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'py3k_compat_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9100': ('Missing "from __future__ import print_function" line',
                ('missing-print-function'), _MessageR9100),
  }
  options = ()

  def __init__(self, *args, **kwargs):
    super(Py3kCompatChecker, self).__init__(*args, **kwargs)
    # Track whether the module imports anything at all, and whether one of
    # those imports pulls in print_function.
    self.seen_print_func = False
    self.saw_imports = False

  def close(self):
    """Called when done processing module"""
    # Modules that import nothing at all (like empty __init__.py files)
    # get a pass.
    if not self.seen_print_func and self.saw_imports:
      self.add_message('R9100')

  def _check_print_function(self, node):
    """Verify print_function is imported"""
    if node.modname != '__future__':
      return
    if any(name == 'print_function' for name, _ in node.names):
      self.seen_print_func = True

  def visit_from(self, node):
    """Process 'from' statements"""
    self.saw_imports = True
    self._check_print_function(node)

  def visit_import(self, _node):
    """Process 'import' statements"""
    self.saw_imports = True
class SourceChecker(BaseChecker):
  """Make sure we enforce rules on the source."""

  __implements__ = IAstroidChecker

  # pylint: disable=class-missing-docstring,multiple-statements
  class _MessageR9200(object): pass
  class _MessageR9201(object): pass
  class _MessageR9202(object): pass
  class _MessageR9203(object): pass
  class _MessageR9204(object): pass
  class _MessageR9205(object): pass
  class _MessageR9210(object): pass
  # pylint: enable=class-missing-docstring,multiple-statements

  name = 'source_checker'
  priority = -1
  MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
  msgs = {
      'R9200': ('Shebang should be #!/usr/bin/env python2 or '
                '#!/usr/bin/env python3',
                ('bad-shebang'), _MessageR9200),
      'R9201': ('Shebang is missing, but file is executable (chmod -x to fix)',
                ('missing-shebang'), _MessageR9201),
      'R9202': ('Shebang is set, but file is not executable (chmod +x to fix)',
                ('spurious-shebang'), _MessageR9202),
      'R9203': ('Unittest not named xxx_unittest.py',
                ('unittest-misnamed'), _MessageR9203),
      'R9204': ('File encoding missing (the first line after the shebang'
                ' should be "# -*- coding: utf-8 -*-")',
                ('missing-file-encoding'), _MessageR9204),
      'R9205': ('File encoding should be "utf-8"',
                ('bad-file-encoding'), _MessageR9205),
      'R9210': ('Trailing new lines found at end of file',
                ('excess-trailing-newlines'), _MessageR9210),
  }
  options = ()

  # Taken from PEP-263.
  _ENCODING_RE = re.compile(r'^[ \t\v]*#.*?coding[:=][ \t]*([-_.a-zA-Z0-9]+)')

  def visit_module(self, node):
    """Called when the whole file has been read"""
    stream = node.file_stream
    st = os.fstat(stream.fileno())
    self._check_shebang(node, stream, st)
    self._check_encoding(node, stream, st)
    self._check_module_name(node)
    self._check_trailing_lines(node, stream, st)

  def _check_shebang(self, _node, stream, st):
    """Verify the shebang is version specific"""
    stream.seek(0)
    # Executable bit set for user, group or other.
    mode = st.st_mode
    executable = bool(mode & 0o0111)

    shebang = stream.readline()
    if shebang[0:2] != '#!':
      # No shebang: only a problem when the file is executable.
      if executable:
        self.add_message('R9201')
      return
    elif not executable:
      # Shebang present on a non-executable file.
      self.add_message('R9202')

    if shebang.strip() not in (
        '#!/usr/bin/env python2', '#!/usr/bin/env python3'):
      self.add_message('R9200')

  def _check_encoding(self, _node, stream, st):
    """Verify the file has an encoding set

    See PEP-263 for more details.
    https://www.python.org/dev/peps/pep-0263/
    """
    # Only allow empty files to have no encoding (e.g. __init__.py).
    if not st.st_size:
      return

    stream.seek(0)
    encoding = stream.readline()

    # If the first line is the shebang, then the encoding is the second line.
    if encoding[0:2] == '#!':
      encoding = stream.readline()

    # See if the encoding matches the standard.
    m = self._ENCODING_RE.match(encoding)
    if m:
      if m.group(1) != 'utf-8':
        self.add_message('R9205')
    else:
      self.add_message('R9204')

  def _check_module_name(self, node):
    """Make sure the module name is sane"""
    # Catch various typos.
    name = node.name.rsplit('.', 2)[-1]
    if name.rsplit('_', 2)[-1] in ('unittests',):
      self.add_message('R9203')

  def _check_trailing_lines(self, _node, stream, st):
    """Reject trailing lines"""
    if st.st_size > 1:
      # Read the last two bytes: if they strip down to nothing, the file
      # ends with at least two newlines (i.e. blank trailing lines).
      stream.seek(st.st_size - 2)
      if not stream.read().strip('\n'):
        self.add_message('R9210')
class ChromiteLoggingChecker(BaseChecker):
    """Make sure we enforce rules on importing logging."""

    __implements__ = IAstroidChecker

    # Marker class for the single message id below.
    # pylint: disable=class-missing-docstring,multiple-statements
    class _MessageR9301(object): pass
    # pylint: enable=class-missing-docstring,multiple-statements

    name = 'chromite_logging_checker'
    priority = -1
    MSG_ARGS = 'offset:%(offset)i: {%(line)s}'
    # Message id -> (template, symbolic name, marker class).
    msgs = {
        'R9301': ('logging is deprecated. Use "from chromite.lib import '
                  'cros_logging as logging" to import chromite/lib/cros_logging',
                  ('cros-logging-import'), _MessageR9301),
    }
    options = ()
    # This checker is disabled by default because we only want to disallow "import
    # logging" in chromite and not in other places cros lint is used. To enable
    # this checker, modify the
import spidev
from concurrent import futures
import time
import math
import numpy as np
import grpc
import quanser_service_pb2_grpc
from quanser_service_pb2 import QuanserResponse
import random
# Module-level handle to the single QubeServo2 instance; assigned in
# QubeServo2.__init__ so the gRPC handler methods can reach the hardware.
self_servo = None
PI = math.pi
# Safety threshold constant (not referenced in the code visible here).
# NOTE(review): named a "voltage" but its unit is unclear -- confirm.
MOTOR_PROTECTION_VOLTAGE = 500
# Base control period in seconds (1 ms).
UNIT_TIME = 1 / 1000
class QubeServo2:
    def __init__(self):
        # Publish this instance through the module-level global so the gRPC
        # servicer callbacks can address the one shared hardware interface.
        global self_servo
        self_servo = self
        self.step_id_for_edgex = 0
        self.pendulum_count = 0
        # SPI bus 0, device 0; mode 0b10, 1 MHz clock.
        self.spi = spidev.SpiDev()
        self.spi.open(0, 0)
        self.spi.mode = 0b10
        self.spi.max_speed_hz = 1000000
        self.step_id = 0
        self.motor_command = 0          # last commanded raw motor value
        self.is_reset = False
        self.color = None               # LED colour name for the next SPI frame
        self.last_motor_radian = 0      # previous samples for velocity estimates
        self.last_pendulum_radian = 0
        self.motor_limit = False        # True once limit_check() has tripped
        self.last_time = 0.0            # timestamp of the previous read_and_pub()
        # Re-zero the encoders once at start-up.
        self.pendulum_reset(0,None)
        self.step_start = False         # set by the first step() call
    def step(self, QuanserRequest, context):
        """gRPC handler: apply one motor command and return the new state.

        Also runs the safety limit check after reading the state.
        """
        self.step_start = True
        # NOTE(review): only used by the commented-out timing print below.
        previous_time = time.time()
        self.color = "green"
        # self_servo is the module-level alias for this same instance.
        self_servo.motor_command = int(QuanserRequest.value)
        self_servo.set_motor_command()
        motor_radian, motor_velocity, pendulum_radian, pendulum_velocity, step_id = self_servo.read_and_pub()
        self.limit_check(motor_velocity)
        #print("rasp Grpc time :", previous_time - time.time())
        return QuanserResponse(
            message="STEP",
            motor_radian=motor_radian, motor_velocity=motor_velocity,
            pendulum_radian=pendulum_radian, pendulum_velocity=pendulum_velocity,
            is_motor_limit=self.motor_limit
        )
    def step_sync(self, QuanserRequest, context):
        """gRPC handler: apply a motor command and return the state.

        Unlike step(), this variant performs no limit check.
        """
        self_servo.motor_command = int(QuanserRequest.value)
        self_servo.set_motor_command()
        motor_radian, motor_velocity, pendulum_radian, pendulum_velocity, step_id = self_servo.read_and_pub()
        return QuanserResponse(
            message="STEP_SYNC",
            motor_radian=motor_radian, motor_velocity=motor_velocity,
            pendulum_radian=pendulum_radian, pendulum_velocity=pendulum_velocity,
            # NOTE(review): this reports self.step_start, not self.motor_limit
            # as step() does -- confirm that is intentional.
            is_motor_limit=self.step_start
        )
    def reset(self, QuanserRequest, context):
        """gRPC handler: clear the limit flag, zero the command, re-center."""
        self.motor_limit = False
        self.motor_command = 0
        motor_radian, motor_velocity, pendulum_radian, pendulum_velocity, step_id = self_servo.read_and_pub()
        # Drive the arm back toward its center window before replying.
        self.protection()
        # One-second pause before the response (presumably to let the
        # hardware settle -- confirm).
        time.sleep(1)
        return QuanserResponse(
            message="RESET",
            motor_radian=motor_radian, motor_velocity=motor_velocity,
            pendulum_radian=pendulum_radian, pendulum_velocity=pendulum_velocity
        )
def reset_sync(self, QuanserRequest, context):
if not self.step_start:
while True:
time.sleep(0.001)
if self.step_start:
break
motor_radian, motor_velocity, pendulum_radian, pendulum_velocity, step_id = self_servo.read_and_pub()
return QuanserResponse(
message="RESET_SYNC",
motor_radian=motor_radian, motor_velocity=motor_velocity,
pendulum_radian=pendulum_radian, pendulum_velocity=pendulum_velocity
)
    def pendulum_reset(self, QuanserRequest, context):
        """gRPC handler: re-zero the encoders and light the LED yellow.

        NOTE(review): the meaning of the mode bytes (0b01111111 vs
        0b00011111) comes from the Qube Servo 2 SPI protocol -- confirm
        against the device documentation.
        """
        self.color = "yellow"
        # First transfer: frame with the extra mode bits set (encoder reset).
        # Bytes 3-8 carry the LED channel values (0x03e7 on two channels,
        # matching the "yellow" colour used elsewhere in this class).
        self.spi.xfer2([
            0x01,
            0x00,
            0b01111111,
            0x03, 0xe7, 0x00, 0x00, 0x03, 0xe7,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00
        ])
        # Second transfer: read back the post-reset state.
        data = self.spi.xfer2([
            0x01,
            0x00,
            0b00011111,
            0x03, 0xe7, 0x00, 0x00, 0x03, 0xe7,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00
        ])
        # Decoded angles are not used here; the transfer is what matters.
        _, motor_radian, pendulum_radian = self.__data_conversion(data)
        print("***** Pendulum Reset Complete!!! ***** ")
        return QuanserResponse(
            message="PENDULUM_RESET"
        )
    def initial_state(self):
        """Re-zero the encoders, like pendulum_reset() but with no gRPC reply.

        The decoded angles are read and discarded; only the SPI transfers'
        side effects are wanted here.
        """
        # Same two-frame sequence as pendulum_reset(): reset, then read back.
        self.spi.xfer2([
            0x01,
            0x00,
            0b01111111,
            0x03, 0xe7, 0x00, 0x00, 0x03, 0xe7,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00
        ])
        data = self.spi.xfer2([
            0x01,
            0x00,
            0b00011111,
            0x03, 0xe7, 0x00, 0x00, 0x03, 0xe7,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00
        ])
        _, motor_radian, pendulum_radian = self.__data_conversion(data)
def __data_conversion(self, data):
# Devoid ID
device_id = ((data[0] & 0xff) << 8) | (data[1] & 0xff)
# Motor Encoder Counts
encoder0 = ((data[2] & 0xff) << 16) | ((data[3] & 0xff) << 8) | (data[4] & 0xff)
if encoder0 & 0x00800000:
encoder0 = encoder0 | 0xFF000000
encoder0 = (0x100000000 - encoder0) * (-1)
# convert the arm encoder counts to angle theta in radians
motor_position = encoder0 * (-2.0 * PI / 2048.0)
# Pendulum Encoder Counts
encoder1 = ((data[5] & 0xff) << 16) | ((data[6] & 0xff) << 8) | (data[7] & 0xff)
if encoder1 & 0x00800000:
encoder1 = encoder1 | 0xFF000000
encoder1 = (0x100000000 - encoder1) * (-1)
# wrap the pendulum encoder counts when the pendulum is rotated more than 360 degrees
encoder1 = encoder1 % 2048
if encoder1 < 0:
encoder1 += 2048
# convert the arm encoder counts to angle theta in radians
pendulum_angle = encoder1 * (2.0 * PI / 2048.0) - PI
return device_id, motor_position, pendulum_angle
def __motor_command_split(self, motor_command):
# to signed
if motor_command & 0x0400:
motor_command = motor_command | 0xfc00
# print("__motor_command_split : ", motor_command, type(motor_command))
# add amplifier bit
motor_command = (motor_command & 0x7fff) | 0x8000
# separate into 2 bytes
motor_command_h = (motor_command & 0xff00) >> 8
motor_command_l = (motor_command & 0xff)
return motor_command_h, motor_command_l
    def read_data(self):
        """Poll the device over SPI and return (motor_radian, pendulum_radian)."""
        data = self.spi.xfer2([
            0x01,
            0x00,
            0x1f,
            0x00, 0xff, 0x00, 0xff, 0x00, 0xff,
            0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
            0x00, 0x00
        ])
        _, motor_radian, pendulum_radian = self.__data_conversion(data)
        return motor_radian, pendulum_radian
def __set_motor_command(self, motor_command):
motor_command = int(motor_command)
if self.color == "red":
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x03, 0xe7, 0x00, 0x00, 0x00, 0x00
elif self.color == "green":
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x00, 0x00, 0x03, 0xe7, 0x00, 0x00
elif self.color == "blue":
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x00, 0x00, 0x00, 0x00, 0x03, 0xe7
elif self.color == "cyan":
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x00, 0x00, 0x03, 0xe7, 0x03, 0xe7
elif self.color == "magenta":
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x03, 0xe7, 0x00, 0x00, 0x03, 0xe7
elif self.color == "yellow":
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x03, 0xe7, 0x03, 0xe7, 0x00, 0x00
elif self.color == "white":
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x03, 0xe7, 0x03, 0xe7, 0x03, 0xe7
else:
red_h, red_l, green_h, green_l, blue_h, blue_l = 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
# print("motor_command : ",motor_command)
motor_command_h, motor_command_l = self.__motor_command_split(motor_command)
data = self.spi.xfer2([
0x01,
0x00,
0x1f,
red_h, red_l, green_h, green_l, blue_h, blue_l,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
motor_command_h, motor_command_l
])
#print("RIP time :", time.time() - previous_time)
_, motor_radian, pendulum_radian = self.__data_conversion(data)
return motor_radian, pendulum_radian
    def limit_check(self, motor_velocity):
        """Ramp the motor command down if the arm left its safe angle range.

        Fires once (guarded by self.motor_limit) when |motor angle| >= 80
        degrees: the LED turns red, the command magnitude is scaled down
        over 50 ticks of UNIT_TIME, then protection() re-centers the arm.
        """
        motor_radian, _ = self.__set_motor_command(int(self.motor_command))
        if not self.motor_limit and abs(motor_radian) >= 80 * PI / 180.0:
            self.step_start = False
            self.color = "red"
            # Scale factor is (max(motor_velocity, 100) - i) / 100 per tick;
            # the sign of the applied command follows the arm's sign.
            # NOTE(review): max() keeps the factor >= (100 - i)/100 -- verify
            # that min() was not intended here.
            for i in range(50):
                if motor_radian >= 0:
                    m = int(abs(self.motor_command) * ((max(motor_velocity, 100) - i) / 100))
                    motor_radian, _ = self.__set_motor_command(m)
                else:
                    m = -int(abs(self.motor_command) * ((max(motor_velocity, 100) - i) / 100))
                    motor_radian, _ = self.__set_motor_command(m)
                time.sleep(UNIT_TIME)
            self.protection()
            self.motor_limit = True
    def protection(self):
        """Nudge the arm with angle-proportional commands until it settles.

        Exits only after 200 consecutive samples inside the 10-degree
        window around zero.
        """
        n_protection = 0
        n_protection_completion = 0
        default_motor_command_ratio = 50
        motor_radian, _ = self.read_data()
        while n_protection_completion < 200:
            time.sleep(UNIT_TIME)
            # Command proportional to the current arm angle.
            motor_command = motor_radian * default_motor_command_ratio
            # Every 10th tick (including the first) the command is negated.
            if n_protection % 10 == 0:
                motor_radian, _ = self.__set_motor_command(int(-motor_command))
            else:
                motor_radian, _ = self.__set_motor_command(int(motor_command))
            # Count consecutive in-window samples; any excursion resets it.
            if abs(motor_radian) < 10 * PI / 180.0:
                n_protection_completion += 1
            else:
                n_protection_completion = 0
            n_protection += 1
# read radian and if step_id is changed, publish to env.
def read_and_pub(self):
motor_radian, pendulum_radian = self.__set_motor_command(int(self.motor_command))
current_time = time.time()
motor_velocity = (motor_radian - self.last_motor_radian) / (current_time - self.last_time)
pendulum_velocity = (pendulum_radian - self.last_pendulum_radian) / (current_time - self.last_time)
self.last_motor_radian = motor_radian
self.last_pendulum_radian = pendulum_radian
self.last_time = time.time()
return motor_radian, motor_velocity, pendulum_radian, pendulum_velocity, float(self.step_id)
# set motor command to last subscribe command.
def set_motor_command(self):
self.is_action = True
# print(datetime.utcnow().strftime('%S.%f')[:-1], self.motor_command, flush = True)
self.__set_motor_command(int(self.motor_command))
def set_wait(self):
self.is_action = False
self.color = "white"
self.__set_motor_command(0)
motor_radian, pendulum_radian = self.read_data()
return motor_radian, 0, pendulum_radian, 0 ,int(self.step_id)
    def manual_swing_up(self):
        """Swing-up control loop; returns once the pendulum is near angle 0.

        NOTE(review): the constants (80.0 drive level, 25 rad/s threshold,
        11-degree exit window, 2.5 rad unwrap margin) look hand-tuned for
        this rig -- confirm before changing.
        """
        print("\n***** Swing Up Start!!! *****")
        previousTime = time.perf_counter()
        last_pendulum_radian = 0
        motorPWM = 0
        while True:
            # if the difference between the current time and the last time an SPI transaction
            # occurred is greater than the sample time, start a new SPI transaction
            currentTime = time.perf_counter()
            if currentTime - previousTime >= UNIT_TIME:
                previousTime = currentTime
                motor_radian, pendulum_radian = self.read_data()
                # Done when within 11 degrees of angle 0 (the target pose,
                # given the -pi offset applied in __data_conversion).
                if 0.0 <= abs(pendulum_radian) <= PI * 11.0 / 180.0:
                    break
                angular_variation = (pendulum_radian - last_pendulum_radian)
                # angular variation filtering: unwrap jumps across the
                # +/-pi seam of the wrapped angle.
                if angular_variation > 2.5:
                    angular_variation -= math.pi * 2
                elif angular_variation < -2.5:
                    angular_variation += math.pi * 2
                pendulum_angular_velocity = angular_variation / UNIT_TIME
                last_pendulum_radian = pendulum_radian
                voltage = 80.0  # 48.65 # 49.215
                # Reduce drive once the pendulum already swings fast.
                if abs(pendulum_angular_velocity) > 25:
                    voltage /= int(10 * np.log(abs(pendulum_angular_velocity)))
                # Fold angles beyond 90 degrees back toward the axis so the
                # cos() term below keeps its sign behaviour near the top.
                if PI >= abs(pendulum_radian) >= PI * 90.0 / 180.0:
                    if pendulum_radian >= 0:
                        pendulum_radian = math.pi - pendulum_radian
                    else:
                        pendulum_radian = - math.pi + abs(pendulum_radian)
                # Drive with the direction of motion; pick a random direction
                # when the measured velocity is exactly zero.
                if pendulum_angular_velocity == 0:
                    if random.random() < 0.5:
                        motorPWM = int(-2 * math.cos(pendulum_radian) * voltage)
                    else:
                        motorPWM = int(2 * math.cos(pendulum_radian) * voltage)
                elif pendulum_angular_velocity < 0:
                    motorPWM = int(-2 * math.cos(pendulum_radian) * voltage)
                else:
                    motorPWM = int(2 * math.cos(pendulum_radian) * voltage)
                self.__set_motor_command(motorPWM)
        self.color = "blue"
        print("\n***** Swing Up complete!!! *****")
def manual_balance(self):
theta_n_k1 = 0.0
theta_dot_k1 = 0.0
alpha_n_k1 = 0.0
alpha_dot_k1 = 0.0
kp_theta = 2.0
kd_theta = -2.0
kp_alpha = -30.0
kd_alpha = 2.5
previousTime = time.perf_counter()
count = 0
# time
while count < 1500 / 5:
# if the difference between the current time and the last time an SPI transaction
# occurred is greater than the sample time, start a new SPI transaction
currentTime = | |
for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_o_auth_client_credentials got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"userId": user_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["CREATING", "ACTIVE", "INACTIVE", "DELETING", "DELETED"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
query_params = {
"page": kwargs.get("page", missing),
"limit": kwargs.get("limit", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json"
}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="list[OAuth2ClientCredentialSummary]")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="list[OAuth2ClientCredentialSummary]")
def list_policies(self, compartment_id, **kwargs):
"""
Lists the policies in the specified compartment (either the tenancy or another of your compartments).
See `Where to Get the Tenancy's OCID and User's OCID`__.
To determine which policies apply to a particular group or compartment, you must view the individual
statements inside all your policies. There isn't a way to automatically obtain that information via the API.
__ https://docs.cloud.oracle.com/Content/API/Concepts/apisigningkey.htm#five
:param str compartment_id: (required)
The OCID of the compartment (remember that the tenancy is simply the root compartment).
:param str page: (optional)
The value of the `opc-next-page` response header from the previous \"List\" call.
:param int limit: (optional)
The maximum number of items to return in a paginated \"List\" call.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type list of :class:`~oci.identity.models.Policy`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/policies"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"page",
"limit"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_policies got unknown kwargs: {!r}".format(extra_kwargs))
query_params = {
"compartmentId": compartment_id,
"page": kwargs.get("page", missing),
"limit": kwargs.get("limit", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json"
}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="list[Policy]")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="list[Policy]")
def list_region_subscriptions(self, tenancy_id, **kwargs):
"""
Lists the region subscriptions for the specified tenancy.
:param str tenancy_id: (required)
The OCID of the tenancy.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type list of :class:`~oci.identity.models.RegionSubscription`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/tenancies/{tenancyId}/regionSubscriptions"
method = "GET"
expected_kwargs = ["retry_strategy"]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_region_subscriptions got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"tenancyId": tenancy_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json"
}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="list[RegionSubscription]")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="list[RegionSubscription]")
def list_regions(self, **kwargs):
"""
Lists all the regions offered by Oracle Cloud Infrastructure.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type list of :class:`~oci.identity.models.Region`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/regions"
method = "GET"
expected_kwargs = ["retry_strategy"]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_regions got unknown kwargs: {!r}".format(extra_kwargs))
header_params = {
"accept": "application/json",
"content-type": "application/json"
}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
header_params=header_params,
response_type="list[Region]")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
header_params=header_params,
response_type="list[Region]")
def list_smtp_credentials(self, user_id, **kwargs):
"""
Lists the SMTP credentials for the specified user. The returned object contains the credential's OCID,
the SMTP user name but not the SMTP password. The SMTP password is returned only upon creation.
:param str user_id: (required)
The OCID of the user.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type list of :class:`~oci.identity.models.SmtpCredentialSummary`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/users/{userId}/smtpCredentials"
method = "GET"
expected_kwargs = ["retry_strategy"]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_smtp_credentials got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"userId": user_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json"
}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="list[SmtpCredentialSummary]")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="list[SmtpCredentialSummary]")
def list_swift_passwords(self, user_id, **kwargs):
"""
**Deprecated. Use :func:`list_auth_tokens` instead.**
Lists the Swift passwords for the specified user. The returned object contains the password's OCID, but not
the password itself. The actual password is returned only upon creation.
:param str user_id: (required)
The OCID of the user.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type list of :class:`~oci.identity.models.SwiftPassword`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/users/{userId}/swiftPasswords"
method = "GET"
expected_kwargs = ["retry_strategy"]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_swift_passwords got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"userId": user_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json"
}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
| |
<filename>source/menubar.py
# encoding: utf-8
# author: <NAME>
# email: <EMAIL>
import audio
import images
import io
import macroboxstyle
import os
import threading
import time
import webbrowser
import wx
import wx.adv
from dialogbox import AboutBox
from dialogbox import CheckItemsConsistencyConfirmBox
from dialogbox import DialogBox
from dialogbox import DialogPanel
from dialogbox import UpdateBox
from listbox import FileOpenDialog
from listbox import FileSaveDialog
from macroboxlib import Button
from macroboxlib import CheckBox
from macroboxlib import ComboBox
from macroboxlib import FONT_ITEM
from macroboxlib import GetPreference
from macroboxlib import GetPreferences
from macroboxlib import PRODUCT_ONLINE_HELP_URL
from macroboxlib import SetPreference
from macroboxlib import SetPreferences
from macroboxlib import SpinCtrl
from macroboxlib import StaticText
from macroboxlib import TextCtrl
from operator import itemgetter
from utilities import Struct
from wininstance import get_current_real_cwq
# from dialogbox import LicenseBox
# from macroboxlib import *
# from scripteditor import *
# from dialogbox import *
# from menubar import AppearanceBox
# class wxHTML(wx.html.HtmlWindow):
# def OnLinkClicked(self, link):
# webbrowser.open(link.GetHref())
# WebLinkEditorPanel
class WebLinkPreset():
    """Holds the user's web-search link presets, backed by the preferences store."""

    def __init__(self):
        # Built-in (name, base URL, query-field 1, query-field 2) entries,
        # kept sorted by service name.
        builtin = [
            ('7Digital', 'http://www.7digital.com/search?q=', 'Artist', 'Title'),
            ('Beatport', 'http://www.beatport.com/search?query=', 'Artist', 'Title'),
            ('Bing', 'http://www.bing.com/search?q=', 'Artist', 'Title'),
            ('Discogs', 'http://www.discogs.com/search/?q=', 'Artist', 'Title'),
            ('Google', 'https://www.google.com/?q=#q=', 'Artist', 'Title'),
            ('LastFM', 'http://www.last.fm/search?q=', 'Artist', 'Title'),
            ('MixCloud', 'http://www.mixcloud.com/search/?mixcloud_query=', 'Artist', 'Title'),
            ('SoundCloud', 'https://soundcloud.com/search?q=', 'Artist', 'Title'),
            ('TraxSource', 'http://www.traxsource.com/search?term=', 'Artist', 'Title'),
            ('YouTube', 'http://www.youtube.com/results?search_query=', 'Artist', 'Title')]
        self.default_preset = sorted(builtin, key=itemgetter(0))
        self.default_choices = [entry[0] for entry in self.default_preset]
        # Fall back to the defaults when no user preset has been stored yet.
        stored = GetPreference('weblink_preset')
        self.preset = self.default_preset if stored is None else stored
        self.field_choices = ['', 'Filename', 'Album', 'Artist', 'Title']

    def SetPreset(self, preset):
        """Replace the active preset and persist it to preferences."""
        self.preset = preset
        SetPreference('weblink_preset', preset)

    def GetPreset(self):
        """Return the active preset list."""
        return self.preset
class WebLinkEditorPanel(DialogPanel):
    """Dialog panel for editing the ten web-search link presets.

    Each row holds a service-name combo, a URL text field and two query
    field combos.  Rows beyond the stored presets are created blank.
    """

    def __init__(self, parent, pos=(0, 0)):
        DialogPanel.__init__(self, parent)
        self.parent = parent
        self.SetDoubleBuffered(True)
        self.SetBackgroundColour((255, 255, 255))
        # The shared WebLinkPreset instance lives three levels up the
        # parent chain (panel -> dialog -> owner).
        self.preset = self.parent.parent.parent.WebLinkPreset.preset
        self.field_choices = self.parent.parent.parent.WebLinkPreset.field_choices
        self.default_preset = self.parent.parent.parent.WebLinkPreset.default_preset
        self.default_choices = self.parent.parent.parent.WebLinkPreset.default_choices
        self.WebName = list()
        self.WebLink = list()
        # Column header labels.
        self.StaticTexts = list()
        self.StaticTexts += [StaticText(self, label=u'Name')]
        self.StaticTexts += [StaticText(self, label=u'URL')]
        self.StaticTexts += [StaticText(self, label=u'Query 1')]
        self.StaticTexts += [StaticText(self, label=u'Query 2')]
        self.QueryFields = list()
        idx = 1
        # One populated row per stored preset entry.
        for preset in self.preset:
            self.WebName += [ComboBox(self, id=idx, value=preset[0],
                                      choices=self.default_choices, style=0)]
            self.WebName[-1].SetMark(0, 0)
            self.WebLink += [TextCtrl(self, id=idx, value=preset[1])]
            self.WebName[-1].Bind(wx.EVT_COMBOBOX, self.OnWebName)
            self.WebName[-1].Bind(wx.EVT_TEXT, self.OnEnableApply)
            self.WebLink[-1].Bind(wx.EVT_TEXT, self.OnEnableApply)
            QueryFields = list()
            for i in range(2):
                QueryFields += [ComboBox(self, id=idx, value=preset[i + 2],
                                         choices=self.field_choices, style=wx.CB_READONLY)]
                QueryFields[-1].Bind(wx.EVT_COMBOBOX, self.OnEnableApply)
            self.QueryFields += [QueryFields]
            idx += 1
        # Pad with blank rows up to ten total.
        for _ in range(10 - len(self.preset)):
            self.WebName += [ComboBox(self, id=idx,
                                      value='', choices=self.default_choices, style=0)]
            self.WebLink += [TextCtrl(self, id=idx, value='')]
            self.WebName[-1].Bind(wx.EVT_COMBOBOX, self.OnWebName)
            self.WebName[-1].Bind(wx.EVT_TEXT, self.OnEnableApply)
            self.WebLink[-1].Bind(wx.EVT_TEXT, self.OnEnableApply)
            QueryFields = list()
            for i in range(2):
                QueryFields += [ComboBox(self, id=idx, value='',
                                         choices=self.field_choices, style=wx.CB_READONLY)]
                QueryFields[-1].Bind(wx.EVT_COMBOBOX, self.OnEnableApply)
            self.QueryFields += [QueryFields]
            idx += 1
        self.Bind(wx.EVT_SIZE, self.OnSize)

    def OnSize(self, event):
        """Lay out the header labels and the per-row controls by hand."""
        width, height = self.GetClientSize()
        posX = 10
        posY = 20
        # Header labels above the four columns.
        self.StaticTexts[0].SetRect((posX + 3, posY, -1, -1))
        self.StaticTexts[1].SetRect((posX + 105 + 3, posY, -1, -1))
        self.StaticTexts[2].SetRect((width - posX - 170 + 5 + 8, posY, -1, -1))
        self.StaticTexts[3].SetRect((width - posX - 85 + 5 + 8, posY, -1, -1))
        # NOTE(review): only the populated rows are laid out here, not the
        # blank padding rows -- confirm that is intended.
        for idx in range(len(self.preset)):
            posX = 10
            posY += 30
            self.WebName[idx].SetRect((posX, posY - 5, 100, 22))
            self.WebLink[idx].SetRect((posX + 100 + 5, posY - 5, width -
                                       posX * 2 - 100 + 3 - 180 + 10, 22))
            posX = self.WebLink[idx].GetPosition().x + self.WebLink[idx].GetSize().width
            for i in range(2):
                self.QueryFields[idx][i].SetRect((posX + 5, posY - 5, 80, 22))
                posX += 80 + 5

    def OnEnableApply(self, event):
        """Hook for enabling the Apply button on edits (currently disabled)."""
        pass
        # self.parent.ApplyButton.Enable()

    def OnWebName(self, event):
        """When a known service is picked, fill its URL and query fields."""
        name = event.String
        weblink = [v for v in self.default_preset if name == v[0]][0]
        idx = event.GetId() - 1
        self.WebLink[idx].SetValue(weblink[1])
        self.QueryFields[idx][0].SetValue(weblink[2])
        self.QueryFields[idx][1].SetValue(weblink[3])
        # self.parent.ApplyButton.Enable()

    def SaveAllPreference(self):
        """Collect every row's values and persist them as the new preset."""
        names = [v.GetValue() for v in self.WebName]
        links = [v.GetValue() for v in self.WebLink]
        fields1 = [v[0].GetValue() for v in self.QueryFields]
        fields2 = [v[1].GetValue() for v in self.QueryFields]
        # Bug fix: materialize the rows.  On Python 3, zip() returns a
        # one-shot iterator; persisting that instead of a list would store
        # an unusable object.  list() is a harmless no-op change on Python 2.
        preset = list(zip(names, links, fields1, fields2))
        self.parent.parent.parent.WebLinkPreset.SetPreset(preset)

    def OnClose(self, event):
        """Persist the edits and tear the panel down."""
        self.SaveAllPreference()
        self.Destroy()
class KeymapPreset():
    """Keyboard-shortcut preset handling.

    A preset entry is the 5-tuple (namespace, key string, raw-key-flag pair,
    ctrl, shift).  The raw-key-flag pair holds the two raw key flag values a
    key can report on this platform (see GetKeymap).
    """
    def __init__(self):
        preset = GetPreference('keymap_preset')
        default_preset = self.GetDefaultKeymap()
        if preset is None:
            self.keymap_preset = default_preset
        else:
            self.keymap_preset = preset
            # Discard a stale stored preset (entry count changed between app
            # versions).  BUGFIX: this used to compare len(preset) against the
            # default *list* itself instead of its length, so the comparison
            # never matched and every saved preset was silently thrown away.
            if len(preset) != len(default_preset):
                self.keymap_preset = default_preset
    def GetNameSpaceByRawKeyFlag(self, keyflag, ctrl=False, shift=False):
        """Return the action namespace bound to keyflag with the given
        modifier state, or None when nothing matches."""
        vv = [v for v in self.keymap_preset if v[2] is not None and (
            v[2][0] == keyflag or v[2][1] == keyflag)]
        if len(vv) == 0:
            return None
        vv = [v for v in vv if v[3] == ctrl and v[4] == shift]
        if len(vv) == 0:
            return None
        return vv[0][0]
    def IsDelayedEventRawKeyFlag(self, keyflag):
        """Placeholder -- not implemented, always returns None."""
        pass
    def SetKeymapPreset(self, keymap_preset):
        """Replace the current preset list."""
        self.keymap_preset = keymap_preset
    def GetKeymapPreset(self):
        """Return the current preset list."""
        return self.keymap_preset
    def GetDefaultKeymap(self):
        """Build the default preset list from the built-in
        (namespace, key string) table."""
        default_keymap = (
            ('play_toggle', 'Spacebar'),
            ('previous_track', 'W'),
            ('next_track', 'E'),
            ('fast_forward', 'D'),
            ('fast_backward', 'S'),
            ('loop_toggle', 'R'),
            ('highlight_toggle', 'Q'),
            ('highlight_decrease', '1'),
            ('highlight_increase', '2'),
            ('playlist_toggle', 'A'),
            ('open_id3tageditor', '`')
        )
        keymap_preset = list()
        for namespace, string in default_keymap:
            baseflag = self.String2BaseRawKeyFlag(string)
            ctrl = u'Ctrl + ' in string
            shift = u'Shift + ' in string
            keymap_preset += [(namespace, string, baseflag, ctrl, shift)]
        return keymap_preset
    def GetKeymapLabels(self):
        """Return the user-visible labels, index-aligned with GetDefaultKeymap()."""
        keymap_labels = (
            'Play and Pause',
            'Play previous track',
            'Play next track',
            'Fast forward',
            'Fast backward',
            'Loop toggle',
            'Highlight toggle',
            'Highlight duration -',
            'Highlight duration +',
            'Playlist toggle',
            'Open ID3Tag Editor'
        )
        return keymap_labels
    def String2BaseRawKeyFlag(self, string):
        """Return the raw-key-flag pair for a key string such as 'Ctrl + W'
        (only the last ' + '-separated token is looked up), or None."""
        key = string.split(' + ')[-1]
        keymap = self.GetKeymap()
        v = [v for v in keymap if v[1] == key]
        if len(v) == 0:
            return None
        return v[0][0]
    def RawKeyFlag2String(self, keyflag):
        """Return the key string for a single raw key flag value
        (matched against either flag of the pair), or None."""
        keymap = self.GetKeymap()
        v = [v[1] for v in keymap if v[0][0] == keyflag or v[0][1] == keyflag]
        if len(v) == 0:
            return None
        if len(v[0]) == 1:
            v[0] = v[0].upper()
        # return unicode(v[0])
        return v[0]
    def GetKeymap(self, special=False):
        """Return [(rawkeyflag_pair, key_string), ...] covering the keyboard.

        Each key string is paired with the two raw key flag values observed
        for that key on this platform.  With special=True, navigation and
        modifier keys are included as well.
        """
        chars = [v for v in """`1234567890-=\\""".upper()]
        rawkeyflags = [(2686977, 1076428801), (131073, 1073872897),
            (196609, 1073938433), (262145, 1074003969), (327681, 1074069505),
            (393217, 1074135041), (458753, 1074200577), (524289, 1074266113),
            (589825, 1074331649), (655361, 1074397185), (720897, 1074462721),
            (786433, 1074528257), (851969, 1074593793), (2818049, 1076559873)]
        chars += [v for v in """qwertyuiop[]""".upper()]
        rawkeyflags += [(1048577, 1074790401), (1114113, 1074855937),
            (1179649, 1074921473), (1245185, 1074987009), (1310721, 1075052545),
            (1376257, 1075118081), (1441793, 1075183617), (1507329, 1075249153),
            (1572865, 1075314689), (1638401, 1075380225), (1703937, 1075445761),
            (1769473, 1075511297)]
        chars += [v for v in """asdfghjkl;'""".upper()]
        rawkeyflags += [(1966081, 1075707905), (2031617, 1075773441),
            (2097153, 1075838977), (2162689, 1075904513), (2228225, 1075970049),
            (2293761, 1076035585), (2359297, 1076101121), (2424833, 1076166657),
            (2490369, 1076232193), (2555905, 1076297729), (2621441, 1076363265)]
        chars += [v for v in """zxcvbnm,./""".upper()]
        rawkeyflags += [(2883585, 1076625409), (2949121, 1076690945),
            (3014657, 1076756481), (3080193, 1076822017), (3145729, 1076887553),
            (3211265, 1076953089), (3276801, 1077018625), (3342337, 1077084161),
            (3407873, 1077149697), (3473409, 1077215233)]
        chars += ['Esc', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8',
            'F9', 'F10', 'F11', 'F12', 'BackSpace', 'Spacebar']
        # 2686977? 65537,1073807361
        rawkeyflags += [(65537, 1073807361), (3866625, 1077608449),
            (3932161, 1077673985), (3997697, 1077739521), (4063233, 1077805057),
            (4128769, 1077870593), (4194305, 1077936129), (4259841, 1078001665),
            (4325377, 1078067201), (4390913, 1078132737), (4456449, 1078198273),
            (5701633, 1079443457), (5767169, 1079508993), (917505, 1074659329),
            (3735553, 1077477377)]
        chars += ['NumPad 1', 'NumPad 2', 'NumPad 3', 'NumPad 4', 'NumPad 5',
            'NumPad 6', 'NumPad 7', 'NumPad 8', 'NumPad 9', 'NumPad 0']
        rawkeyflags += [(5177345, 1078919169), (5242881, 1078984705),
            (5308417, 1079050241), (4915201, 1078657025), (4980737, 1078722561),
            (5046273, 1078788097), (4653057, 1078394881), (4718593, 1078460417),
            (4784129, 1078525953), (5373953, 1079115777)]
        chars += ['NumPad /', 'NumPad *', 'NumPad -', 'NumPad +', 'NumPad .']
        rawkeyflags += [(20250625, 1093992449), (3604481, 1077346305),
            (4849665, 1078591489), (5111809, 1078853633), (5439489, 1079181313)]
        if special:
            chars += ['ArrowLeft', 'ArrowRight', 'ArrowUp', 'ArrowDown']
            # BUGFIX: a stray fifth flag pair (3735553, 1077477377) -- the
            # Spacebar's flags -- used to be appended here for only four arrow
            # keys, shifting every later special key's flags by one position.
            rawkeyflags += [(21692417, 1095434241), (21823489, 1095565313),
                (21495809, 1095237633), (22020097, 1095761921)]
            chars += ['Insert', 'Delete', 'Home', 'End', 'PageUp', 'PageDown']
            rawkeyflags += [(22151169, 1095892993), (22216705, 1095958529),
                (21430273, 1095172097), (21954561, 1095696385),
                (21561345, 1095303169), (22085633, 1095827457)]
            chars += ['NumLock', 'Shift(Left)', 'Shift(Right)',
                'Ctrl', 'CapsLock', 'ScrollLock', 'Break']
            rawkeyflags += [(21299201, 1095041025), (2752513, 1076494337),
                (3538945, 1077280769), (1900545, 1075642369),
                (3801089, 1077542913), (4587521, 1078329345), (4521985, 1078263809)]
            chars += ['Alt(Left)', 'Alt(Right)', 'Win(Left)',
                'Win(Right)', 'ContextMenu', 'Language']
            rawkeyflags += [(540540929, 1614282753), (20447233, 1094189057),
                (22740993, 1096482817), (22806529, 1096548353),
                (22872065, 1096613889), (32636929, 1106378753)]
        return [(flag, key) for flag, key in zip(rawkeyflags, chars)]
class ShortcutKeyPanel(DialogPanel, KeymapPreset):
def __init__(self, parent):
DialogPanel.__init__(self, parent)
KeymapPreset.__init__(self)
self.parent = parent
self.SetDoubleBuffered(True)
self.SetBackgroundColour((255, 255, 255))
labels = self.GetKeymapLabels()
self.Labels = list()
self.UserInput = list()
offset = 20
pad = -30
idx = 1
for i, label in enumerate(labels[:6]):
self.Labels += [StaticText(self, id=idx, label=label, style=wx.ALIGN_RIGHT)]
self.Labels[-1].SetRect((pad + 20, offset + 3, 150, -1))
self.Labels[-1].Bind(wx.EVT_LEFT_DOWN, self.OnRemoveValue)
self.UserInput += [TextCtrl(self, id=idx)]
self.UserInput[-1].SetValue(self.keymap_preset[idx - 1][1])
self.UserInput[-1].SetRect((pad + 165 + 15, offset, 100, 22))
self.UserInput[-1].Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
offset += 30
idx += 1
offset = 20
pad = 220
for i, label in enumerate(labels[6:]):
self.Labels += [StaticText(self, id=idx, label=label, style=wx.ALIGN_RIGHT)]
self.Labels[-1].SetRect((pad + 20, offset + 3, 150, -1))
self.Labels[-1].Bind(wx.EVT_LEFT_DOWN, self.OnRemoveValue)
self.UserInput += [TextCtrl(self, id=idx)]
self.UserInput[-1].SetValue(self.keymap_preset[idx - 1][1])
| |
'''
from datetime import datetime as dt
from datetime import date, timedelta
from datetime import datetime
import plotly.graph_objs as go
from plotly import tools
import numpy as np
import pandas as pd
# Silence pandas' SettingWithCopyWarning for the chained assignments used below.
pd.options.mode.chained_assignment = None
# Read in Travel Report Data
df = pd.read_csv('data/performance_analytics_cost_and_ga_metrics.csv')
# Normalize the report's verbose headers to the short names used throughout.
df.rename(columns={
    'Travel Product': 'Placement type',
    'Spend - This Year': 'Spend TY',
    'Spend - Last Year': 'Spend LY',
    'Sessions - This Year': 'Sessions - TY',
    'Sessions - Last Year': 'Sessions - LY',
    'Bookings - This Year': 'Bookings - TY',
    'Bookings - Last Year': 'Bookings - LY',
    'Revenue - This Year': 'Revenue - TY',
    'Revenue - Last Year': 'Revenue - LY',
}, inplace=True)
df['Date'] = pd.to_datetime(df['Date'])
# Most recent year/week present in the data.
current_year = df['Year'].max()
current_week = df[df['Year'] == current_year]['Week'].max()
now = datetime.now()
# e.g. '20240131' -- presumably used to stamp exported files; confirm at call sites.
datestamp = now.strftime("%Y%m%d")
# Metric columns aggregated by every table and chart below.
columns = ['Spend TY', 'Spend LY', 'Sessions - TY', 'Sessions - LY', 'Bookings - TY', 'Bookings - LY', 'Revenue - TY', 'Revenue - LY']
# Define Formatters
def formatter_currency(x):
    """Render x as whole-dollar currency; negatives use accounting parentheses."""
    if x >= 0:
        return "${:,.0f}".format(x)
    return "(${:,.0f})".format(abs(x))
def formatter_currency_with_cents(x):
    """Render x as currency with cents; negatives use accounting parentheses."""
    if x >= 0:
        return "${:,.2f}".format(x)
    return "(${:,.2f})".format(abs(x))
def formatter_percent(x):
    """Render x as a percentage with one decimal; negatives in parentheses."""
    if x >= 0:
        return "{:,.1f}%".format(x)
    return "({:,.1f}%)".format(abs(x))
def formatter_percent_2_digits(x):
    """Render x as a percentage with two decimals; negatives in parentheses."""
    if x >= 0:
        return "{:,.2f}%".format(x)
    return "({:,.2f}%)".format(abs(x))
def formatter_number(x):
    """Render x as a thousands-grouped integer; negatives in parentheses."""
    if x >= 0:
        return "{:,.0f}".format(x)
    return "({:,.0f})".format(abs(x))
# First Data Table Update Function
def update_first_datatable(start_date, end_date, category, aggregation):
    """Aggregate the travel report for the selected period and return DataTable rows.

    The data is aggregated per 'Placement type' (filtered to *category*), or per
    'GA Category' / 'Birst Category' (unfiltered; the column is renamed to
    'Placement type' afterwards).  Each metric is compared against the prior
    period of equal length ('- LP' columns, PoP) and against last year
    ('LY' columns, YoY).  Every formatted delta column gets an unformatted
    '*_conditional' twin -- presumably consumed downstream for conditional
    styling of the table (confirm before removing them).

    BUGFIX: the Revenue PoP (%) guard used to test 'Revenue - LP' != 0 twice
    instead of 'Revenue - TY' and 'Revenue - LP', unlike every other metric.

    \param start_date Period start as a 'YYYY-MM-DD' string (or None)
    \param end_date Period end as a 'YYYY-MM-DD' string (or None)
    \param category Category filter, used only when aggregation == 'Placement type'
    \param aggregation One of 'Placement type', 'GA Category', 'Birst Category'
    \return List of per-row dicts (DataFrame.to_dict("rows"))
    \raise ValueError for an unsupported aggregation value
    """
    if start_date is not None:
        start_date = dt.strptime(start_date, '%Y-%m-%d')
        start_date_string = start_date.strftime('%Y-%m-%d')
    if end_date is not None:
        end_date = dt.strptime(end_date, '%Y-%m-%d')
        end_date_string = end_date.strftime('%Y-%m-%d')
    # Prior period: same number of days, ending the day before start_date.
    days_selected = (end_date - start_date).days
    prior_start_date = start_date - timedelta(days_selected + 1)
    prior_start_date_string = datetime.strftime(prior_start_date, '%Y-%m-%d')
    prior_end_date = end_date - timedelta(days_selected + 1)
    prior_end_date_string = datetime.strftime(prior_end_date, '%Y-%m-%d')

    # --- aggregate current and prior period ----------------------------------
    if aggregation == 'Placement type':
        df1 = df[(df['Category'] == category)].groupby(['Date', aggregation]).sum()[columns].reset_index()
    elif aggregation in ('GA Category', 'Birst Category'):
        df1 = df.groupby(['Date', aggregation]).sum()[columns].reset_index()
    else:
        raise ValueError('Unsupported aggregation: %r' % (aggregation,))
    df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
    df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
    df_by_date_prior.rename(columns={'Spend TY': 'Spend - LP', 'Sessions - TY': 'Sessions - LP', 'Bookings - TY': 'Bookings - LP', 'Revenue - TY': 'Revenue - LP'}, inplace=True)
    dfc = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
    if aggregation != 'Placement type':
        dfc.rename(columns={aggregation: 'Placement type'}, inplace=True)

    # --- per-metric deltas ----------------------------------------------------
    def _abs_delta(metric, ty_col, base_col, suffix, fmt):
        # '<metric> <suffix> (Abs)' formatted for display, plus an unformatted
        # '<metric>_<suffix>_abs_conditional' twin.
        col = '%s %s (Abs)' % (metric, suffix)
        dfc['%s_%s_abs_conditional' % (metric, suffix)] = dfc[col] = dfc[ty_col] - dfc[base_col]
        dfc[col] = dfc[col].apply(fmt)

    def _pct_delta(metric, ty_col, base_col, suffix):
        # '<metric> <suffix> (%)' formatted for display (left NaN when either
        # side is 0), plus an unformatted '<metric>_<suffix>_percent_conditional'
        # twin used for styling.
        col = '%s %s (%%)' % (metric, suffix)
        dfc[col] = np.nan
        mask = (dfc[ty_col] != 0) & (dfc[base_col] != 0)
        dfc['%s_%s_percent_conditional' % (metric, suffix)] = dfc[col] = np.where(
            mask, (dfc[ty_col] - dfc[base_col]) / dfc[base_col] * 100, dfc[col])
        dfc[col] = np.where(mask, dfc[col].apply(formatter_percent), dfc[col])

    _abs_delta('Spend', 'Spend TY', 'Spend - LP', 'PoP', formatter_currency)
    _pct_delta('Spend', 'Spend TY', 'Spend - LP', 'PoP')
    _pct_delta('Spend', 'Spend TY', 'Spend LY', 'YoY')
    _pct_delta('Sessions', 'Sessions - TY', 'Sessions - LP', 'PoP')
    _pct_delta('Sessions', 'Sessions - TY', 'Sessions - LY', 'YoY')
    _abs_delta('Bookings', 'Bookings - TY', 'Bookings - LP', 'PoP', formatter_number)
    _abs_delta('Bookings', 'Bookings - TY', 'Bookings - LY', 'YoY', formatter_number)
    _pct_delta('Bookings', 'Bookings - TY', 'Bookings - LP', 'PoP')
    _pct_delta('Bookings', 'Bookings - TY', 'Bookings - LY', 'YoY')
    _abs_delta('Revenue', 'Revenue - TY', 'Revenue - LP', 'PoP', formatter_currency)
    _abs_delta('Revenue', 'Revenue - TY', 'Revenue - LY', 'YoY', formatter_currency)
    _pct_delta('Revenue', 'Revenue - TY', 'Revenue - LP', 'PoP')  # fixed guard: TY & LP
    _pct_delta('Revenue', 'Revenue - TY', 'Revenue - LY', 'YoY')

    # --- format the level columns ---------------------------------------------
    for col in ('Spend TY', 'Spend - LP', 'Spend LY',
                'Revenue - TY', 'Revenue - LP', 'Revenue - LY'):
        dfc[col] = dfc[col].apply(formatter_currency)
    for col in ('Sessions - TY', 'Sessions - LP', 'Sessions - LY',
                'Bookings - TY', 'Bookings - LP', 'Bookings - LY'):
        dfc[col] = dfc[col].apply(formatter_number)

    # NOTE(review): this rearranged view is built but never returned; the full
    # frame (including the *_conditional twins) is what goes back to the caller.
    df_by_date_combined_dt = dfc[[
        'Placement type',
        'Spend TY', 'Spend - LP', 'Spend PoP (Abs)', 'Spend PoP (%)', 'Spend LY', 'Spend YoY (%)',
        'Sessions - TY', 'Sessions - LP', 'Sessions PoP (%)', 'Sessions - LY', 'Sessions YoY (%)',
        'Bookings - TY', 'Bookings - LP', 'Bookings PoP (%)', 'Bookings PoP (Abs)', 'Bookings - LY', 'Bookings YoY (%)', 'Bookings YoY (Abs)',
        'Revenue - TY', 'Revenue - LP', 'Revenue PoP (Abs)', 'Revenue PoP (%)', 'Revenue - LY', 'Revenue YoY (%)', 'Revenue YoY (Abs)',
        # 'Spend_PoP_percent_conditional',
    ]]
    return dfc.to_dict("rows")
# First Data Table Download Function
def update_first_download(start_date, end_date, category, aggregation):
if start_date is not None:
start_date = dt.strptime(start_date, '%Y-%m-%d')
start_date_string = start_date.strftime('%Y-%m-%d')
if end_date is not None:
end_date = dt.strptime(end_date, '%Y-%m-%d')
end_date_string = end_date.strftime('%Y-%m-%d')
days_selected = (end_date - start_date).days
prior_start_date = start_date - timedelta(days_selected + 1)
prior_start_date_string = datetime.strftime(prior_start_date, '%Y-%m-%d')
prior_end_date = end_date - timedelta(days_selected + 1)
prior_end_date_string = datetime.strftime(prior_end_date, '%Y-%m-%d')
if aggregation == 'Placement type':
df1 = df[(df['Category'] == category)].groupby(['Date', aggregation]).sum()[columns].reset_index()
df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
df_by_date_prior.rename(columns={'Spend TY' : 'Spend - LP', 'Sessions - TY' : 'Sessions - LP', 'Bookings - TY' : 'Bookings - LP','Revenue - TY' : 'Revenue - LP'}, inplace=True)
df_by_date_combined = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
elif aggregation == 'GA Category':
df1 = df.groupby(['Date', aggregation]).sum()[columns].reset_index()
df_by_date = df1[(df1['Date'] >= start_date_string) & (df1['Date'] <= end_date_string)].groupby([aggregation]).sum()[columns].reset_index()
df_by_date_prior = df1[(df1['Date'] >= prior_start_date_string) & (df1['Date'] <= prior_end_date_string)].groupby([aggregation]).sum()[['Spend TY', 'Sessions - TY', 'Bookings - TY', 'Revenue - TY']].reset_index()
df_by_date_prior.rename(columns={'Spend TY' : 'Spend - LP', 'Sessions - TY' : 'Sessions - LP', 'Bookings - TY' : 'Bookings - LP','Revenue - TY' : 'Revenue - LP'}, inplace=True)
df_by_date_combined = pd.merge(df_by_date, df_by_date_prior, on=[aggregation])
df_by_date_combined.rename(columns={'GA Category':'Placement type'}, inplace=True)
elif aggregation == 'Birst Category':
df1 | |
sure we got back to ssh shell
def run_set_tunnelling_dev_lan_ip_address(self, ip):
""" Run the command set_tunnelling_dev_lan_ip_address on the remote tundev shell
\param ip an ipaddr.IPv4Network object or a string containing the IP address and prefix using the CIDR notation, to communicate to the RDV server
"""
self.run_command('set_tunnelling_dev_lan_ip_address ' + str(ip), 2)
def send_lan_ip_address_for_iface(self, iface):
""" Send the IP addressing for the interface iface to the remote tundev shell
\param iface The network interface for which we will extract the IP address
"""
self.run_set_tunnelling_dev_lan_ip_address(self._get_ip_network(iface=iface))
def run_set_tunnelling_dev_dns_server_list(self, dns_list):
""" Run the command set_tunnelling_dev_dns_server_list on the remote tundev shell
\param dns_list a list of ipaddr.IPv4Network objects representing DNS servers to communicate to the RDV server
"""
self.run_command('set_tunnelling_dev_dns_server_list ' + ' '.join(str(dns) for dns in dns_list))
    def send_lan_dns_config(self):
        """ Send the DNS configuration for the current host to the remote tundev shell
        Reads the host's resolver list via self._get_linux_nameserver_list() and
        forwards it with set_tunnelling_dev_dns_server_list.
        """
        self.run_set_tunnelling_dev_dns_server_list(self._get_linux_nameserver_list())
    def run_set_tunnelling_dev_hostname(self, hostname):
        """ Run the command set_tunnelling_dev_hostname on the remote tundev shell
        The hostname is sent single-quoted with its characters escaped.
        NOTE(review): str.encode('string_escape') is a Python 2-only codec; this
        line would raise LookupError under Python 3 -- confirm the target interpreter.
        \param hostname The hostname string to send
        """
        self.run_command('set_tunnelling_dev_hostname \'' + hostname.encode('string_escape') + '\'')
    def send_tunnelling_dev_hostname(self):
        """ Send the hostname string for the current host to the remote tundev shell
        Reads the hostname via self._get_hostname() and forwards it with
        set_tunnelling_dev_hostname.
        """
        self.run_set_tunnelling_dev_hostname(self._get_hostname())
def run_get_role(self):
""" Run the command get_role on the remote tundev shell
\return The role as a string
"""
role = self._strip_trailing_cr_from(self.run_command('get_role', 2))
if role == '':
raise Exception('TundevShellSyntaxError')
else:
return role
def run_get_tunnel_mode(self):
""" Run the command get_tunnel_mode on the remote tundev shell
\return The tunnel mode as a string
"""
mode = self._strip_trailing_cr_from(self.run_command('get_tunnel_mode', 2))
if mode == '':
raise Exception('TundevShellSyntaxError')
else:
return mode
def run_set_tunnelling_dev_uplink_type(self, uplink_type):
""" Run the command set_tunnelling_dev_uplink_type on the remote tundev shell
\param uplink_type The uplink type as a string (usual values 'lan' or '3g')
"""
self.run_command('set_tunnelling_dev_uplink_type ' + str(uplink_type), 2)
    def run_get_vtun_parameters(self):
        """ Run the command get_vtun_parameters on the remote tundev shell
        (the second run_command argument is 20 here instead of the usual 2 --
        presumably a longer timeout for tunnel allocation; confirm against run_command)
        \return The vtun config output string returned by the RDV server
        """
        return self._strip_trailing_cr_from(self.run_command('get_vtun_parameters', 20))
class ClientVtunTunnelConfig(object):
""" Class representing a tunnelling device configuration as provided by the remote tundev shell command get_vtun_parameters
This class is just a container around a python dict, with one method allowing to generate a pythonvtunlib.client_vtun_tunnel based on the parameters contained in the self.dict attribute
"""
    def __init__(self, config_dict, tunnel_mode, tunnel_name, vtun_server_hostname, vtun_server_port, vtund_exec = None, vtund_use_sudo = False, ping_use_sudo = False, vtun_connection_timeout = 20):
        """ Constructor
        \param config_dict A python dict (as produced by get_vtun_parameters) to encapsulate into this object
        \param tunnel_mode The tunnel mode ('L2', 'L3' etc...)
        \param tunnel_name Name (in the vtund terminology) of the tunnel session
        \param vtun_server_hostname The hostname to connect to (the vtund server)
        \param vtun_server_port The TCP port to use when connecting to the vtund server
        \param vtund_exec (optional) The PATH to the vtund binary
        \param vtund_use_sudo (optional) A boolean indicating whether the vtund_exec needs to be run via sudo to get root access (False by default)
        \param ping_use_sudo (optional) A boolean indicating whether ping needs to be run via sudo (False by default)
        \param vtun_connection_timeout How many seconds we give for the tunnel establishment (20 by default)
        """
        self.config_dict = config_dict
        self.tunnel_mode = tunnel_mode
        self.tunnel_name = tunnel_name
        self.vtun_server_hostname = vtun_server_hostname
        self.vtun_server_port = vtun_server_port
        self.vtund_exec = vtund_exec
        self.vtund_use_sudo = vtund_use_sudo
        self.ping_use_sudo = ping_use_sudo
        self.vtun_connection_timeout = vtun_connection_timeout
def to_client_vtun_tunnel_object(self):
""" Create a pythonvtunlib.client_vtun_tunnel object based on the configuration found in our self.dict attribute
If the self.dict attribute does not have (enough) information to build a client tunnel, an exception will be raised
\return The resulting pythonvtunlib.client_vtun_tunnel
"""
try:
tunnel_ip_prefix = str(self.config_dict['tunnel_ip_prefix'])
tunnel_ip_network = str(self.config_dict['tunnel_ip_network'])
if not tunnel_ip_prefix.startswith('/'):
tunnel_ip_network += '/'
tunnel_ip_network += tunnel_ip_prefix
client_vtun_tunnel_object = client_vtun_tunnel.ClientVtunTunnel(vtund_exec = self.vtund_exec,
vtund_use_sudo = self.vtund_use_sudo,
tunnel_ip_network=tunnel_ip_network,
tunnel_near_end_ip=str(self.config_dict['tunnelling_dev_ip_address']),
tunnel_far_end_ip=str(self.config_dict['rdv_server_ip_address']),
vtun_server_tcp_port=str(self.vtun_server_port),
vtun_shared_secret=str(self.config_dict['tunnel_secret']),
vtun_tunnel_name=str(self.tunnel_name),
vtun_server_hostname=str(self.vtun_server_hostname),
mode=self.tunnel_mode,
vtun_connection_timeout=self.vtun_connection_timeout
)
# Create post tunnel-setup script (up commands
if self.tunnel_mode == 'L3': # In L3 mode, activating routing on this tundev
client_vtun_tunnel_object.add_up_command('/sbin/ip "route add table 1 dev %% default via %A"')
client_vtun_tunnel_object.add_up_command('/sbin/ip "rule add unicast iif ' + self.config_dict['extremity_if'] + ' table 1"')
if self.config_dict['nat_to_external']: # NAT to external interface is used by onsite only
# Add a NAT rule using iptables
client_vtun_tunnel_object.add_up_command('/sbin/iptables "-t nat -A POSTROUTING -o ' + self.config_dict['extremity_if'] + ' -j MASQUERADE"')
client_vtun_tunnel_object.add_up_command('/sbin/sysctl "net.ipv4.ip_forward=1"')
elif self.tunnel_mode == 'L2': # In L2 mode, setup bridging
client_vtun_tunnel_object.add_up_command('/sbin/brctl "addbr ' + self.config_dict['bridge_if'] + '"')
client_vtun_tunnel_object.add_up_command('/sbin/brctl "addif ' + self.config_dict['bridge_if'] + ' ' + self.config_dict['extremity_if'] + '"')
client_vtun_tunnel_object.add_up_command('/sbin/brctl "addif ' + self.config_dict['bridge_if'] + ' %%"')
client_vtun_tunnel_object.add_up_command('/sbin/ip "link set ' + self.config_dict['bridge_if'] + ' up"')
# Create post tunnel-teardown script (down commands)
if self.tunnel_mode == 'L3': # In L3 mode, stop routing on this tundev
if self.config_dict['nat_to_external']: # NAT to external interface is used by onsite only
# Remove the NAT rule using iptables
client_vtun_tunnel_object.add_down_command('/sbin/iptables "-t nat -D POSTROUTING -o ' + self.config_dict['extremity_if'] + ' -j MASQUERADE"')
client_vtun_tunnel_object.add_down_command('/sbin/ip "rule del unicast iif ' + self.config_dict['extremity_if'] + ' table 1"')
client_vtun_tunnel_object.add_down_command('/sbin/ip "route del table 1 dev %% default via %A"')
elif self.tunnel_mode == 'L2': # In L2 mode, stop bridging
client_vtun_tunnel_object.add_down_command('/sbin/ip "link set ' + self.config_dict['bridge_if'] + ' down"')
client_vtun_tunnel_object.add_down_command('/sbin/brctl "delif ' + self.config_dict['bridge_if'] + ' %%"')
client_vtun_tunnel_object.add_down_command('/sbin/brctl "delif ' + self.config_dict['bridge_if'] + ' ' + self.config_dict['extremity_if'] + '"')
client_vtun_tunnel_object.add_down_command('/sbin/modprobe "-r bridge"') #Lionel: FIXME: Why not brctl delbr?
client_vtun_tunnel_object.add_down_command('/sbin/modprobe "bridge"')
return client_vtun_tunnel_object
except KeyError:
raise
def check_ping_peer(self):
""" Check that the tunnel is up and the peer remote inside the tunnel is responding to ping
\return True if the remote answered within 10 ping attempts, False otherwise
"""
try:
attempts = 10
ping_success = False
while attempts > 0:
cmd = ['ping', '-c' , '1', '-w', '1', str(self.config_dict['rdv_server_ip_address'])] # Send 1 ping and give it 1s to answer
if self.ping_use_sudo:
cmd = ['sudo'] + cmd
rc = subprocess.call(cmd, stdout=open(os.devnull, 'wb'), stderr=subprocess.STDOUT)
if rc == 0:
ping_success = True
break # Success, exit loop
else:
attempts -= 1 # One less attemps
if ping_success == False:
raise Exception('PeerNotRespondingToPing')
except KeyError:
raise Exception('IncompleteTunnelParameters')
def _get_vtun_parameters_as_dict(self):
""" Request the vtun parameters from the RDV server and return them in a dict containing each field as a key together with its value
\return A dict synthetising the vtun parameters, for example {'tunnel_ip_network': '192.168.101.0', 'tunnel_ip_prefix': '/30', ...}
"""
vtun_parameters_str = self.run_get_vtun_parameters()
config_dict = {}
for line in vtun_parameters_str.splitlines():
split = line.split(':', 1) # Cut in key:value
key = split[0].strip() # Get rid of leading and trailing whitespaces in key
value = split[1].strip() # Get rid of leading and trailing whitespaces in value
config_dict[key]=value
return config_dict
def get_client_vtun_tunnel(self, tunnel_mode, extremity_if, lan_if, vtun_server_hostname, vtun_server_port, vtund_exec = None, vtund_use_sudo = False, ping_use_sudo = False, vtun_connection_timeout = 20, nat_to_external = False):
""" Create a pythonvtunlib.client_vtun_tunnel object based on the configuration returned by the devshell command get_vtun_parameters
If the vtun_parameters_dict provided by the internal call to self._get_vtun_parameters_as_dict() does not have (enough) information to build a client tunnel, an exception will be raised
\param tunnel_mode The tunnel mode ('L2', 'L3' etc...)
\param extremity_if The external network interface (towards the support terminal for master, or towards the customer LAN for onsite)
\param lan_if The LAN network interface that allows to reach the Internet (eth0 is most cases, but could also be wlan0 if the device is connecting to the Internet via Wi-Fi)
\param vtun_server_hostname The hostname to connect to (the vtund server)
\param vtun_server_port The TCP port to use when connecting to the vtund server
\param vtund_exec (optional) The PATH to the vtund binary
\param vtund_use_sudo (optional) A boolean indicating whether the vtund_exec needs to be run via sudo to get root access (False by default)
\param ping_use_sudo (optional) A boolean indicating whether ping needs to be run via sudo (False by default)
\param vtun_connection_timeout How many seconds we give for the tunnel establishment (20 by default)
\param nat_to_external (default False) Do we also add a NAT rule to take the paternity | |
top:
path.append(key)
if type(top[key]) is OrderedDict:
is_leaf = False
self.build_cfg_list(key, top[key], path, info)
path.pop()
if is_leaf:
length = self.add_cfg_item(cfg_name, top, info['offset'], path)
info['offset'] += length
elif cfg_name == '' or (cfg_name and cfg_name[0] != '$'):
# check first element for struct
first = next(iter(top))
struct_str = CGenCfgData.STRUCT
if first != struct_str:
struct_node = OrderedDict({})
top[struct_str] = struct_node
top.move_to_end (struct_str, False)
else:
struct_node = top[struct_str]
struct_node['offset'] = start
struct_node['length'] = info['offset'] - start
if struct_node['length'] % 8 != 0:
raise SystemExit("Error: Bits length not aligned for %s !" % str(path))
def get_field_value (self, top = None):
    """Serialize the config subtree rooted at 'top' into packed binary form.

    \param top  subtree to serialize (defaults to the whole config tree)
    \return a bytearray holding the packed bit fields of the subtree
    """
    def _get_field_value (name, cfgs, level):
        # Only leaf nodes carry an 'indx' into the flat item list
        if 'indx' in cfgs:
            act_cfg = self.get_item_by_index (cfgs['indx'])
            if act_cfg['length'] == 0:
                return
            value = self.get_value (act_cfg['value'], act_cfg['length'], False)
            # Pack this field at its bit offset relative to the subtree start
            set_bits_to_bytes (result, act_cfg['offset'] - struct_info['offset'], act_cfg['length'], value)
    if top is None:
        top = self._cfg_tree
    struct_info = top[CGenCfgData.STRUCT]
    # Round the subtree's bit length up to whole bytes
    result = bytearray ((struct_info['length'] + 7) // 8)
    self.traverse_cfg_tree (_get_field_value, top)
    return result
def set_field_value (self, top, value_bytes, force = False):
    """Distribute the bits of value_bytes into the tree rooted at 'top'.

    \param top         a leaf item (has 'indx') or a struct subtree
    \param value_bytes packed bytes holding the new value(s)
    \param force       when True overwrite existing values; otherwise only
                       fill in items whose value is still empty
    """
    def _set_field_value (name, cfgs, level):
        # Only leaf items are updated; struct nodes are just traversed
        if 'indx' not in cfgs:
            return
        act_cfg = self.get_item_by_index (cfgs['indx'])
        if force or act_cfg['value'] == '':
            # Extract this field's bits relative to the struct start
            value = get_bits_from_bytes (full_bytes, act_cfg['offset'] - struct_info['offset'], act_cfg['length'])
            act_val = act_cfg['value']
            if act_val == '':
                act_val = '%d' % value
            # Keep the item's existing textual format (hex/dec/...) when rewriting
            act_val = self.reformat_number_per_type (act_cfg['type'], act_val)
            act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_val)
    if 'indx' in top:
        # it is config option
        value = bytes_to_value (value_bytes)
        act_cfg = self.get_item_by_index (top['indx'])
        act_cfg['value'] = self.format_value_to_str (value, act_cfg['length'], act_cfg['value'])
    else:
        # it is structure
        struct_info = top[CGenCfgData.STRUCT]
        length = struct_info['length'] // 8
        full_bytes = bytearray(value_bytes[:length])
        if len(full_bytes) < length:
            # Zero-pad short input up to the full struct length
            full_bytes.extend(bytearray(length - len(value_bytes)))
        self.traverse_cfg_tree (_set_field_value, top)
def update_def_value (self):
    """Normalize every default value string in the config tree.

    Leaf values are reformatted to their canonical string form; struct
    nodes that carry an aggregate 'value' are expanded down into their
    member fields via set_field_value.

    \raise Exception when a leaf value expression cannot be parsed
    """
    def _update_def_value (name, cfgs, level):
        if 'indx' in cfgs:
            # Leaf item: canonicalize its value string
            act_cfg = self.get_item_by_index (cfgs['indx'])
            if act_cfg['value'] != '' and act_cfg['length'] > 0:
                try:
                    act_cfg['value'] = self.reformat_value_str (act_cfg['value'], act_cfg['length'])
                except Exception:
                    # Narrowed from a bare 'except' so KeyboardInterrupt/SystemExit propagate
                    raise Exception ("Invalid value expression '%s' for '%s' !" % (act_cfg['value'], act_cfg['path']))
        else:
            # Struct node with an aggregate default: push it down to the fields
            if CGenCfgData.STRUCT in cfgs and 'value' in cfgs[CGenCfgData.STRUCT]:
                curr = cfgs[CGenCfgData.STRUCT]
                value_bytes = value_to_bytearray (self.eval(curr['value']), (curr['length'] + 7) // 8)
                self.set_field_value (cfgs, value_bytes)
    self.traverse_cfg_tree (_update_def_value, self._cfg_tree)
def evaluate_condition (self, item):
    """Evaluate an item's 'condition' expression and return the 1-bit result."""
    return self.parse_value (item['condition'], 1, False)
def load_default_from_bin (self, bin_data):
    """Overwrite every config value in the tree from a raw binary blob.

    \param bin_data packed config binary (force=True replaces existing values)
    """
    self.set_field_value(self._cfg_tree, bin_data, True)
def generate_binary_array (self):
    """Return the whole config tree serialized as a packed bytearray."""
    return self.get_field_value()
def generate_binary (self, bin_file_name):
    """Write the packed config binary to 'bin_file_name'.

    \param bin_file_name destination file path
    \return 0 on success
    """
    # 'with' guarantees the file is closed even if serialization fails
    with open(bin_file_name, "wb") as bin_file:
        bin_file.write (self.generate_binary_array ())
    return 0
def write_delta_file (self, out_file, platform_id, out_lines):
    """Write delta lines to out_file with the standard .dlt header.

    \param out_file    destination path
    \param platform_id numeric platform ID recorded in the header comment
    \param out_lines   preformatted 'path | value' lines
    """
    # 'with' guarantees the file is closed even if a write fails
    with open (out_file, "w") as dlt_fd:
        dlt_fd.write ("%s\n" % get_copyright_header('dlt', True))
        dlt_fd.write ('#\n')
        dlt_fd.write ('# Delta configuration values for platform ID 0x%04X\n' % platform_id)
        dlt_fd.write ('#\n\n')
        for line in out_lines:
            dlt_fd.write ('%s\n' % line)
def override_default_value(self, dlt_file):
    """Apply a delta (.dlt) file on top of the current default values.

    Each non-comment line has the form 'CFG_PATH | value'.  The file must
    set PLATFORMID_CFG_DATA.PlatformId or an exception is raised.

    \param dlt_file path of the delta file (include files are expanded)
    \return 0 on success
    \raise Exception on an unparsable line, an unknown config path, or a
           missing platform ID
    """
    error = 0
    dlt_lines = CGenCfgData.expand_include_files(dlt_file)
    platform_id = None
    for line, file_path, line_num in dlt_lines:
        line = line.strip()
        # Skip blank lines and comments
        if not line or line.startswith('#'):
            continue
        # Raw string avoids the invalid-escape-sequence warning of the old pattern
        match = re.match(r"\s*([\w\.]+)\s*\|\s*(.+)", line)
        if not match:
            raise Exception("Unrecognized line '%s' (File:'%s' Line:%d) !" %
                            (line, file_path, line_num + 1))
        path = match.group(1)
        value_str = match.group(2)
        top = self.locate_cfg_item (path)
        if not top:
            raise Exception(
                "Invalid configuration '%s' (File:'%s' Line:%d) !" %
                (path, file_path, line_num + 1))
        # A leaf item carries 'indx'; otherwise the path names a whole struct
        if 'indx' in top:
            act_cfg = self.get_item_by_index (top['indx'])
            bit_len = act_cfg['length']
        else:
            struct_info = top[CGenCfgData.STRUCT]
            bit_len = struct_info['length']
        value_bytes = self.parse_value (value_str, bit_len)
        self.set_field_value (top, value_bytes, True)
        if path == 'PLATFORMID_CFG_DATA.PlatformId':
            platform_id = value_str
    if platform_id is None:
        raise Exception(
            "PLATFORMID_CFG_DATA.PlatformId is missing in file '%s' !" %
            (dlt_file))
    return error
def generate_delta_file_from_bin (self, delta_file, old_data, new_data, full=False):
    """Write a .dlt file describing how new_data differs from old_data.

    \param delta_file path of the delta file to write
    \param old_data   baseline config binary
    \param new_data   modified config binary (loaded as the new defaults)
    \param full       when True list every non-reserved item, not only
                      the changed ones
    \return 0 on success
    """
    self.load_default_from_bin (new_data)
    lines = []
    platform_id = None
    def_platform_id = 0
    # (removed unused locals 'tag_name' and 'level' from the original)
    for item in self._cfg_list:
        old_val = get_bits_from_bytes (old_data, item['offset'], item['length'])
        new_val = get_bits_from_bytes (new_data, item['offset'], item['length'])
        full_name = item['path']
        if 'PLATFORMID_CFG_DATA.PlatformId' == full_name:
            # The platform ID line is always emitted first, below
            def_platform_id = old_val
            platform_id = new_val
        elif item['type'] != 'Reserved' and ((new_val != old_val) or full):
            val_str = self.reformat_value_str (item['value'], item['length'])
            text = '%-40s | %s' % (full_name, val_str)
            lines.append(text)
    # Fall back to the old platform ID when unchanged or never seen
    if platform_id is None or def_platform_id == platform_id:
        platform_id = def_platform_id
    lines.insert(0, '%-40s | %s\n\n' %
                 ('PLATFORMID_CFG_DATA.PlatformId', '0x%04X' % platform_id))
    self.write_delta_file (delta_file, platform_id, lines)
    return 0
def generate_delta_file(self, delta_file, bin_file, bin_file2, full=False):
    """Generate a delta file from one or two config binaries.

    With bin_file2 empty: diff bin_file against the built-in defaults.
    Otherwise: diff bin_file2 (new) against bin_file (old).
    \return 0 on success
    """
    # 'with' guarantees the input files are closed even on read errors
    with open (bin_file, 'rb') as fd:
        new_data = bytearray(fd.read())
    if bin_file2 == '':
        old_data = self.generate_binary_array()
    else:
        old_data = new_data
        with open (bin_file2, 'rb') as fd:
            new_data = bytearray(fd.read())
    return self.generate_delta_file_from_bin (delta_file, old_data, new_data, full)
def prepare_marshal (self, is_save):
    """Convert the config tree to/from a marshal-friendly representation.

    \param is_save True before marshalling (OrderedDict -> list),
                   False after unmarshalling (list -> OrderedDict)
    """
    if is_save:
        # Ordered dict is not marshallable, convert to list
        self._cfg_tree = CGenCfgData.deep_convert_dict (self._cfg_tree)
    else:
        # Revert it back
        self._cfg_tree = CGenCfgData.deep_convert_list (self._cfg_tree)
def generate_yml_file (self, in_file, out_file):
    """Expand all includes in a YAML config and write the flattened result.

    \param in_file  source YAML path
    \param out_file destination YAML path
    \return 0 on success
    """
    cfg_yaml = CFG_YAML()
    text = cfg_yaml.expand_yaml (in_file)
    # 'with' guarantees the file is closed even if the write fails
    with open(out_file, "w") as yml_fd:
        yml_fd.write (text)
    return 0
def write_cfg_header_file (self, hdr_file_name, tag_mode, tag_dict, struct_list):
    """Write the C header containing tag #defines and struct typedefs.

    \param hdr_file_name destination header path
    \param tag_mode      0 emits tags < 0x100, 1 emits tags >= 0x100
    \param tag_dict      {tag_name: tag_value}
    \param struct_list   struct descriptors with 'name'/'alias'/'count'/'node'/'tag' keys
    """
    lines = []
    lines.append ('\n\n')
    # Emit tag defines sorted by tag value, filtered by tag_mode
    tag_list = sorted(list(tag_dict.items()), key=lambda x: x[1])
    for tagname, tagval in tag_list:
        if (tag_mode == 0 and tagval >= 0x100) or (tag_mode == 1 and tagval < 0x100):
            continue
        # [:-9] strips a fixed 9-character suffix from the tag name
        # (presumably '_CFG_DATA' -- TODO confirm against the tag naming)
        lines.append ('#define %-30s 0x%03X\n' % ('CDATA_%s_TAG' % tagname[:-9], tagval))
    lines.append ('\n\n')
    name_dict = {}
    new_dict = {}
    for each in struct_list:
        if (tag_mode == 0 and each['tag'] >= 0x100) or (tag_mode == 1 and each['tag'] < 0x100):
            continue
        new_dict[each['name']] = (each['alias'], each['count'])
        # name_dict de-duplicates aliases so each struct body is emitted once
        if each['alias'] not in name_dict:
            name_dict[each['alias']] = 1
            lines.extend(self.create_struct (each['alias'], each['node'], new_dict))
    self.write_header_file (lines, hdr_file_name)
def write_header_file (self, txt_body, file_name, type = 'h'):
    """Write txt_body to file_name wrapped in C include guards.

    \param txt_body  list of text lines forming the file body
    \param file_name destination path; its basename becomes the guard macro
    \param type      'h' adds #pragma pack directives; other values skip them
    The file is only rewritten when its content actually changed, so
    timestamp-based build systems do not see spurious updates.
    """
    # Derive an UPPER_SNAKE_CASE guard name from the file basename
    # (raw strings fix the invalid-escape warnings of the old patterns)
    file_name_def = os.path.basename(file_name).replace ('.', '_')
    file_name_def = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', file_name_def)
    file_name_def = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', file_name_def).upper()
    lines = []
    lines.append ("%s\n" % get_copyright_header(type))
    lines.append ("#ifndef __%s__\n" % file_name_def)
    lines.append ("#define __%s__\n\n" % file_name_def)
    if type == 'h':
        lines.append ("#pragma pack(1)\n\n")
    lines.extend (txt_body)
    if type == 'h':
        lines.append ("#pragma pack()\n\n")
    lines.append ("#endif\n")
    new_txt = ''.join(lines)
    # Don't rewrite if the contents are the same
    create = True
    if os.path.exists(file_name):
        # 'with' guarantees the handle is closed even on a read error
        with open(file_name, "r") as hdr_file:
            org_txt = hdr_file.read()
        if org_txt == new_txt:
            create = False
    if create:
        with open(file_name, "w") as hdr_file:
            hdr_file.write (new_txt)
def generate_data_inc_file (self, dat_inc_file_name, bin_file = None):
    """Emit the config blob as a C byte-array include file.

    \param dat_inc_file_name path of the .inc file to write
    \param bin_file          optional prebuilt binary to embed; when None
                             the blob is generated from the config tree
    \return 0 on success
    """
    # Put a prefix GUID before CFGDATA so that it can be located later on
    prefix = b'\xa7\xbd\x7f\x73\x20\x1e\x46\xd6\xbe\x8f\x64\x12\x05\x8d\x0a\xa8'
    if bin_file:
        # 'with' guarantees the input file is closed on error
        with open (bin_file, 'rb') as fin:
            bin_dat = prefix + bytearray(fin.read())
    else:
        bin_dat = prefix + self.generate_binary_array ()
    file_name = os.path.basename(dat_inc_file_name).upper()
    file_name = file_name.replace('.', '_')
    txt_lines = []
    txt_lines.append ("UINT8 mConfigDataBlob[%d] = {\n" % len(bin_dat))
    count = 0
    line = ['  ']
    # Emit 16 bytes per line of C source
    for each in bin_dat:
        line.append('0x%02X, ' % each)
        count = count + 1
        if (count & 0x0F) == 0:
            line.append('\n')
            txt_lines.append (''.join(line))
            line = ['  ']
    if len(line) > 1:
        txt_lines.append (''.join(line) + '\n')
    txt_lines.append ("};\n\n")
    self.write_header_file (txt_lines, dat_inc_file_name, 'inc')
    return 0
def get_struct_array_info (self, input):
    """Split a declaration like 'Name[4]:var' into (name, array_count, var).

    A missing '[N]' suffix yields count 0; brackets with no digits yield
    the placeholder count 1000.  Text after a second ':' is discarded.
    """
    pieces = input.split(':')
    var = pieces[1] if len(pieces) > 1 else ''
    decl = pieces[0]
    bracket_split = decl.split('[')
    name = bracket_split[0]
    array_num = 0
    if len(bracket_split) > 1:
        digits = ''.join(ch for ch in bracket_split[-1] if ch.isdigit())
        array_num = int(digits) if digits else 1000
    return name, array_num, var
def process_multilines (self, string, max_char_length):
multilines = ''
string_length = len(string)
current_string_start = 0
string_offset = 0
break_line_dict = []
if len(string) <= max_char_length:
while (string_offset < string_length):
if string_offset >= 1:
if string[string_offset - 1] == '\\' and string[string_offset] == 'n':
break_line_dict.append (string_offset + 1)
string_offset += 1
if break_line_dict != []:
for each in break_line_dict:
multilines += " %s\n" % string[current_string_start:each].lstrip()
current_string_start = each
if string_length - current_string_start > 0:
multilines += " %s\n" % string[current_string_start:].lstrip()
else:
multilines = " %s\n" % string
else:
new_line_start = 0
new_line_count = 0
found_space_char = False
while (string_offset < string_length):
if string_offset >= 1:
if new_line_count >= max_char_length - 1:
if string[string_offset] == ' ' and string_length - string_offset > 10:
break_line_dict.append (new_line_start + new_line_count)
new_line_start = new_line_start + new_line_count
new_line_count = 0
found_space_char = True
elif string_offset == string_length - 1 and found_space_char == False:
break_line_dict.append (0)
if string[string_offset - 1] == '\\' and string[string_offset] == 'n':
break_line_dict.append (string_offset + 1)
new_line_start = string_offset + 1
new_line_count = 0
string_offset += 1
new_line_count += 1
if break_line_dict != []:
break_line_dict.sort ()
for each in break_line_dict:
| |
<gh_stars>0
"""
Pipes provide a nice way to lazily queue steps for later execution and allow for a nice way to chain together sequential functions. They also provide many other benefits listed below along with their usage information
Pipes can accept raw values at their tops but nowhere else in the pipe as that would break the flow and be pointless
EG: Pipe(
"hi",
display_message,
display_farewell
)
not
Pipe(
"hi",
display_message,
"bye",
display_farewell
)
Pipes also accept arguments in their open function removing the need for a complex step in the pipe that pulls the data in if that is not desired.
EG:
test = Pipe(
display_message,
display_farewell
)
test.open("hi")
This also allows pipes to be joined together in succession like so, and intermingle them with the rest of the pipe
test2 = Pipe(
really_goodbye
)
test3 = Pipe(
test.open,
test2.open
)
It also makes it easy to perform unit and integration tests, as each part of the pipe should be quite small. To perform an integration test one just needs to get the pipe, open it, and see if the output is what they expect.
Use the name prop if you want to run the pipes in parallel so that the resulting data can be identified and aggregated by the join part. If you are not concerned with the outputs of the pipe and just want to run them in parallel then the name isn't necessary
Sources or input parts are parts of a pipe that take no arguments and produce some data. These are usually used to interact with an external data source like a file, an API, the user or a database
Outlets or output parts are parts of a pipe that have no proceeding parts following them which results in the processed data being released to whatever other storage is waiting on the other end when the pipe is opened
Pipe segments are pipes that are used to construct a larger pipe. Kind of like a subtree within a tree
Pipes can be serialised as well allowing for easy reuse.
You may want to consider a different structure if your problem is mostly asynchronous in nature or if you have to process variable amounts of data, as large amounts of data on weak hardware can cause pipe blockages. You may want to consider the master-slave architecture proposed by Google or, alternatively, create an individual pipe for each unit within the batch and run those pipes in parallel to avoid blockages.
"""
from version_safe import resolve_get_args
class Pipe(object):
    """A lazily evaluated sequence of callables chained head to tail.

    The first positional argument may be a raw seed value instead of a
    callable; it is then used as the input when open() is called without
    data.  The optional 'name' keyword identifies this pipe's result in
    the shared pool when pipes are run in parallel.
    """
    # slots to make things more efficient
    __slots__ = ("steps", "name", "start_value")

    def __init__(self, *steps, **kwargs):
        self.name = kwargs.get("name", None)
        steps = list(steps)
        # steps = fix_pipe_arg(*steps)
        if callable(steps[0]):
            self.steps = steps
            self.start_value = None
        else:
            # A raw value at the top of the pipe becomes the default input
            self.start_value = steps[0]
            self.steps = steps[1:]

    def append(self, *steps):
        """Add steps to the end of the pipe."""
        self.steps += steps

    def insert(self, index, step):
        """Insert a step at the given position.

        Bug fix: list.insert() mutates in place and returns None, so the
        old 'self.steps = self.steps.insert(...)' destroyed the step list.
        """
        self.steps.insert(index, step)

    def replace(self, index, step):
        """Replace the step at the given position."""
        self.steps[index] = step

    def open(self, data=None):
        """Run the pipe, feeding each step's result to the next.

        \param data optional input for the first step; overrides any raw
                    value given at the top of the pipe (with a warning)
        \return the result of the final step (also stored in the global
                pool under self.name when a name was given)
        """
        def get_result(step, data):
            # Decide how to call the step based on its declared arity
            args = get_args(step).args
            num_args = len(args)
            if num_args == 1 or (num_args == 2 and args[0] == "self"):
                return step(data)
            elif num_args > 1:
                return step(*data)
            if callable(step):
                return step()
            else:
                return step
        prev_result = None
        get_args = resolve_get_args()
        # 'is (not) None' identity checks replace the old '== None' comparisons
        if self.start_value is not None:
            if data is None:
                data = self.start_value
            else:
                print("WARNING! you put a raw value at the top of your pipe and you put " + str(data) + " in the opening of the pipe the value at the start of the pipe has been overwritten by the passed in value. You may want to get rid of that value at the top of the pipe to get rid of this message\n")
        if len(self.steps) > 0:
            if data is not None:
                prev_result = get_result(self.steps[0], data)
            else:
                prev_result = self.steps[0]()
        else:
            return data
        for step in self.steps[1:]:
            prev_result = get_result(step, prev_result)
        # the use is multithreading so add the result to the pool of data
        if self.name is not None:
            global pool
            pool.update({self.name: prev_result})
        return prev_result
# Build a fresh Pipe from the same steps (the new pipe gets its own
# step list; the steps themselves are shared, and the name is not copied)
def copy(pipe):
    """Return a new unnamed Pipe constructed from *pipe*'s steps."""
    duplicate_steps = list(pipe.steps)
    return Pipe(*duplicate_steps)
# Limiter: pass at most `limit` items from the previous stage to the next
def limit(limit):
    """Build a step that truncates its input to at most *limit* items.

    limit > 1  -> the first *limit* items
    limit == 1 -> the single first item (unwrapped)
    limit == 0 -> None
    Any other value (e.g. negative) also yields None.
    """
    def limiter(data):
        if limit == 0:
            return None
        if limit == 1:
            return data[0]
        if limit > 1:
            return data[:limit]
    return limiter
# Repeater, repeatedly calls a function with the same arguments x times
def repeat(function, times):
    """Build a step that calls *function* with its input *times* times.

    \return a callable collecting the result of each call in a list

    Bug fix: the inner 'repeater' closure was built but never returned,
    so repeat(...) always evaluated to None.
    """
    # fix_pipe_arg(function)
    def repeater(args):
        results = []
        for _ in range(times):
            results.append(function(args))
        return results
    return repeater
def fix_pipe_arg(*args):
    """Replace any Pipe instances in *args* with their bound open methods.

    Lets raw Pipe objects be used directly as steps.  Work in progress.
    Bug fix: removed a leftover debugging print of the raw arguments.
    """
    new_args = []
    for arg in args:
        # Exact type check kept from the original (subclasses pass through)
        if type(arg) is Pipe:
            new_args.append(arg.open)
        else:
            new_args.append(arg)
    return new_args
# Validator: screen data with check_validity and route it through the
# success or failure handler accordingly
def validate(check_validity, on_success = None, on_fail = None):
    """Build a step that validates its input.

    Valid data is passed to *on_success* (or returned untouched); invalid
    data is passed to *on_fail* (or swallowed, yielding None).
    """
    # fix_pipe_arg(check_validity, on_success, on_fail)
    def validator(data):
        if not check_validity(data):
            return on_fail(data) if on_fail is not None else None
        return on_success(data) if on_success is not None else data
    return validator
# Parallel runs all the given pipes concurrently.
# An optional 'args' keyword supplies one input per pipe, matched by position.
from version_safe import Thread
def parallel(*pipes, **kwargs):
    """Start every pipe on its own thread and return the started threads."""
    inputs = kwargs.get("args", None)
    threads = []
    for index, pipe in enumerate(pipes):
        if inputs is not None:
            worker = Thread(target=pipe.open, kwargs={'data': inputs[index]})
        else:
            worker = Thread(target=pipe.open)
        worker.start()
        threads.append(worker)
    return threads
def run_parallel(*pipes):
    """A wrapper to make the parallel function play nice with pipes.

    Currently does not support passing args to the pipes' open function.
    """
    def launch():
        return parallel(*pipes)
    return launch
# Join waits for all the pipes to stop flowing and then closes them off.
# pool is a dictionary so results can be collected regardless of order :)
pool = {}

# this could be improved to accept pipes, store the threads as a dictionary where the memory address is the key and
# the value is the thread, means we can do like join(pipe1, pipe2) etc
# for now it just assumes that it will receive the raw threads from the previous step
def join(threads):
    """Wait for *threads* to finish and return the drained result pool."""
    global pool
    # Block until every worker has finished
    for worker in threads:
        worker.join()
    # Drop the thread references so they can be collected
    del threads[:]
    gathered = pool.copy()
    pool.clear()
    return gathered
# The streamer feeds each item of its input through the given steps one at
# a time, which keeps step code simple and avoids iterating the whole batch
# once per step.
# Note: for an online/continuous pipeline, build a normal pipe whose stages
# handle individual values instead -- the streamer assumes a finite input
# (not a generator that never ends) and accumulates every result before
# returning.
def stream(*steps):
    """Build a step that maps a sub-pipe of *steps* over each input item."""
    def streamer(data):
        # transparently create a pipe and push items through it one by one
        sub_pipe = Pipe(*steps)
        return [sub_pipe.open(item) for item in data]
    return streamer
# Convert a dict with sortable (e.g. numeric) keys into a list of its
# values ordered by ascending key
def dict_to_list(mydict):
    """Return mydict's values as a list, ordered by ascending key."""
    return [mydict[key] for key in sorted(mydict.keys())]
# this function will run multiple instances | |
e
f
i
n
i
t
i
o
n
,
r
e
p
r
e
s
e
n
t
e
d
a
s
a
U
R
I
(
g
l
o
b
a
l
l
y
u
n
i
q
u
e
)
.
Type `str`. """
self.useContext = None
"""
T
h
e
c
o
n
t
e
x
t
t
h
a
t
t
h
e
c
o
n
t
e
n
t
i
s
i
n
t
e
n
d
e
d
t
o
s
u
p
p
o
r
t
.
List of `UsageContext` items (represented as `dict` in JSON). """
self.version = None
"""
B
u
s
i
n
e
s
s
v
e
r
s
i
o
n
o
f
t
h
e
o
p
e
r
a
t
i
o
n
d
e
f
i
n
i
t
i
o
n
.
Type `str`. """
super(OperationDefinition, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
    """Return the element property descriptors for OperationDefinition.

    Extends the superclass list with this resource's own fields.  Each
    tuple follows the pattern used throughout this file:
    (name, json_name, type, is_list, of_many, not_optional) --
    TODO confirm the exact positional meaning against FHIRAbstractBase.
    """
    js = super(OperationDefinition, self).elementProperties()
    js.extend([
        ("affectsState", "affectsState", bool, False, None, False),
        ("base", "base", str, False, None, False),
        ("code", "code", str, False, None, True),
        ("comment", "comment", str, False, None, False),
        ("contact", "contact", contactdetail.ContactDetail, True, None, False),
        ("date", "date", fhirdate.FHIRDate, False, None, False),
        ("description", "description", str, False, None, False),
        ("experimental", "experimental", bool, False, None, False),
        ("inputProfile", "inputProfile", str, False, None, False),
        ("instance", "instance", bool, False, None, True),
        ("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
        ("kind", "kind", str, False, None, True),
        ("name", "name", str, False, None, True),
        ("outputProfile", "outputProfile", str, False, None, False),
        ("overload", "overload", OperationDefinitionOverload, True, None, False),
        ("parameter", "parameter", OperationDefinitionParameter, True, None, False),
        ("publisher", "publisher", str, False, None, False),
        ("purpose", "purpose", str, False, None, False),
        ("resource", "resource", str, True, None, False),
        ("status", "status", str, False, None, True),
        ("system", "system", bool, False, None, True),
        ("title", "title", str, False, None, False),
        ("type", "type", bool, False, None, True),
        ("url", "url", str, False, None, False),
        ("useContext", "useContext", usagecontext.UsageContext, True, None, False),
        ("version", "version", str, False, None, False),
    ])
    return js
from . import backboneelement
class OperationDefinitionOverload(backboneelement.BackboneElement):
    """ Define overloaded variants for when generating code.

    Defines an appropriate combination of parameters to use when invoking
    this operation, to help code generators when generating overloaded
    parameter sets for this operation.
    """

    resource_type = "OperationDefinitionOverload"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """

        self.comment = None
        """ Comments to go on overload.

        Type `str`. """

        self.parameterName = None
        """ Name of parameter to include in overload.

        List of `str` items. """

        super(OperationDefinitionOverload, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        js = super(OperationDefinitionOverload, self).elementProperties()
        js.extend([
            ("comment", "comment", str, False, None, False),
            ("parameterName", "parameterName", str, True, None, False),
        ])
        return js
class OperationDefinitionParameter(backboneelement.BackboneElement):
"""
P
a
r
a
m
e
t
e
r
s
f
o
r
t
h
e
o
p
e
r
a
t
i
o
n
/
q
u
e
r
y
.
T
h
e
p
a
r
a
m
e
t
e
r
s
f
o
r
t
h
e
o
p
e
r
a
t
i
o
n
/
q
u
e
r
y
.
"""
resource_type = "OperationDefinitionParameter"
def __init__(self, jsondict=None, strict=True):
    """ Initialize all valid properties.

    :raises: FHIRValidationError on validation errors, unless strict is False
    :param dict jsondict: A JSON dictionary to use for initialization
    :param bool strict: If True (the default), invalid variables will raise a TypeError
    """

    self.binding = None
    """ ValueSet details if this is coded.

    Type `OperationDefinitionParameterBinding` (represented as `dict` in JSON). """

    self.documentation = None
    """ Description of meaning/use.

    Type `str`. """

    self.max = None
    """ Maximum Cardinality (a number or *).

    Type `str`. """

    self.min = None
    """ Minimum Cardinality.

    Type `int`. """

    self.name = None
    """ Name in Parameters.parameter.name or in URL.

    Type `str`. """

    self.part = None
    """ Parts of a nested Parameter.

    List of `OperationDefinitionParameter` items (represented as `dict` in JSON). """

    self.referencedFrom = None
    """ References to this parameter.

    List of `OperationDefinitionParameterReferencedFrom` items (represented as `dict` in JSON). """

    self.searchType = None
    """ number | date | string | token | reference | composite | quantity |
    uri | special.

    Type `str`. """

    self.targetProfile = None
    """ If type is Reference | canonical, allowed targets.

    List of `str` items. """

    self.type = None
    """ What type this parameter has.

    Type `str`. """

    self.use = None
    """ in | out.

    Type `str`. """

    super(OperationDefinitionParameter, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(OperationDefinitionParameter, self).elementProperties()
js.extend([
("binding", "binding", OperationDefinitionParameterBinding, False, None, False),
("documentation", "documentation", | |
<reponame>timgates42/sc2reader<gh_stars>0
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals, division
from io import BytesIO
import struct
import functools
try:
from collections import OrderedDict
except ImportError as e:
from ordereddict import OrderedDict
class ByteDecoder(object):
    """
    :param contents: The string or file-like object to decode
    :param endian: One of 'little', '<', 'big', '>'. Indicates the endian
        the bytes are stored in. Used to unpack byte aligned files.
    """

    #: The BytesIO object used internally for reading from the
    #: decoder contents. cStringIO is faster than managing our
    #: own string access in python. For PyPy installations a
    #: managed string implementation might be faster.
    _buffer = None

    #: The string buffer being decoded. A direct reference
    #: is kept around to make read_range and peek faster.
    _contents = ""

    def __init__(self, contents, endian):
        """ Accepts both strings and files implementing ``read()`` and
        decodes them in the specified endian format.
        """
        if hasattr(contents, 'read'):
            self._contents = contents.read()
        else:
            self._contents = contents
        self._buffer = BytesIO(self._contents)
        self.length = len(self._contents)

        # Expose the basic BytesIO interface
        self.read = self._buffer.read
        self.seek = self._buffer.seek
        self.tell = self._buffer.tell

        # Normalize the endian value to a struct format prefix
        # (the original re-lowered an already lowercased string here)
        self.endian = endian.lower()
        if self.endian == 'little':
            self.endian = "<"
        elif self.endian == 'big':
            self.endian = ">"
        elif self.endian not in ('<', '>'):
            raise ValueError("Endian must be one of 'little', '<', 'big', or '>' but was: "+self.endian)

        # Pre-compiled struct unpackers for the hot fixed-width reads
        self._unpack_int = struct.Struct(str(self.endian+'I')).unpack
        self._unpack_short = struct.Struct(str(self.endian+'H')).unpack
        self._unpack_longlong = struct.Struct(str(self.endian+'Q')).unpack
        # Renamed the lambda argument: 'bytes' shadowed the builtin type
        self._unpack_bytes = lambda raw: raw if self.endian == '>' else raw[::-1]

    def done(self):
        """ Returns true when all bytes have been decoded """
        return self.tell() == self.length

    def read_range(self, start, end):
        """ Returns the raw byte string from the indicated address range """
        return self._contents[start:end]

    def peek(self, count):
        """ Returns the raw byte string for the next ``count`` bytes without advancing """
        start = self.tell()
        return self._contents[start:start+count]

    def read_uint8(self):
        """ Returns the next byte as an unsigned integer """
        return ord(self.read(1))

    def read_uint16(self):
        """ Returns the next two bytes as an unsigned integer """
        return self._unpack_short(self.read(2))[0]

    def read_uint32(self):
        """ Returns the next four bytes as an unsigned integer """
        return self._unpack_int(self.read(4))[0]

    def read_uint64(self):
        """ Returns the next eight bytes as an unsigned integer """
        return self._unpack_longlong(self.read(8))[0]

    def read_bytes(self, count):
        """ Returns the next ``count`` bytes as a byte string
        (reversed for little-endian decoders) """
        return self._unpack_bytes(self.read(count))

    def read_uint(self, count):
        """ Returns the next ``count`` bytes folded big-endian into an unsigned integer """
        unpack = struct.Struct(str(self.endian+'B'*count)).unpack
        uint = 0
        for byte in unpack(self.read(count)):
            uint = uint << 8 | byte
        return uint

    def read_string(self, count, encoding='utf8'):
        """ Read a string in given encoding (default utf8) that is ``count`` bytes long """
        return self.read_bytes(count).decode(encoding)

    def read_cstring(self, encoding='utf8'):
        """ Read a NULL byte terminated character string decoded with given encoding (default utf8). Ignores endian. """
        cstring = BytesIO()
        while True:
            c = self.read(1)
            if ord(c) == 0:
                return cstring.getvalue().decode(encoding)
            else:
                cstring.write(c)
class BitPackedDecoder(object):
"""
:param contents: The string of file-like object to decode
Extends :class:`ByteDecoder`. Always packed BIG_ENDIAN
Adds capabilities for parsing files that Blizzard has packed in
bits and not in bytes.
"""
#: The ByteDecoder used internally to read byte
#: aligned values.
_buffer = None
#: Tracks the how many bits have already been used
#: from the current byte.
_bit_shift = 0
#: Holds the byte, if any, that hasn't had its bits
#: fully used yet.
_next_byte = None
#: Maps bit shifts to low bit masks used for grabbing
#: the first bits off of the next byte.
_lo_masks = [0x00, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF]
#: Maps bit shifts to high bit masks used for grabbing
#: the remaining bits off of the previous byte.
_hi_masks = [0xFF ^ mask for mask in _lo_masks]
#: Maps bit shifts to high and low bit masks. Used for
#: joining bytes when we are not byte aligned.
_bit_masks = list(zip(_lo_masks, _hi_masks))
def __init__(self, contents):
    # Underlying byte-aligned reader; bit-level reads are layered on top.
    # Bit-packed streams are always big-endian here.
    self._buffer = ByteDecoder(contents, endian='BIG')

    # Partially expose the ByteBuffer interface
    self.length = self._buffer.length
    self.tell = self._buffer.tell
    self.peek = self._buffer.peek
    self.read_range = self._buffer.read_range

    # Reduce the number of lookups required to read
    self._read = self._buffer.read
    # A bool is just a single-bit read
    self.read_bool = functools.partial(self.read_bits, 1)
def done(self):
    """ Returns true when all bytes in the buffer have been used"""
    return self.length == self.tell()
def byte_align(self):
    """ Moves cursor to the beginning of the next byte """
    # Discard any partially consumed byte and reset the bit cursor
    self._bit_shift = 0
    self._next_byte = None
def read_uint8(self):
    """ Returns the next 8 bits as an unsigned integer """
    data = ord(self._buffer.read(1))
    if self._bit_shift != 0:
        # Not byte aligned: stitch the unused high bits of the previous
        # byte onto the low bits of the byte just read.
        lo_mask, hi_mask = self._bit_masks[self._bit_shift]
        hi_bits = self._next_byte & hi_mask
        lo_bits = data & lo_mask
        # Remember the new partially consumed byte
        self._next_byte = data
        data = hi_bits | lo_bits
    return data
def read_uint16(self):
    """ Returns the next 16 bits as an unsigned integer """
    data = self._buffer.read_uint16()
    if self._bit_shift != 0:
        # Not byte aligned: combine the leftover high bits of the previous
        # byte with the freshly read bytes, shifted to preserve bit order.
        lo_mask, hi_mask = self._bit_masks[self._bit_shift]
        hi_bits = (self._next_byte & hi_mask) << 8
        mi_bits = (data & 0xFF00) >> (8-self._bit_shift)
        lo_bits = (data & lo_mask)
        # The last byte read is only partially consumed
        self._next_byte = data & 0xFF
        data = hi_bits | mi_bits | lo_bits
    return data
def read_uint32(self):
    """Return the next 32 bits as an unsigned integer."""
    value = self._buffer.read_uint32()
    if self._bit_shift == 0:
        return value
    # Same splicing scheme as read_uint16, widened to 32 bits: leftover
    # high bits of the previous byte on top, the new value's upper three
    # bytes shifted into place, then the low bits of its last byte.
    lo_mask, hi_mask = self._bit_masks[self._bit_shift]
    spliced = ((self._next_byte & hi_mask) << 24) \
        | ((value & 0xFFFFFF00) >> (8 - self._bit_shift)) \
        | (value & lo_mask)
    self._next_byte = value & 0xFF
    return spliced
def read_uint64(self):
    """Return the next 64 bits as an unsigned integer."""
    value = self._buffer.read_uint64()
    if self._bit_shift == 0:
        return value
    # Same splicing scheme as read_uint16, widened to 64 bits.
    lo_mask, hi_mask = self._bit_masks[self._bit_shift]
    spliced = ((self._next_byte & hi_mask) << 56) \
        | ((value & 0xFFFFFFFFFFFFFF00) >> (8 - self._bit_shift)) \
        | (value & lo_mask)
    self._next_byte = value & 0xFF
    return spliced
def read_vint(self):
    """Read a variable-length signed integer.

    The first byte stores the sign in bit 0 and six payload bits; every
    byte's top bit flags that another 7-bit group follows.
    """
    # NOTE(review): the first byte is read byte-aligned straight from the
    # underlying buffer; presumably vints always start on a byte
    # boundary -- confirm against callers.
    byte = ord(self._buffer.read(1))
    is_negative = bool(byte & 0x01)
    magnitude = (byte & 0x7F) >> 1
    shift = 6
    while byte & 0x80:
        byte = self.read_uint8()
        magnitude |= (byte & 0x7F) << shift
        shift += 7
    return -magnitude if is_negative else magnitude
def read_aligned_bytes(self, count):
    """ Skips to the beginning of the next byte and returns the next ``count`` bytes as a byte string """
    # Any partially consumed byte is discarded first, so the data comes
    # straight from the underlying byte-aligned buffer.
    self.byte_align()
    return self._buffer.read_bytes(count)
def read_aligned_string(self, count, encoding='utf8'):
    """ Skips to the beginning of the next byte and returns the next ``count`` bytes decoded with encoding (default utf8) """
    # Any partially consumed byte is discarded first; decoding is
    # delegated to the underlying byte-aligned buffer.
    self.byte_align()
    return self._buffer.read_string(count, encoding)
def read_bytes(self, count):
    """Return the next ``count*8`` bits as a byte string."""
    data = self._buffer.read_bytes(count)
    if self._bit_shift == 0:
        return data
    # Not byte aligned: each output byte splices the unused high bits
    # of the previous byte with the low bits of the byte that follows,
    # sliding one byte at a time through the freshly read data.
    lo_mask, hi_mask = self._bit_masks[self._bit_shift]
    prev_byte = self._next_byte
    pieces = []
    for next_byte in struct.unpack(str("B") * count, data):
        pieces.append(struct.pack(str("B"), prev_byte & hi_mask | next_byte & lo_mask))
        prev_byte = next_byte
    # The last byte read still has unused bits; keep it in progress.
    self._next_byte = prev_byte
    return b"".join(pieces)
def read_bits(self, count):
    """ Returns the next ``count`` bits as an unsigned integer """
    # Accumulates high-order bits first: earlier bytes land in the more
    # significant positions of the result.
    result = 0
    bits = count
    bit_shift = self._bit_shift
    # If we've got a byte in progress use it first
    if bit_shift != 0:
        bits_left = 8-bit_shift
        if bits_left < bits:
            # The in-progress byte is fully consumed; its remaining bits
            # become the top of the result and fresh bytes fill the rest.
            bits -= bits_left
            result = (self._next_byte >> bit_shift) << bits
        elif bits_left > bits:
            # The in-progress byte alone satisfies the request; just
            # advance the shift and mask out the requested bits.
            self._bit_shift += bits
            return (self._next_byte >> bit_shift) & self._lo_masks[bits]
        else:
            # Exactly exhausts the in-progress byte; we are byte aligned
            # again afterwards.
            self._bit_shift = 0
            return self._next_byte >> bit_shift
    # Then grab any additional whole bytes as needed. These reads go to
    # the underlying buffer directly: at this point the request is byte
    # aligned, any partial byte having been handled above.
    if bits >= 8:
        bytes = int(bits/8)  # NOTE(review): shadows the builtin ``bytes``
        if bytes == 1:
            bits -= 8
            result |= self._buffer.read_uint8() << bits
        elif bytes == 2:
            bits -= 16
            result |= self._buffer.read_uint16() << bits
        elif bytes == 4:
            bits -= 32
            result |= self._buffer.read_uint32() << bits
        else:
            # Generic path for 3 bytes or more than 4: fold bytes in one
            # at a time, most significant first.
            for byte in struct.unpack(str("B")*bytes, self._read(bytes)):
                bits -= 8
                result |= byte << bits
    # Grab any trailing bits from the next byte, leaving it in progress
    # for the following read.
    if bits != 0:
        self._next_byte = ord(self._read(1))
        result |= self._next_byte & self._lo_masks[bits]
        self._bit_shift = bits
    return result
def read_frames(self):
""" Reads a frame count as an unsigned integer """
byte = self.read_uint8()
time, additional_bytes = byte >> 2, byte & 0x03
if additional_bytes == 0:
return time
elif | |
] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying, uvs, uvIndices )
target = GafferScene.ObjectToScene()
target[ "name" ].setValue( "target" )
target[ "object" ].setValue( mesh )
cube = GafferScene.Cube()
cube[ "name" ].setValue( "cube" )
cubeFilter = GafferScene.PathFilter()
cubeFilter[ "paths" ].setValue( IECore.StringVectorData( [ "/" + cube[ "name" ].getValue() ] ) )
constraint = GafferScene.ParentConstraint()
constraint[ "in" ].setInput( cube[ "out" ] )
constraint[ "filter" ].setInput( cubeFilter[ "out" ] )
constraint[ "targetScene" ].setInput( target[ "out" ] )
constraint[ "target" ].setValue( "/" + target[ "name" ].getValue() )
constraint[ "targetMode" ].setValue( GafferScene.Constraint.TargetMode.UV )
constraint[ "targetOffset" ].setValue( imath.V3f( 0, 0, 0 ) )
# face 0 (constant u)
constraint[ "ignoreMissingTarget" ].setValue( False )
constraint[ "targetUV" ].setValue( imath.V2f( 0.5, 0.5 ) )
m = constraint[ "out" ].fullTransform( "/" + cube[ "name" ].getValue() )
self.assertAlmostEqual( m[ 3 ][ 0 ], 0.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 1 ], 0.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 0 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 2 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 1 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 2 ], 0.0, places=6 )
constraint[ "ignoreMissingTarget" ].setValue( True )
m = constraint[ "out" ].fullTransform( "/" + cube[ "name" ].getValue() )
self.assertAlmostEqual( m[ 3 ][ 0 ], 0.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 1 ], 0.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 0 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 2 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 1 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 2 ], 0.0, places=6 )
# face 1 (constant v)
constraint[ "ignoreMissingTarget" ].setValue( False )
constraint[ "targetUV" ].setValue( imath.V2f( 0.5, 1.5 ) )
m = constraint[ "out" ].fullTransform( "/" + cube[ "name" ].getValue() )
self.assertAlmostEqual( m[ 3 ][ 0 ], 1.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 1 ], 0.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 0 ], 1.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 2 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 1 ], 1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 2 ], 0.0, places=6 )
constraint[ "ignoreMissingTarget" ].setValue( True )
m = constraint[ "out" ].fullTransform( "/" + cube[ "name" ].getValue() )
self.assertAlmostEqual( m[ 3 ][ 0 ], 1.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 1 ], 0.5, places=6 )
self.assertAlmostEqual( m[ 3 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 0 ], 1.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 0 ][ 2 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 1 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 1 ][ 2 ], -1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 0 ], 0.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 1 ], 1.0, places=6 )
self.assertAlmostEqual( m[ 2 ][ 2 ], 0.0, places=6 )
def testTargetUVCollapsedUTangent( self ) :
	"""UV-mode constraint on a quad whose U tangent collapses to zero length.

	The two vertices at each end of the U direction are coincident, so the
	constrained translation may depend only on ``v`` and the orientation
	must fall back to identity.
	"""

	from random import Random

	verticesPerFace = IECore.IntVectorData( [ 4 ] )
	vertexIds = IECore.IntVectorData( [
		0, 2, 3, 1 ] )
	points = IECore.V3fVectorData( [
		imath.V3f( 0, 0, 0 ),
		imath.V3f( 0, 0, 0 ),
		imath.V3f( 1, 1, 0 ),
		imath.V3f( 1, 1, 0 ) ],
		IECore.GeometricData.Interpretation.Point )
	uvs = IECore.V2fVectorData( [
		imath.V2f( 0, 0 ),
		imath.V2f( 1, 0 ),
		imath.V2f( 0, 1 ),
		imath.V2f( 1, 1 ) ],
		IECore.GeometricData.Interpretation.UV )

	mesh = IECoreScene.MeshPrimitive( verticesPerFace, vertexIds )
	mesh[ "P" ] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, points )
	mesh[ "uv" ] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, uvs )

	target = GafferScene.ObjectToScene()
	target[ "name" ].setValue( "target" )
	target[ "object" ].setValue( mesh )

	cube = GafferScene.Cube()
	cube[ "name" ].setValue( "cube" )
	cubeFilter = GafferScene.PathFilter()
	cubeFilter[ "paths" ].setValue( IECore.StringVectorData( [ "/" + cube[ "name" ].getValue() ] ) )

	constraint = GafferScene.ParentConstraint()
	constraint[ "in" ].setInput( cube[ "out" ] )
	constraint[ "filter" ].setInput( cubeFilter[ "out" ] )
	constraint[ "targetScene" ].setInput( target[ "out" ] )
	constraint[ "target" ].setValue( "/" + target[ "name" ].getValue() )
	constraint[ "targetMode" ].setValue( GafferScene.Constraint.TargetMode.UV )
	constraint[ "targetOffset" ].setValue( imath.V3f( 0, 0, 0 ) )

	# Use a fixed seed so failures are reproducible; seeding from the
	# current time made this test non-deterministic from run to run.
	r = Random( 42 )
	for i in range( 10 ) :
		u = r.uniform( 0.0, 1.0 )
		v = r.uniform( 0.0, 1.0 )
		constraint[ "targetUV" ].setValue( imath.V2f( u, v ) )
		m = constraint[ "out" ].fullTransform( "/" + cube[ "name" ].getValue() )
		# Position depends on v only, because the U tangent has zero length.
		self.assertAlmostEqual( m[ 3 ][ 0 ], v, places=6 )
		self.assertAlmostEqual( m[ 3 ][ 1 ], v, places=6 )
		self.assertAlmostEqual( m[ 3 ][ 2 ], 0.0, places=6 )
		# The degenerate tangent space falls back to identity orientation.
		self.assertEqual( imath.V3f( m[ 0 ][ 0 ], m[ 0 ][ 1 ], m[ 0 ][ 2 ] ), imath.V3f( 1, 0, 0 ) )
		self.assertEqual( imath.V3f( m[ 1 ][ 0 ], m[ 1 ][ 1 ], m[ 1 ][ 2 ] ), imath.V3f( 0, 1, 0 ) )
		self.assertEqual( imath.V3f( m[ 2 ][ 0 ], m[ 2 ][ 1 ], m[ 2 ][ 2 ] ), imath.V3f( 0, 0, 1 ) )
def testTargetUVCollapsedVTangent( self ) :
	"""UV-mode constraint on a quad whose V tangent collapses to zero length.

	The two vertices at each end of the V direction are coincident, so the
	constrained translation may depend only on ``u`` and the orientation
	must fall back to identity.
	"""

	from random import Random

	verticesPerFace = IECore.IntVectorData( [ 4 ] )
	vertexIds = IECore.IntVectorData( [
		0, 2, 3, 1 ] )
	points = IECore.V3fVectorData( [
		imath.V3f( 0, 0, 0 ),
		imath.V3f( 1, 1, 0 ),
		imath.V3f( 0, 0, 0 ),
		imath.V3f( 1, 1, 0 ) ],
		IECore.GeometricData.Interpretation.Point )
	uvs = IECore.V2fVectorData( [
		imath.V2f( 0, 0 ),
		imath.V2f( 1, 0 ),
		imath.V2f( 0, 1 ),
		imath.V2f( 1, 1 ) ],
		IECore.GeometricData.Interpretation.UV )

	mesh = IECoreScene.MeshPrimitive( verticesPerFace, vertexIds )
	mesh[ "P" ] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, points )
	mesh[ "uv" ] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, uvs )

	target = GafferScene.ObjectToScene()
	target[ "name" ].setValue( "target" )
	target[ "object" ].setValue( mesh )

	cube = GafferScene.Cube()
	cube[ "name" ].setValue( "cube" )
	cubeFilter = GafferScene.PathFilter()
	cubeFilter[ "paths" ].setValue( IECore.StringVectorData( [ "/" + cube[ "name" ].getValue() ] ) )

	constraint = GafferScene.ParentConstraint()
	constraint[ "in" ].setInput( cube[ "out" ] )
	constraint[ "filter" ].setInput( cubeFilter[ "out" ] )
	constraint[ "targetScene" ].setInput( target[ "out" ] )
	constraint[ "target" ].setValue( "/" + target[ "name" ].getValue() )
	constraint[ "targetMode" ].setValue( GafferScene.Constraint.TargetMode.UV )
	constraint[ "targetOffset" ].setValue( imath.V3f( 0, 0, 0 ) )

	# Use a fixed seed so failures are reproducible; seeding from the
	# current time made this test non-deterministic from run to run.
	r = Random( 42 )
	for i in range( 10 ) :
		u = r.uniform( 0.0, 1.0 )
		v = r.uniform( 0.0, 1.0 )
		constraint[ "targetUV" ].setValue( imath.V2f( u, v ) )
		m = constraint[ "out" ].fullTransform( "/" + cube[ "name" ].getValue() )
		# Position depends on u only, because the V tangent has zero length.
		self.assertAlmostEqual( m[ 3 ][ 0 ], u, places=6 )
		self.assertAlmostEqual( m[ 3 ][ 1 ], u, places=6 )
		self.assertAlmostEqual( m[ 3 ][ 2 ], 0.0, places=6 )
		# The degenerate tangent space falls back to identity orientation.
		self.assertEqual( imath.V3f( m[ 0 ][ 0 ], m[ 0 ][ 1 ], m[ 0 ][ 2 ] ), imath.V3f( 1, 0, 0 ) )
		self.assertEqual( imath.V3f( m[ 1 ][ 0 ], m[ 1 ][ 1 ], m[ 1 ][ 2 ] ), imath.V3f( 0, 1, 0 ) )
		self.assertEqual( imath.V3f( m[ 2 ][ 0 ], m[ 2 ][ 1 ], m[ 2 ][ 2 ] ), imath.V3f( 0, 0, 1 ) )
def testTargetUVZeroAreaFace( self ) :
from random import Random
from datetime import datetime
verticesPerFace = IECore.IntVectorData( [ 4 ] )
vertexIds = IECore.IntVectorData( [
0, 2, 3, 1 ] )
points = IECore.V3fVectorData( [
imath.V3f( 0.5, 0.5, 0 ),
imath.V3f( 0.5, 0.5, 0 ),
imath.V3f( 0.5, 0.5, 0 ),
imath.V3f( 0.5, 0.5, 0 ) ],
IECore.GeometricData.Interpretation.Point )
uvs = IECore.V2fVectorData( [
imath.V2f( 0, 0 ),
imath.V2f( 1, 0 ),
imath.V2f( 0, 1 ),
imath.V2f( 1, 1 ) ],
IECore.GeometricData.Interpretation.UV )
mesh = IECoreScene.MeshPrimitive( verticesPerFace, vertexIds )
mesh[ "P" ] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, points )
mesh[ "uv" ] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.Vertex, uvs )
target = GafferScene.ObjectToScene()
target[ "name" ].setValue( "target" )
target[ "object" ].setValue( mesh )
cube = GafferScene.Cube()
cube[ "name" ].setValue( "cube" )
cubeFilter = GafferScene.PathFilter()
cubeFilter[ "paths" ].setValue( IECore.StringVectorData( [ "/" + cube[ "name" ].getValue() ] ) )
constraint = GafferScene.ParentConstraint()
constraint[ "in" ].setInput( cube[ "out" ] )
constraint[ "filter" ].setInput( cubeFilter[ "out" ] )
constraint[ "targetScene" ].setInput( target[ "out" ] )
constraint[ "target" ].setValue( "/" + target[ "name" ].getValue() )
constraint[ "targetMode" ].setValue( GafferScene.Constraint.TargetMode.UV )
constraint[ "targetOffset" ].setValue( imath.V3f( 0, 0, 0 ) )
r = Random( datetime.now() )
for i in range( 10 ) :
u = r.uniform( 0.0, 1.0 )
v = r.uniform( 0.0, 1.0 )
constraint[ "targetUV" ].setValue( imath.V2f( u, v ) )
m = | |
# gh_stars: 10-100
import vapoursynth as vs
import havsfunc as haf
import mvsfunc as mvf
import muvsfunc as muvf
import nnedi3_resample
import nnedi3_resampleCL
import resamplehq
import xvs
import sys, os, re, math, glob, functools, subprocess, shutil, configparser
"""*********************************************
Scripts functions:
EraseAny*
EraseDMM*
LogoNR
FillMargins
SpeedGear
Nnedi3aa
Dissolve
FadeIn
FadeOut
FadeIO
TurnLeft
TurnRight
SelectEven
SelectOdd
SelectHalf
StackVertical
StackHorizontal
ChangeFPS
AssumeFPS
OverlayM
Overlay
Overlaymod
GradualMerge
Crop
Diff
TrimMul
Trims
************************************************
Source functions:
ImageSourceMul
FFImageSource
LWLSource
DirectShowSource
MplsSource
D2VSource
DGSource
DGSourceNV
DGSourceIM
************************************************
Deinterlacing functions:
Nnedi3
Yadifmod
QTGMC
DGBob
************************************************
Inverse Telecine functions:
VIVTC
DGIVTC
DGTelecide
************************************************
Resizing functions:
GammaResize
PointResize
BlinearResize
BicubicResize
LanczosResize
Spline16Resize
Spline36Resize
Nnedi3Resize
************************************************
Denoise functions:
DGDenoise
SMDegrain
************************************************
Sharpen functions:
DGSharpen
************************************************
Format Conversion functions:
ConvertToYV24
ConvertToYV16
ConvertToYUY2
ConvertToYV12
ConvertToYV411
ConvertToY8
ConvertToRGB24
ConvertToRGB48
ConvertToRGBS
ConvertToGrey
ConvertToGray
ConvertTo
SwapUV
UToY
VToY
Depth
************************************************
Configure functions:
Main
SetWorkingDir
GetLgdPath
GetHomePath
GetLibPath
SetConfig
GetConfig
TestSuite
PathSplit
FindFirstFilePath
************************************************
Subtitle functions:
Subtitle
TcasSub
************************************************
Assistant functions:
Default
SplitClip
FrameType
Matrix
RemoveFileExt
RightStr
LeftStr
ConvertToTimecode
Roma
************************************************
Command Line functions:
AddDefEnv
AddParm
ToDict
GetEach
CreatScripts
StartScripts
Muxers
Muxer
LsmashMuxers
LsmashMuxer
Demuxer
TsDemuxer
EacDemuxer
AudioEnc
AudioTrimFF
AudioTrimFFSingle
AudioTrims
AudioTrimAVS
ExtractModule
Preview
*********************************************"""
"""*********************************************
### Script : qvs.py
### Program : Qvs
### Contact : http://weibo.com/egosub
### Homepage : http://egosub.ske48.co
*********************************************"""
##################################################################################################
### Function : EraseAny
### Function : EraseAnyRel
### Function : EraseAnyAbs
### Author : ema
### Version : v0.4
### Release : 2018.06.02
##################################################################################################
### Used to erase any transparent logo.
### YV12 and YV24 Only.
###
### an [int, default: 0]
### ------------------
### >> Using for EraseAny.
### The alignment parameter takes a number from 1 to 9, corresponding to the positions of the keys on a numpad.
### But when alignment equals 0, it's equivalent to full screen.
###
### l/t/r/b [int, default: 0]
### ------------------
### >> Using for EraseAnyRel.
### Crop is the simplest to use of the two. The arguments specify how many pixels to crop from each side. This function used to be called CropRel which is still an alias for it.
### left/top/right/bottom pixels to be cropped for logo area.
###
### x/y/width/height [int, default: 0]
### ------------------
### >> Using for EraseAnyAbs.
### left/top/right/bottom pixels to be cropped for logo area.
### CropAbs, on the other hand, is special, because it can accept clips with variable frame sizes and crop out a fixed size area, thus making it a fixed size clip.
###
### d/a/s/h [int, default: 1/2/4/6]
### ------------------
### >> Using for EraseAny/EraseAnyRel/EraseAnyAbs.
### The same parameters of KNLMeansCL.
###
### pos_x / pos_y [int, default: 0/0]
### ------------------
### Adjust logo position, in a quarter pixel.
###
### device_type [string, default: 'gpu']
### ------------------
### 'accelerator' := Dedicated OpenCL accelerators.
### 'cpu' := An OpenCL device that is the host processor.
### 'gpu' := An OpenCL device that is a GPU.
### 'auto' := 'accelerator' -> 'gpu' -> 'cpu'.
###
### device_id [int, default: 1]
### ------------------
### The 'device_id' device of type 'device_type' in the system.
### Example: [device_type = "GPU", device_id = 1] return the second GPU in the system.
###
### +----------------+
### | REQUIREMENTS |
### +----------------+
### -> LogoNR
### -> KNLMeansCL
### -> RemoveGrain/Repair
##################################################################################################
def EraseAny(src, lgd, chroma=True, an=0, fadein=0, fadeout=0, start=0, end=None, pos_x=0, pos_y=0, depth=128, d=1, a=2, s=4, h=6, device_type='gpu', device_id=1):
    """Erase a transparent logo from ``src`` using logo data file ``lgd``.

    ``an`` selects the logo area like the keys of a numpad (1-9), with 0
    meaning the full frame. ``d``/``a``/``s``/``h`` are forwarded to
    KNLMeansCL via LogoNR; the remaining arguments are forwarded to
    delogo.EraseLogo.
    """
    core = vs.get_core()
    def multOfTwo(value):
        # Round up to an even pixel count; chroma subsampling (YV12)
        # requires even crop values.
        value = int(value)
        if (value % 2) != 0:
            value += 1
        return value
    w = src.width
    # Bug fix: the frame height was previously bound to the name ``h``,
    # clobbering the KNLMeansCL strength parameter ``h`` and passing the
    # frame height (e.g. 1080) as the denoise strength below.
    ht = src.height
    an_l_dict = {0 : 0, 1 : 0, 2 : (w/3), 3 : (w/3*2), 4 : 0, 5 : (w/3), 6 : (w/3*2), 7 : 0, 8 : (w/3), 9 : (w/3*2)}
    an_t_dict = {0 : 0, 1 : (ht/3*2), 2 : (ht/3*2), 3 : (ht/3*2), 4 : (ht/3), 5 : (ht/3), 6 : (ht/3), 7 : 0, 8 : 0, 9 : 0 }
    an_r_dict = {0 : 0, 1 : (w/3*2), 2 : (w/3), 3 : 0, 4 : (w/3*2), 5 : (w/3), 6 : 0, 7 : (w/3*2), 8 : (w/3), 9 : 0 }
    an_b_dict = {0 : 0, 1 : 0, 2 : 0, 3 : 0, 4 : (ht/3), 5 : (ht/3), 6 : (ht/3), 7 : (ht/3*2), 8 : (ht/3*2), 9 : (ht/3*2)}
    l = multOfTwo(an_l_dict[an])
    t = multOfTwo(an_t_dict[an])
    r = multOfTwo(an_r_dict[an])
    b = multOfTwo(an_b_dict[an])
    # Fixed bug of delogo plugin when using fadeout parameter: trim the
    # default end point by three frames while fading out.
    if fadeout == 0: end = Default(end, src.num_frames)
    else: end = Default(end, src.num_frames - 3)
    dlg = core.delogo.EraseLogo(clip=src, logofile=lgd, start=start, end=end, fadein=fadein, fadeout=fadeout, pos_x=pos_x, pos_y=pos_y, depth=depth)
    try:
        src = LogoNR(dlg, src, chroma=chroma, l=l, t=t, r=r, b=b, d=d, a=a, s=s, h=h, device_type=device_type, device_id=device_id)
    except Exception:
        # Retry on the first device of the requested type; the requested
        # device_id may not exist on this machine. (Was a bare except,
        # which also swallowed KeyboardInterrupt/SystemExit.)
        src = LogoNR(dlg, src, chroma=chroma, l=l, t=t, r=r, b=b, d=d, a=a, s=s, h=h, device_type=device_type, device_id=0)
    return src
def EraseAnyRel(src, lgd, chroma=True, l=0, t=0, r=0, b=0, fadein=0, fadeout=0, start=0, end=None, pos_x=0, pos_y=0, depth=128, d=1, a=2, s=4, h=6, device_type='gpu', device_id=0):
    """Erase a transparent logo, with the logo area given as crop margins.

    ``l``/``t``/``r``/``b`` are the pixels cropped from each side to
    isolate the logo area (forwarded to LogoNR); the remaining arguments
    mirror EraseAny.
    """
    core = vs.get_core()
    # Fixed bug of delogo plugin when using fadeout parameter: trim the
    # default end point by three frames while fading out.
    if fadeout == 0: end = Default(end, src.num_frames)
    else: end = Default(end, src.num_frames - 3)
    dlg = core.delogo.EraseLogo(clip=src, logofile=lgd, start=start, end=end, fadein=fadein, fadeout=fadeout, pos_x=pos_x, pos_y=pos_y, depth=depth)
    try:
        src = LogoNR(dlg, src, chroma=chroma, l=l, t=t, r=r, b=b, d=d, a=a, s=s, h=h, device_type=device_type, device_id=device_id)
    except Exception:
        # Retry on device 0 if the requested device is unavailable. (Was
        # a bare except, which also swallowed KeyboardInterrupt/SystemExit.)
        src = LogoNR(dlg, src, chroma=chroma, l=l, t=t, r=r, b=b, d=d, a=a, s=s, h=h, device_type=device_type, device_id=0)
    return src
def EraseAnyAbs(src, lgd, chroma=True, x=0, y=0, width=0, height=0, fadein=0, fadeout=0, start=0, end=None, pos_x=0, pos_y=0, depth=128, d=1, a=2, s=4, h=6, device_type='gpu', device_id=0):
    """Erase a transparent logo, with the logo area given as an absolute rect.

    ``x``/``y``/``width``/``height`` describe the logo rectangle; a
    ``width`` or ``height`` of 0 defaults to the full frame size. The
    rectangle is converted to crop margins for LogoNR; the remaining
    arguments mirror EraseAny.
    """
    core = vs.get_core()
    # Fixed bug of delogo plugin when using fadeout parameter: trim the
    # default end point by three frames while fading out.
    if fadeout == 0: end = Default(end, src.num_frames)
    else: end = Default(end, src.num_frames - 3)
    if width == 0:
        width = src.width
    if height == 0:
        height = src.height
    # Convert the absolute rectangle into relative crop margins.
    l = x
    t = y
    r = src.width - x - width
    b = src.height - y - height
    dlg = core.delogo.EraseLogo(clip=src, logofile=lgd, start=start, end=end, fadein=fadein, fadeout=fadeout, pos_x=pos_x, pos_y=pos_y, depth=depth)
    try:
        src = LogoNR(dlg, src, chroma=chroma, l=l, t=t, r=r, b=b, d=d, a=a, s=s, h=h, device_type=device_type, device_id=device_id)
    except Exception:
        # Retry on device 0 if the requested device is unavailable. (Was
        # a bare except, which also swallowed KeyboardInterrupt/SystemExit.)
        src = LogoNR(dlg, src, chroma=chroma, l=l, t=t, r=r, b=b, d=d, a=a, s=s, h=h, device_type=device_type, device_id=0)
    return src
##################################################################################################
### Function : LogoNR
### Author : 06_taro
### Version : v0.1
### Release : 2012.04.22
##################################################################################################
### Designer : ema
### Version : v0.3
### Release : 2018.06.02
##################################################################################################
### Post-denoise filter of EraseLogo.
### Only process logo areas in logo frames, even if l/t/r/b are not set. Non-logo areas are left untouched.
###
### +---------+
### | USAGE |
### +---------+
###
### dlg [clip]
### ------------------
### Clip after delogo.
###
### src [clip]
### ------------------
### Clip before delogo.
###
### chroma [bool, default: True]
### ------------------
### Process chroma plane or not.
###
### l/t/r/b [int, default: 0]
### ------------------
### left/top/right/bottom pixels to be cropped for logo area.
### Have the same restriction as CropRel, e.g., no odd value for YV12.
### logoNR only filters the logo areas in logo frames, no matter l/t/r/b are set or not.
### So if you have other heavy filters running in a pipeline and don't care much about the speed of logoNR,
### it is safe to left these values unset.
### Setting these values only makes logoNR run faster, with rarely noticeable difference in result,
### unless you set wrong values and the logo is not covered in your cropped target area.
###
### d/a/s/h [int, default: 1/2/4/6]
### ------------------
### The same parameters of KNLMeansCL.
###
### device_type [string, default: 'gpu']
### ------------------
### 'accelerator' := Dedicated OpenCL accelerators.
### 'cpu' := An OpenCL device that is the host processor.
### 'gpu' := An OpenCL device that is a GPU.
### 'auto' := 'accelerator' -> 'gpu' -> 'cpu'.
###
### device_id [int, default: 1]
### ------------------
### The 'device_id' device of type 'device_type' in the system.
### Example: [device_type = "GPU", device_id = 1] return the second GPU in the system.
###
### +----------------+
### | REQUIREMENTS |
### +----------------+
### -> havsfunc
### -> KNLMeansCL
### -> RGVS(mt_expand_multi)
##################################################################################################
def LogoNR(dlg, src, chroma=True, l=0, t=0, r=0, b=0, d=1, a=2, s=2, h=3, device_type='gpu', device_id=1):
    """Denoise only the logo region of ``dlg`` (the delogo'd clip).

    ``src`` (the clip before delogo) is used to build a mask of where the
    logo was; pixels outside the mask are left untouched. ``l``/``t``/
    ``r``/``b`` optionally crop the processing down to the logo area for
    speed. ``d``/``a``/``s``/``h`` are passed to KNLMeansCL.
    """
    core = vs.get_core()
    if not (isinstance(dlg, vs.VideoNode) and isinstance(src, vs.VideoNode)):
        raise TypeError('logoNR: This is not a clip')
    if dlg.format.id != src.format.id:
        raise TypeError('logoNR: clips must have the same format')
    if dlg.format.color_family == vs.GRAY:
        chroma = False
    # When chroma processing is off on a non-GRAY clip, work on luma only
    # and merge the untouched chroma planes back in at the end.
    dlg_orig = None
    if not chroma and dlg.format.color_family != vs.GRAY:
        dlg_orig = dlg
        dlg = mvf.GetPlane(dlg, 0)
        src = mvf.GetPlane(src, 0)
    cropped = l != 0 or t != 0 or r != 0 or b != 0
    if cropped:
        src = core.std.Crop(src, l, r, t, b)
        work = core.std.Crop(dlg, l, r, t, b)
    else:
        work = dlg
    if chroma:
        denoised = haf.KNLMeansCL(work, d=d, a=a, s=s, h=h, device_type=device_type, device_id=device_id)
    else:
        denoised = core.knlm.KNLMeansCL(work, d=d, a=a, s=s, h=h, device_type=device_type, device_id=device_id)
    # The amplified difference between the delogo'd and original clips
    # marks the logo area; expand/soften it into a merge mask.
    diff = core.std.Expr([work, src], ['x y - abs 16 *'])
    logo_mask = haf.mt_expand_multi(diff, mode='losange', sw=3, sh=3).std.Convolution(matrix=[1, 1, 1, 1, 0, 1, 1, 1, 1]).std.Deflate()
    denoised = core.std.MaskedMerge(work, denoised, logo_mask)
    if cropped:
        # Paste the processed region back over the full-size clip.
        denoised = haf.Overlay(dlg, denoised, x=l, y=t)
    if dlg_orig is None:
        return denoised
    return core.std.ShufflePlanes([denoised, dlg_orig], planes=[0, 1, 2], colorfamily=dlg_orig.format.color_family)
##################################################################################################
### Function : EraseDMM360
### Function : EraseDMM450
### Function : EraseDMM720
### Function : EraseDMM720_2
### Function : EraseDMM1080
### Function : EraseDMM1080_2
### Function : GetLgdPath
### Author : ema
### Version : v0.2.2
### Release : 2018.01.11
##################################################################################################
### All of '*.lgd' files were made from SKE48 Stage Perf.
### EraseDMM720 applies only to SKE48 Stage Perf.
###
### GetLgdPath:
### path [str, default: '']
### ------------------
### Get the '*.lgd' files dir path.
### Parm of path is '' : True -> '/Python36/Lib/site-packages/logodata',
### False -> path.
###
### +----------------+
### | REQUIREMENTS |
### +----------------+
### -> LogoNR
### -> KNLMeansCL
### -> RemoveGrain/Repair
##################################################################################################
def EraseDMM360(src, lgd=None, pos_x=0, pos_y=0, device_type='gpu', device_id=1):
    """Erase the DMM watermark from a 640x360 clip (logo data: dmm360.lgd)."""
    logo = Default(lgd, GetLgdPath() + '/dmm360.lgd')
    return EraseAnyRel(src, logo, chroma=True, l=16, t=8, r=494, b=328,
                       pos_x=pos_x, pos_y=pos_y,
                       device_type=device_type, device_id=device_id)
def EraseDMM450(src, lgd=None, pos_x=0, pos_y=0, device_type='gpu', device_id=1):
    """Erase the DMM watermark from an 800x450 clip (logo data: dmm450.lgd)."""
    logo = Default(lgd, GetLgdPath() + '/dmm450.lgd')
    return EraseAnyRel(src, logo, chroma=True, l=20, t=4, r=678, b=420,
                       pos_x=pos_x, pos_y=pos_y,
                       device_type=device_type, device_id=device_id)
def EraseDMM720(src, lgd=None, pos_x=0, pos_y=0, device_type='gpu', device_id=1):
lgd = Default(lgd, GetLgdPath() + '/dmm_skelive720.lgd')
return EraseAnyRel(src, lgd, | |
Constraint(expr=m.x284*m.x2514 + m.x909*m.x2520 + m.x1534*m.x2526 + m.x2159*m.x2532 <= 8)
# Machine-generated Pyomo constraints c1553..c1679: each caps a sum of four
# bilinear terms at 8.  The four "weight" variables (x2514/x2520/x2526/x2532)
# are fixed per column while the paired variable indices advance by one per
# constraint.  Do not edit by hand; regenerate from the source model instead.
m.c1553 = Constraint(expr=m.x285*m.x2514 + m.x910*m.x2520 + m.x1535*m.x2526 + m.x2160*m.x2532 <= 8)
m.c1554 = Constraint(expr=m.x286*m.x2514 + m.x911*m.x2520 + m.x1536*m.x2526 + m.x2161*m.x2532 <= 8)
m.c1555 = Constraint(expr=m.x287*m.x2514 + m.x912*m.x2520 + m.x1537*m.x2526 + m.x2162*m.x2532 <= 8)
m.c1556 = Constraint(expr=m.x288*m.x2514 + m.x913*m.x2520 + m.x1538*m.x2526 + m.x2163*m.x2532 <= 8)
m.c1557 = Constraint(expr=m.x289*m.x2514 + m.x914*m.x2520 + m.x1539*m.x2526 + m.x2164*m.x2532 <= 8)
m.c1558 = Constraint(expr=m.x290*m.x2514 + m.x915*m.x2520 + m.x1540*m.x2526 + m.x2165*m.x2532 <= 8)
m.c1559 = Constraint(expr=m.x291*m.x2514 + m.x916*m.x2520 + m.x1541*m.x2526 + m.x2166*m.x2532 <= 8)
m.c1560 = Constraint(expr=m.x292*m.x2514 + m.x917*m.x2520 + m.x1542*m.x2526 + m.x2167*m.x2532 <= 8)
m.c1561 = Constraint(expr=m.x293*m.x2514 + m.x918*m.x2520 + m.x1543*m.x2526 + m.x2168*m.x2532 <= 8)
m.c1562 = Constraint(expr=m.x294*m.x2514 + m.x919*m.x2520 + m.x1544*m.x2526 + m.x2169*m.x2532 <= 8)
m.c1563 = Constraint(expr=m.x295*m.x2514 + m.x920*m.x2520 + m.x1545*m.x2526 + m.x2170*m.x2532 <= 8)
m.c1564 = Constraint(expr=m.x296*m.x2514 + m.x921*m.x2520 + m.x1546*m.x2526 + m.x2171*m.x2532 <= 8)
m.c1565 = Constraint(expr=m.x297*m.x2514 + m.x922*m.x2520 + m.x1547*m.x2526 + m.x2172*m.x2532 <= 8)
m.c1566 = Constraint(expr=m.x298*m.x2514 + m.x923*m.x2520 + m.x1548*m.x2526 + m.x2173*m.x2532 <= 8)
m.c1567 = Constraint(expr=m.x299*m.x2514 + m.x924*m.x2520 + m.x1549*m.x2526 + m.x2174*m.x2532 <= 8)
m.c1568 = Constraint(expr=m.x300*m.x2514 + m.x925*m.x2520 + m.x1550*m.x2526 + m.x2175*m.x2532 <= 8)
m.c1569 = Constraint(expr=m.x301*m.x2514 + m.x926*m.x2520 + m.x1551*m.x2526 + m.x2176*m.x2532 <= 8)
m.c1570 = Constraint(expr=m.x302*m.x2514 + m.x927*m.x2520 + m.x1552*m.x2526 + m.x2177*m.x2532 <= 8)
m.c1571 = Constraint(expr=m.x303*m.x2514 + m.x928*m.x2520 + m.x1553*m.x2526 + m.x2178*m.x2532 <= 8)
m.c1572 = Constraint(expr=m.x304*m.x2514 + m.x929*m.x2520 + m.x1554*m.x2526 + m.x2179*m.x2532 <= 8)
m.c1573 = Constraint(expr=m.x305*m.x2514 + m.x930*m.x2520 + m.x1555*m.x2526 + m.x2180*m.x2532 <= 8)
m.c1574 = Constraint(expr=m.x306*m.x2514 + m.x931*m.x2520 + m.x1556*m.x2526 + m.x2181*m.x2532 <= 8)
m.c1575 = Constraint(expr=m.x307*m.x2514 + m.x932*m.x2520 + m.x1557*m.x2526 + m.x2182*m.x2532 <= 8)
m.c1576 = Constraint(expr=m.x308*m.x2514 + m.x933*m.x2520 + m.x1558*m.x2526 + m.x2183*m.x2532 <= 8)
m.c1577 = Constraint(expr=m.x309*m.x2514 + m.x934*m.x2520 + m.x1559*m.x2526 + m.x2184*m.x2532 <= 8)
m.c1578 = Constraint(expr=m.x310*m.x2514 + m.x935*m.x2520 + m.x1560*m.x2526 + m.x2185*m.x2532 <= 8)
m.c1579 = Constraint(expr=m.x311*m.x2514 + m.x936*m.x2520 + m.x1561*m.x2526 + m.x2186*m.x2532 <= 8)
m.c1580 = Constraint(expr=m.x312*m.x2514 + m.x937*m.x2520 + m.x1562*m.x2526 + m.x2187*m.x2532 <= 8)
m.c1581 = Constraint(expr=m.x313*m.x2514 + m.x938*m.x2520 + m.x1563*m.x2526 + m.x2188*m.x2532 <= 8)
m.c1582 = Constraint(expr=m.x314*m.x2514 + m.x939*m.x2520 + m.x1564*m.x2526 + m.x2189*m.x2532 <= 8)
m.c1583 = Constraint(expr=m.x315*m.x2514 + m.x940*m.x2520 + m.x1565*m.x2526 + m.x2190*m.x2532 <= 8)
m.c1584 = Constraint(expr=m.x316*m.x2514 + m.x941*m.x2520 + m.x1566*m.x2526 + m.x2191*m.x2532 <= 8)
m.c1585 = Constraint(expr=m.x317*m.x2514 + m.x942*m.x2520 + m.x1567*m.x2526 + m.x2192*m.x2532 <= 8)
m.c1586 = Constraint(expr=m.x318*m.x2514 + m.x943*m.x2520 + m.x1568*m.x2526 + m.x2193*m.x2532 <= 8)
m.c1587 = Constraint(expr=m.x319*m.x2514 + m.x944*m.x2520 + m.x1569*m.x2526 + m.x2194*m.x2532 <= 8)
m.c1588 = Constraint(expr=m.x320*m.x2514 + m.x945*m.x2520 + m.x1570*m.x2526 + m.x2195*m.x2532 <= 8)
m.c1589 = Constraint(expr=m.x321*m.x2514 + m.x946*m.x2520 + m.x1571*m.x2526 + m.x2196*m.x2532 <= 8)
m.c1590 = Constraint(expr=m.x322*m.x2514 + m.x947*m.x2520 + m.x1572*m.x2526 + m.x2197*m.x2532 <= 8)
m.c1591 = Constraint(expr=m.x323*m.x2514 + m.x948*m.x2520 + m.x1573*m.x2526 + m.x2198*m.x2532 <= 8)
m.c1592 = Constraint(expr=m.x324*m.x2514 + m.x949*m.x2520 + m.x1574*m.x2526 + m.x2199*m.x2532 <= 8)
m.c1593 = Constraint(expr=m.x325*m.x2514 + m.x950*m.x2520 + m.x1575*m.x2526 + m.x2200*m.x2532 <= 8)
m.c1594 = Constraint(expr=m.x326*m.x2514 + m.x951*m.x2520 + m.x1576*m.x2526 + m.x2201*m.x2532 <= 8)
m.c1595 = Constraint(expr=m.x327*m.x2514 + m.x952*m.x2520 + m.x1577*m.x2526 + m.x2202*m.x2532 <= 8)
m.c1596 = Constraint(expr=m.x328*m.x2514 + m.x953*m.x2520 + m.x1578*m.x2526 + m.x2203*m.x2532 <= 8)
m.c1597 = Constraint(expr=m.x329*m.x2514 + m.x954*m.x2520 + m.x1579*m.x2526 + m.x2204*m.x2532 <= 8)
m.c1598 = Constraint(expr=m.x330*m.x2514 + m.x955*m.x2520 + m.x1580*m.x2526 + m.x2205*m.x2532 <= 8)
m.c1599 = Constraint(expr=m.x331*m.x2514 + m.x956*m.x2520 + m.x1581*m.x2526 + m.x2206*m.x2532 <= 8)
m.c1600 = Constraint(expr=m.x332*m.x2514 + m.x957*m.x2520 + m.x1582*m.x2526 + m.x2207*m.x2532 <= 8)
m.c1601 = Constraint(expr=m.x333*m.x2514 + m.x958*m.x2520 + m.x1583*m.x2526 + m.x2208*m.x2532 <= 8)
m.c1602 = Constraint(expr=m.x334*m.x2514 + m.x959*m.x2520 + m.x1584*m.x2526 + m.x2209*m.x2532 <= 8)
m.c1603 = Constraint(expr=m.x335*m.x2514 + m.x960*m.x2520 + m.x1585*m.x2526 + m.x2210*m.x2532 <= 8)
m.c1604 = Constraint(expr=m.x336*m.x2514 + m.x961*m.x2520 + m.x1586*m.x2526 + m.x2211*m.x2532 <= 8)
m.c1605 = Constraint(expr=m.x337*m.x2514 + m.x962*m.x2520 + m.x1587*m.x2526 + m.x2212*m.x2532 <= 8)
m.c1606 = Constraint(expr=m.x338*m.x2514 + m.x963*m.x2520 + m.x1588*m.x2526 + m.x2213*m.x2532 <= 8)
m.c1607 = Constraint(expr=m.x339*m.x2514 + m.x964*m.x2520 + m.x1589*m.x2526 + m.x2214*m.x2532 <= 8)
m.c1608 = Constraint(expr=m.x340*m.x2514 + m.x965*m.x2520 + m.x1590*m.x2526 + m.x2215*m.x2532 <= 8)
m.c1609 = Constraint(expr=m.x341*m.x2514 + m.x966*m.x2520 + m.x1591*m.x2526 + m.x2216*m.x2532 <= 8)
m.c1610 = Constraint(expr=m.x342*m.x2514 + m.x967*m.x2520 + m.x1592*m.x2526 + m.x2217*m.x2532 <= 8)
m.c1611 = Constraint(expr=m.x343*m.x2514 + m.x968*m.x2520 + m.x1593*m.x2526 + m.x2218*m.x2532 <= 8)
m.c1612 = Constraint(expr=m.x344*m.x2514 + m.x969*m.x2520 + m.x1594*m.x2526 + m.x2219*m.x2532 <= 8)
m.c1613 = Constraint(expr=m.x345*m.x2514 + m.x970*m.x2520 + m.x1595*m.x2526 + m.x2220*m.x2532 <= 8)
m.c1614 = Constraint(expr=m.x346*m.x2514 + m.x971*m.x2520 + m.x1596*m.x2526 + m.x2221*m.x2532 <= 8)
m.c1615 = Constraint(expr=m.x347*m.x2514 + m.x972*m.x2520 + m.x1597*m.x2526 + m.x2222*m.x2532 <= 8)
m.c1616 = Constraint(expr=m.x348*m.x2514 + m.x973*m.x2520 + m.x1598*m.x2526 + m.x2223*m.x2532 <= 8)
m.c1617 = Constraint(expr=m.x349*m.x2514 + m.x974*m.x2520 + m.x1599*m.x2526 + m.x2224*m.x2532 <= 8)
m.c1618 = Constraint(expr=m.x350*m.x2514 + m.x975*m.x2520 + m.x1600*m.x2526 + m.x2225*m.x2532 <= 8)
m.c1619 = Constraint(expr=m.x351*m.x2514 + m.x976*m.x2520 + m.x1601*m.x2526 + m.x2226*m.x2532 <= 8)
m.c1620 = Constraint(expr=m.x352*m.x2514 + m.x977*m.x2520 + m.x1602*m.x2526 + m.x2227*m.x2532 <= 8)
m.c1621 = Constraint(expr=m.x353*m.x2514 + m.x978*m.x2520 + m.x1603*m.x2526 + m.x2228*m.x2532 <= 8)
m.c1622 = Constraint(expr=m.x354*m.x2514 + m.x979*m.x2520 + m.x1604*m.x2526 + m.x2229*m.x2532 <= 8)
m.c1623 = Constraint(expr=m.x355*m.x2514 + m.x980*m.x2520 + m.x1605*m.x2526 + m.x2230*m.x2532 <= 8)
m.c1624 = Constraint(expr=m.x356*m.x2514 + m.x981*m.x2520 + m.x1606*m.x2526 + m.x2231*m.x2532 <= 8)
m.c1625 = Constraint(expr=m.x357*m.x2514 + m.x982*m.x2520 + m.x1607*m.x2526 + m.x2232*m.x2532 <= 8)
m.c1626 = Constraint(expr=m.x358*m.x2514 + m.x983*m.x2520 + m.x1608*m.x2526 + m.x2233*m.x2532 <= 8)
m.c1627 = Constraint(expr=m.x359*m.x2514 + m.x984*m.x2520 + m.x1609*m.x2526 + m.x2234*m.x2532 <= 8)
m.c1628 = Constraint(expr=m.x360*m.x2514 + m.x985*m.x2520 + m.x1610*m.x2526 + m.x2235*m.x2532 <= 8)
m.c1629 = Constraint(expr=m.x361*m.x2514 + m.x986*m.x2520 + m.x1611*m.x2526 + m.x2236*m.x2532 <= 8)
m.c1630 = Constraint(expr=m.x362*m.x2514 + m.x987*m.x2520 + m.x1612*m.x2526 + m.x2237*m.x2532 <= 8)
m.c1631 = Constraint(expr=m.x363*m.x2514 + m.x988*m.x2520 + m.x1613*m.x2526 + m.x2238*m.x2532 <= 8)
m.c1632 = Constraint(expr=m.x364*m.x2514 + m.x989*m.x2520 + m.x1614*m.x2526 + m.x2239*m.x2532 <= 8)
m.c1633 = Constraint(expr=m.x365*m.x2514 + m.x990*m.x2520 + m.x1615*m.x2526 + m.x2240*m.x2532 <= 8)
m.c1634 = Constraint(expr=m.x366*m.x2514 + m.x991*m.x2520 + m.x1616*m.x2526 + m.x2241*m.x2532 <= 8)
m.c1635 = Constraint(expr=m.x367*m.x2514 + m.x992*m.x2520 + m.x1617*m.x2526 + m.x2242*m.x2532 <= 8)
m.c1636 = Constraint(expr=m.x368*m.x2514 + m.x993*m.x2520 + m.x1618*m.x2526 + m.x2243*m.x2532 <= 8)
m.c1637 = Constraint(expr=m.x369*m.x2514 + m.x994*m.x2520 + m.x1619*m.x2526 + m.x2244*m.x2532 <= 8)
m.c1638 = Constraint(expr=m.x370*m.x2514 + m.x995*m.x2520 + m.x1620*m.x2526 + m.x2245*m.x2532 <= 8)
m.c1639 = Constraint(expr=m.x371*m.x2514 + m.x996*m.x2520 + m.x1621*m.x2526 + m.x2246*m.x2532 <= 8)
m.c1640 = Constraint(expr=m.x372*m.x2514 + m.x997*m.x2520 + m.x1622*m.x2526 + m.x2247*m.x2532 <= 8)
m.c1641 = Constraint(expr=m.x373*m.x2514 + m.x998*m.x2520 + m.x1623*m.x2526 + m.x2248*m.x2532 <= 8)
m.c1642 = Constraint(expr=m.x374*m.x2514 + m.x999*m.x2520 + m.x1624*m.x2526 + m.x2249*m.x2532 <= 8)
m.c1643 = Constraint(expr=m.x375*m.x2514 + m.x1000*m.x2520 + m.x1625*m.x2526 + m.x2250*m.x2532 <= 8)
m.c1644 = Constraint(expr=m.x376*m.x2514 + m.x1001*m.x2520 + m.x1626*m.x2526 + m.x2251*m.x2532 <= 8)
m.c1645 = Constraint(expr=m.x377*m.x2514 + m.x1002*m.x2520 + m.x1627*m.x2526 + m.x2252*m.x2532 <= 8)
m.c1646 = Constraint(expr=m.x378*m.x2514 + m.x1003*m.x2520 + m.x1628*m.x2526 + m.x2253*m.x2532 <= 8)
m.c1647 = Constraint(expr=m.x379*m.x2514 + m.x1004*m.x2520 + m.x1629*m.x2526 + m.x2254*m.x2532 <= 8)
m.c1648 = Constraint(expr=m.x380*m.x2514 + m.x1005*m.x2520 + m.x1630*m.x2526 + m.x2255*m.x2532 <= 8)
m.c1649 = Constraint(expr=m.x381*m.x2514 + m.x1006*m.x2520 + m.x1631*m.x2526 + m.x2256*m.x2532 <= 8)
m.c1650 = Constraint(expr=m.x382*m.x2514 + m.x1007*m.x2520 + m.x1632*m.x2526 + m.x2257*m.x2532 <= 8)
m.c1651 = Constraint(expr=m.x383*m.x2514 + m.x1008*m.x2520 + m.x1633*m.x2526 + m.x2258*m.x2532 <= 8)
m.c1652 = Constraint(expr=m.x384*m.x2514 + m.x1009*m.x2520 + m.x1634*m.x2526 + m.x2259*m.x2532 <= 8)
m.c1653 = Constraint(expr=m.x385*m.x2514 + m.x1010*m.x2520 + m.x1635*m.x2526 + m.x2260*m.x2532 <= 8)
m.c1654 = Constraint(expr=m.x386*m.x2514 + m.x1011*m.x2520 + m.x1636*m.x2526 + m.x2261*m.x2532 <= 8)
m.c1655 = Constraint(expr=m.x387*m.x2514 + m.x1012*m.x2520 + m.x1637*m.x2526 + m.x2262*m.x2532 <= 8)
m.c1656 = Constraint(expr=m.x388*m.x2514 + m.x1013*m.x2520 + m.x1638*m.x2526 + m.x2263*m.x2532 <= 8)
m.c1657 = Constraint(expr=m.x389*m.x2514 + m.x1014*m.x2520 + m.x1639*m.x2526 + m.x2264*m.x2532 <= 8)
m.c1658 = Constraint(expr=m.x390*m.x2514 + m.x1015*m.x2520 + m.x1640*m.x2526 + m.x2265*m.x2532 <= 8)
m.c1659 = Constraint(expr=m.x391*m.x2514 + m.x1016*m.x2520 + m.x1641*m.x2526 + m.x2266*m.x2532 <= 8)
m.c1660 = Constraint(expr=m.x392*m.x2514 + m.x1017*m.x2520 + m.x1642*m.x2526 + m.x2267*m.x2532 <= 8)
m.c1661 = Constraint(expr=m.x393*m.x2514 + m.x1018*m.x2520 + m.x1643*m.x2526 + m.x2268*m.x2532 <= 8)
m.c1662 = Constraint(expr=m.x394*m.x2514 + m.x1019*m.x2520 + m.x1644*m.x2526 + m.x2269*m.x2532 <= 8)
m.c1663 = Constraint(expr=m.x395*m.x2514 + m.x1020*m.x2520 + m.x1645*m.x2526 + m.x2270*m.x2532 <= 8)
m.c1664 = Constraint(expr=m.x396*m.x2514 + m.x1021*m.x2520 + m.x1646*m.x2526 + m.x2271*m.x2532 <= 8)
m.c1665 = Constraint(expr=m.x397*m.x2514 + m.x1022*m.x2520 + m.x1647*m.x2526 + m.x2272*m.x2532 <= 8)
m.c1666 = Constraint(expr=m.x398*m.x2514 + m.x1023*m.x2520 + m.x1648*m.x2526 + m.x2273*m.x2532 <= 8)
m.c1667 = Constraint(expr=m.x399*m.x2514 + m.x1024*m.x2520 + m.x1649*m.x2526 + m.x2274*m.x2532 <= 8)
m.c1668 = Constraint(expr=m.x400*m.x2514 + m.x1025*m.x2520 + m.x1650*m.x2526 + m.x2275*m.x2532 <= 8)
m.c1669 = Constraint(expr=m.x401*m.x2514 + m.x1026*m.x2520 + m.x1651*m.x2526 + m.x2276*m.x2532 <= 8)
m.c1670 = Constraint(expr=m.x402*m.x2514 + m.x1027*m.x2520 + m.x1652*m.x2526 + m.x2277*m.x2532 <= 8)
m.c1671 = Constraint(expr=m.x403*m.x2514 + m.x1028*m.x2520 + m.x1653*m.x2526 + m.x2278*m.x2532 <= 8)
m.c1672 = Constraint(expr=m.x404*m.x2514 + m.x1029*m.x2520 + m.x1654*m.x2526 + m.x2279*m.x2532 <= 8)
m.c1673 = Constraint(expr=m.x405*m.x2514 + m.x1030*m.x2520 + m.x1655*m.x2526 + m.x2280*m.x2532 <= 8)
m.c1674 = Constraint(expr=m.x406*m.x2514 + m.x1031*m.x2520 + m.x1656*m.x2526 + m.x2281*m.x2532 <= 8)
m.c1675 = Constraint(expr=m.x407*m.x2514 + m.x1032*m.x2520 + m.x1657*m.x2526 + m.x2282*m.x2532 <= 8)
m.c1676 = Constraint(expr=m.x408*m.x2514 + m.x1033*m.x2520 + m.x1658*m.x2526 + m.x2283*m.x2532 <= 8)
m.c1677 = Constraint(expr=m.x409*m.x2514 + m.x1034*m.x2520 + m.x1659*m.x2526 + m.x2284*m.x2532 <= 8)
m.c1678 = Constraint(expr=m.x410*m.x2514 + m.x1035*m.x2520 + m.x1660*m.x2526 + m.x2285*m.x2532 <= 8)
m.c1679 = Constraint(expr=m.x411*m.x2514 + m.x1036*m.x2520 + m.x1661*m.x2526 + m.x2286*m.x2532 <= 8)
m.c1680 = | |
# coding=latin-1
#
# natlinkconfigfunctions.py
# This module performs the configuration functions.
# called from natlinkconfig (a wxPython GUI),
# or directly, see below
#
# <NAME>, January 2008
#
"""
With the functions in this module NatLink can be configured.
This can be done in three ways:
-Through the command line interface (CLI) which is started automatically
when this module is run (with Pythonwin, IDLE, or command line of Python)
-On the command line, using one of the different command line options
-Through the configure GUI (natlinkconfig.py), which calls into this module
This last one needs wxPython to be installed.
*** the core directory is relative to this directory ...
...and will be searched for first.
Afterwards can be set:
DNSInstallDir
- if not found in one of the predefined subfolders of %PROGRAMFILES%,
this directory can be set in HKCU\Software\Natlink.
Functions: setDNSInstallDir(path) (d path) and clearDNSInstallDir() (D)
DNSINIDir
- if not found in one of the subfolders of %COMMON_APPDATA%
where they are expected, this one can be set in HKCU\Software\Natlink.
Functions: setDNSIniDir(path) (c path) and clearDNSIniDir() (C)
When NatLink is enabled natlink.pyd is registered with
win32api.WinExec("regsvr32 /s pathToNatlinkPyd") (silent)
It can be unregistered through function unregisterNatlinkPyd() see below.
Other functions inside this module, with calls from CLI or command line:
enableNatlink() (e)/disableNatlink() (E)
setUserDirectory(path) (n path) or clearUserDirectory() (N)
etc.
More at the bottom, with the CLI description...
"""
import ctypes
import traceback
import types
# Prefer the pywin32 wrapper; outside a pywin32 install fall back to the raw
# shell32 API.  Catch only ImportError -- the original bare `except:` would
# also have hidden unrelated failures inside win32com.
try:
    from win32com.shell.shell import IsUserAnAdmin
except ImportError:
    IsUserAnAdmin = ctypes.windll.shell32.IsUserAnAdmin
# Define windowsMessageBox() without requiring wx: use win32ui when available,
# otherwise fall back to the raw user32 MessageBoxA call.  Catch only
# ImportError instead of the original bare `except:`, which would also have
# masked unrelated errors.
try:
    from win32ui import MessageBox
    def windowsMessageBox(message, title="NatLink configure program"):
        """do messagebox from windows, no wx needed
        """
        MessageBox(message, title)
except ImportError:
    import ctypes
    MessageBoxA = ctypes.windll.user32.MessageBoxA
    def windowsMessageBox(message, title="NatLink configure program"):
        """do messagebox from windows, no wx needed
        for old versions of python
        """
        MessageBoxA(None, message, title, 0)
import os, shutil
import sys
import pywintypes
if __name__ == '__main__':
    # Warn users on ancient Python 2.3/2.5 interpreters: the matching
    # natlink.pyd builds only support NatSpeak 10 and before.
    if sys.version[0] == '2' and sys.version[2] in ['3', '5']:
        pyVersion = sys.version[:3]
        warning = '\n\n'.join([
            "Here are the natlinkconfigfunctions, with which you can configure NatLink even for this older (%s) version of Python."% pyVersion,
            "Note: the natlink.pyd files (natlink.dll) that work with python %s are for older versions of NatSpeak (10 and before) only."% pyVersion,
            "For Dragon 11 and later, some things may work, but it is better to upgrade to Python 2.6 or 2.7. You then use the newer natlink.pyd files in which several problems that arose between NatSpeak 10 and Dragon 11 are solved."])
        windowsMessageBox(warning)
class ElevationError(Exception):
    """Raised when an operation needs the configure program to run elevated.

    The supplied message is extended with a reminder to close Dragon.
    """
    def __init__(self, message):
        self.message = message + '\n\n(please also close Dragon if it is running)'
        # Pass the final text to the base class as well: the original never
        # called Exception.__init__, so str(err) was always empty.
        Exception.__init__(self, self.message)
class NatSpeakRunningError(Exception):
    """Raised when an action cannot proceed while Dragon/NatSpeak is running.

    The supplied message is extended with instructions to close Dragon first.
    """
    def __init__(self, message):
        self.message = message + '\nPlease close Dragon and this program and try it again'
        # Pass the final text to the base class as well: the original never
        # called Exception.__init__, so str(err) was always empty.
        Exception.__init__(self, self.message)
# Status keys no longer in use -- presumably kept so stale entries can be
# purged from natlinkstatus.ini; usage not visible in this chunk (verify).
ObsoleteStatusKeys = ('VocolaUsesSimpscrp', 'VocolaCommandFilesEditor', 'NatlinkDebug')
#--------- two utility functions:
def getBaseFolder(globalsDict=None):
    """Return the folder of the calling module.

    When run as a script this is the directory of sys.argv[0]; when
    imported it is the directory of __file__.  Falls back to the current
    working directory when neither yields a usable path.
    """
    globalsDictHere = globalsDict or globals()
    baseFolder = ""
    if globalsDictHere.get('__name__') == "__main__":
        baseFolder = os.path.split(sys.argv[0])[0]
        # print(...) call form: identical output for one argument in py2,
        # and keeps the function usable under py3 as well
        print('baseFolder from argv: %s'% baseFolder)
    elif globalsDictHere.get('__file__'):
        # .get(): __file__ can be absent (frozen app, embedded interpreter);
        # the original subscript raised KeyError in that case
        baseFolder = os.path.split(globalsDictHere['__file__'])[0]
        print('baseFolder from __file__: %s'% baseFolder)
    if not baseFolder or baseFolder == '.':
        baseFolder = os.getcwd()
        print('baseFolder was empty, take wd: %s'% baseFolder)
    return baseFolder
def getCoreDir(thisDir):
    """Return the NatLink core folder, resolved relative to *thisDir*.

    The folder is expected at ../MacroSystem/core and must contain
    natlinkmain.py and natlinkstatus.py.  When the folder or one of those
    files is missing, a diagnostic line is printed and *thisDir* is
    returned unchanged.
    SHOULD ONLY BE CALLED BY natlinkconfigfunctions.py
    """
    coreFolder = os.path.normpath(os.path.join(thisDir, '..', 'MacroSystem', 'core'))
    # print(...) call form: identical output for one argument in py2, and
    # keeps the function parseable under py3; dead commented-out natlink.pyd
    # checks removed
    print('coreDirectory: %s'% coreFolder)
    if not os.path.isdir(coreFolder):
        print('not a directory: %s'% coreFolder)
        return thisDir
    mainPath = os.path.join(coreFolder, 'natlinkmain.py')
    statusPath = os.path.join(coreFolder, 'natlinkstatus.py')
    if not os.path.isfile(mainPath):
        print('natlinkmain.py not found in core directory: %s'% coreFolder)
        return thisDir
    if not os.path.isfile(statusPath):
        print('natlinkstatus.py not found in core directory: %s'% coreFolder)
        return thisDir
    return coreFolder
hadFatalErrors = []  # distinct messages reported so far; only the first opens a dialog


def fatal_error(message, new_raise=None):
    """Report a fatal error of this configuration module.

    Only the first fatal error pops up a message box; each distinct
    message is remembered in hadFatalErrors.  When *new_raise* is given,
    that exception is raised after reporting.
    """
    if not hadFatalErrors:
        mess = '\n'.join(
            ['natlinkconfigfunctions failed because of fatal error:',
             '', message, '',
             'So if Dragon is running, close it and then rerun this program (in elevated mode).'])
        windowsMessageBox(mess)
        print(mess)
    if message not in hadFatalErrors:
        hadFatalErrors.append(message)
    if new_raise:
        raise new_raise
#-----------------------------------------------------
from win32com.shell import shell
import win32api
# Module-level bootstrap: locate this directory and the NatLink core
# directory, and make both importable before pulling in the core modules.
thisDir = getBaseFolder(globals())
coreDir = getCoreDir(thisDir)
if thisDir == coreDir:
    # getCoreDir returns its argument unchanged when the core dir is missing
    raise IOError('natlinkconfigfunctions cannot proceed, coreDir not found...')
# appending to path if necessary:
if not os.path.normpath(thisDir) in sys.path:
    thisDir = os.path.normpath(thisDir)
    print 'inserting %s to pythonpath...'% thisDir
    sys.path.insert(0, thisDir)
if not os.path.normpath(coreDir) in sys.path:
    coreDir = os.path.normpath(coreDir)
    print 'inserting %s to pythonpath...'% coreDir
    sys.path.insert(0, coreDir)
# from core directory, use registry entries from CURRENT_USER/Software/Natlink:
import natlinkstatus, natlinkcorefunctions, RegistryDict
import os, os.path, sys, getopt, cmd, types, string, win32con
# import natlink # to see if NatSpeak is running...
class NatlinkConfig(natlinkstatus.NatlinkStatus):
"""performs the configuration tasks of NatLink
userregnl got from natlinkstatus, as a Class (not instance) variable, so
should be the same among instances of this class...
the checkCoreDirectory function is automatically performed at start, to see if the initialisation does not
take place from another place as the registered natlink.pyd...
"""
    def __init__(self):
        # user-facing product name used in messages
        # NOTE(review): set before the base-class init, which may rely on it
        self.DNSName = 'Dragon'
        natlinkstatus.NatlinkStatus.__init__(self, skipSpecialWarning=1)
        # set to 1 when natlink.pyd is replaced/registered during startup
        self.changesInInitPhase = 0
        self.isElevated = IsUserAnAdmin()  # running with admin rights?
def checkCoreDirectory(self):
"""check if coreDir (from this file) and coreDirectory (from natlinkstatus) match, if not, raise error
"""
coreDir2 = self.getCoreDirectory()
if coreDir2.lower() != coreDir.lower():
fatal_error('ambiguous core directory,\nfrom this module: %s\from status in natlinkstatus: %s'%
(coreDir, coreDir2))
def checkDNSInstallDir(self):
"""check if install directory of Dragon is found
if not rais an error
"""
try:
dnsDir = self.getDNSInstallDir()
except IOError:
dnsDir = None
if not dnsDir:
fatal_error('no valid DNSInstallDir found, please repair in Config program or Configuration GUI')
pass
def configCheckNatlinkPydFile(self):
"""see if natlink.pyd is in core directory, if not copy from correct version
if DNSInstallDir or DNSIniDir is not properly set, all goes wrong.
"""
self.checkedUrgent = 1
if sys.version.find("64 bit") >= 0:
print '============================================='
print 'You installed a 64 bit version of python.'
print 'NatLink cannot run with this version, please uninstall and'
print 'install a 32 bit version of python, see http://qh.antenna.nl/unimacro,,,'
print '============================================='
return
if self.getDNSInstallDir == -1:
return
if self.getDNSIniDir == -1:
return
coreDir2 = self.getCoreDirectory()
if coreDir2.lower() != coreDir.lower():
fatal_error('ambiguous core directory,\nfrom this module: %s\from status in natlinkstatus: %s'%
(coreDir, coreDir2))
currentPydPath = os.path.join(coreDir, 'natlink.pyd')
if not os.path.isfile(currentPydPath):
if not self.isElevated: raise ElevationError("natlink.pyd is not found")
mess = "natlink.pyd is not found, try to repair this."
# windowsMessageBox(mess)
# self.message("natlink.pyd is not found, try to repair this.")
key = 'NatlinkPydRegistered'
# print '%s does not exist, remove "%s" from natlinkstatus.ini and setup up new pyd file...'% (currentPydPath, key)
self.userregnl.delete(key)
natlinkPydWasAlreadyThere = 0
self.checkedUrgent = None
else:
natlinkPydWasAlreadyThere = 1
wantedPyd = self.getWantedNatlinkPydFile() # wanted original based on python version and Dragon version
if self.checkNatlinkPydFile(fromConfig=1) == 1: # check the need for replacing natlink.pyd without messages...
self.checkedUrgent = None
return 1 # all is well
# for message:
#fatal_error("The current file natlink.pyd is not available, the correct version or outdated, try to replace it by the proper (newer) version...")
## now go on with trying to replace natlink.pyd with the correct version and register it...
wantedPydPath = os.path.join(coreDir, 'PYD', wantedPyd)
if not wantedPyd:
fatal_error('natlinkconfigfunctions, configCheckNatlinkPydFile: Could not find filename for wantedPydPath\ncoreDir: %s, wantedPyd: %s'% (coreDir, wantedPyd))
return
if not os.path.isfile(wantedPydPath):
fatal_error('natlinkconfigfunctions, configCheckNatlinkPydFile: wantedPydPath does not exits: %s'% wantedPydPath)
return
if natlinkPydWasAlreadyThere:
if not self.isElevated: raise ElevationError("natlink.pyd should be changed")
# if self.isNatSpeakRunning(): raise NatSpeakRunningError("natlink.pyd should be changed")
self.changesInInitPhase = 1
result = self.copyNatlinkPydPythonVersion(wantedPydPath, currentPydPath)
if not result:
return
result = self.registerNatlinkPyd()
if result:
print '-'*30
print 'Copying and registering the latest natlink.pyd was succesful.'
print 'You can now close this program and restart Dragon.'
print '-'*30
else:
if not self.isElevated: raise ElevationError("first run of configure program must be done in elevated mode")
result = self.copyNatlinkPydPythonVersion(wantedPydPath, currentPydPath)
| |
# -*- coding: utf-8 -*-
#
# Copyright 2020 Data61, CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..core.graph import StellarGraph
from ..core.utils import is_real_iterable, normalize_adj
from ..random import random_state
import numpy as np
from tensorflow.keras.utils import Sequence
from .base import Generator
class PaddedGraphGenerator(Generator):
    """
    A data generator for use with graph classification algorithms.

    The supplied graphs should be :class:`.StellarGraph` objects with node features.
    Use the :meth:`flow` method supplying the graph indexes and (optionally) targets
    to get an object that can be used as a Keras data generator.

    This generator supplies the features arrays and the adjacency matrices to a mini-batch Keras
    graph classification model. Differences in the number of nodes are resolved by padding each
    batch of features and adjacency matrices, and supplying a boolean mask indicating which are
    valid and which are padding.

    .. seealso::

       Models using this generator: :class:`.GCNSupervisedGraphClassification`, :class:`.DeepGraphCNN`.

       Examples using this generator:

       - `graph classification with GCN <https://stellargraph.readthedocs.io/en/stable/demos/graph-classification/gcn-supervised-graph-classification.html>`__
       - `graph classification with Deep Graph CNN <https://stellargraph.readthedocs.io/en/stable/demos/graph-classification/dgcnn-graph-classification.html>`__
       - `unsupervised graph representation learning <https://stellargraph.readthedocs.io/en/stable/demos/embeddings/gcn-unsupervised-graph-embeddings.html>`__

    Args:
        graphs (list): a collection of StellarGraph objects
        name (str): an optional name of the generator
    """

    def __init__(self, graphs, name=None):
        self.node_features_size = None
        self._check_graphs(graphs)
        self.graphs = graphs
        self.name = name

    def _check_graphs(self, graphs):
        # Validate every graph: correct type, non-empty, single node type,
        # ML-ready, and a node feature dimensionality consistent with the rest.
        for graph in graphs:
            if not isinstance(graph, StellarGraph):
                raise TypeError(
                    f"graphs: expected every element to be a StellarGraph object, found {type(graph).__name__}."
                )
            if graph.number_of_nodes() == 0:
                # an empty graph has no information at all and breaks things like mean pooling, so
                # let's disallow them
                raise ValueError(
                    "graphs: expected every graph to be non-empty, found graph with no nodes"
                )
            # Check that there is only a single node type for GAT or GCN
            node_type = graph.unique_node_type(
                "graphs: expected only graphs with a single node type, found a graph with node types: %(found)s"
            )
            graph.check_graph_for_ml()
            # we require that all graphs have node features of the same dimensionality
            f_dim = graph.node_feature_sizes()[node_type]
            if self.node_features_size is None:
                self.node_features_size = f_dim
            elif self.node_features_size != f_dim:
                # note the trailing space: the original concatenation produced
                # "dimensions,found ..." with the words run together
                raise ValueError(
                    "graphs: expected node features for all graph to have same dimensions, "
                    f"found {self.node_features_size} vs {f_dim}"
                )

    def num_batch_dims(self):
        return 1

    def flow(
        self,
        graphs,
        targets=None,
        symmetric_normalization=True,
        weighted=False,
        batch_size=1,
        name=None,
        shuffle=False,
        seed=None,
    ):
        """
        Creates a generator/sequence object for training, evaluation, or prediction
        with the supplied graph indexes and targets.

        Args:
            graphs (iterable): an iterable of graph indexes in self.graphs or an iterable of :class:`.StellarGraph` objects
                for the graphs of interest (e.g., training, validation, or test set nodes).
            targets (2d array, optional): a 2D array of numeric graph targets with shape ``(len(graphs),
                len(targets))``.
            symmetric_normalization (bool, optional): The type of normalization to be applied on the graph adjacency
                matrices. If True, the adjacency matrix is left and right multiplied by the inverse square root of the
                degree matrix; otherwise, the adjacency matrix is only left multiplied by the inverse of the degree
                matrix.
            weighted (bool, optional): if True, use the edge weights from ``G``; if False, treat the
                graph as unweighted.
            batch_size (int, optional): The batch size.
            name (str, optional): An optional name for the returned generator object.
            shuffle (bool, optional): If True the node IDs will be shuffled at the end of each epoch.
            seed (int, optional): Random seed to use in the sequence object.

        Returns:
            A :class:`.PaddedGraphSequence` object to use with Keras methods :meth:`fit`, :meth:`evaluate`, and :meth:`predict`

        Raises:
            TypeError: if ``targets`` is not iterable or ``batch_size`` is not an int.
            ValueError: if ``graphs`` is empty, has an unsupported shape, or does not match ``targets``.
        """
        if targets is not None:
            # Check targets is an iterable
            if not is_real_iterable(targets):
                raise TypeError(
                    f"targets: expected an iterable or None object, found {type(targets).__name__}"
                )
            # Check targets correct shape
            if len(targets) != len(graphs):
                raise ValueError(
                    f"expected targets to be the same length as node_ids, found {len(targets)} vs {len(graphs)}"
                )
        if not isinstance(batch_size, int):
            raise TypeError(
                f"expected batch_size to be integer type, found {type(batch_size).__name__}"
            )
        if batch_size <= 0:
            raise ValueError(
                f"expected batch_size to be strictly positive integer, found {batch_size}"
            )
        graphs_array = np.asarray(graphs)
        if graphs_array.size == 0:
            # the original code indexed flat_graphs[0] unconditionally and
            # crashed with an opaque IndexError on empty input
            raise ValueError("graphs: expected at least one graph index or StellarGraph, found none")
        if len(graphs_array.shape) == 1:
            graphs_array = graphs_array[:, None]
        elif len(graphs_array.shape) != 2:
            raise ValueError(
                f"graphs: expected a shape of length 1 or 2, found shape {graphs_array.shape}"
            )
        flat_graphs = graphs_array.ravel()
        if isinstance(flat_graphs[0], StellarGraph):
            # graphs passed directly: validate them and index them positionally
            self._check_graphs(flat_graphs)
            graphs = flat_graphs
            selected_ilocs = np.arange(len(graphs)).reshape(graphs_array.shape)
        else:
            # indexes into the generator's own graph collection
            selected_ilocs = graphs_array
            graphs = self.graphs

        return PaddedGraphSequence(
            graphs=graphs,
            selected_ilocs=selected_ilocs,
            targets=targets,
            symmetric_normalization=symmetric_normalization,
            weighted=weighted,
            batch_size=batch_size,
            name=name,
            shuffle=shuffle,
            seed=seed,
        )
class PaddedGraphSequence(Sequence):
"""
A Keras-compatible data generator for training and evaluating graph classification models.
Use this class with the Keras methods :meth:`keras.Model.fit`,
:meth:`keras.Model.evaluate`, and
:meth:`keras.Model.predict`,
This class should be created using the `.flow(...)` method of
:class:`.PaddedGraphGenerator`.
Args:
graphs (list)): The graphs as StellarGraph objects.
selected_ilocs (array): an array of indices into ``graphs``, of shape N × K for some N and K.
targets (np.ndarray, optional): An optional array of graph targets of size (N x C),
where N is the number of selected graph ilocs and C is the target size (e.g., number of classes.)
normalize (bool, optional): Specifies whether the adjacency matrix for each graph should
be normalized or not. The default is True.
symmetric_normalization (bool, optional): Use symmetric normalization if True, that is left and right multiply
the adjacency matrix by the inverse square root of the degree matrix; otherwise left multiply the adjacency
matrix by the inverse of the degree matrix. This parameter is ignored if normalize=False.
batch_size (int, optional): The batch size. It defaults to 1.
name (str, optional): An optional name for this generator object.
shuffle (bool, optional): If True the node IDs will be shuffled at the end of each epoch.
seed (int, optional): Random seed.
"""
def __init__(
self,
graphs,
selected_ilocs,
targets=None,
normalize=True,
symmetric_normalization=True,
weighted=False,
batch_size=1,
name=None,
shuffle=False,
seed=None,
):
self.name = name
self.graphs = np.asanyarray(graphs)
if not isinstance(selected_ilocs, np.ndarray):
raise TypeError(
"selected_ilocs: expected a NumPy array, found {type(selected_ilocs).__name__}"
)
if not len(selected_ilocs.shape) == 2:
raise ValueError(
"selected_ilocs: expected a NumPy array of rank 2, found shape {selected_ilocs.shape}"
)
# each row of the input corresponds to a single dataset example, but we want to handle
# columns as bulk operations, and iterating over the major axis is easier
self.selected_ilocs = selected_ilocs.transpose()
self.normalize_adj = normalize
self.targets = targets
self.batch_size = batch_size
if targets is not None:
if len(selected_ilocs) != len(targets):
raise ValueError(
"expected the number of target values and the number of graph ilocs to be the same length,"
f"found {len(selected_ilocs)} graph ilocs and {len(targets)} targets."
)
self.targets = np.asanyarray(targets)
adjacencies = [graph.to_adjacency_matrix(weighted=weighted) for graph in graphs]
if self.normalize_adj:
self.normalized_adjs = [
normalize_adj(
adj, symmetric=symmetric_normalization, add_self_loops=True,
)
for adj in adjacencies
]
else:
self.normalize_adjs = adjacencies
self.normalized_adjs = np.asanyarray(self.normalized_adjs)
_, self._np_rs = random_state(seed)
self.shuffle = shuffle
self.on_epoch_end()
def _epoch_size(self):
    """Number of examples per epoch: the minor axis of ``selected_ilocs``."""
    ilocs = self.selected_ilocs
    return ilocs.shape[1]
def __len__(self):
    """Number of batches per epoch (last batch may be partial)."""
    num_batches = np.ceil(self._epoch_size() / self.batch_size)
    return int(num_batches)
def _pad_graphs(self, graphs, adj_graphs, max_nodes):
# pad adjacency and feature matrices to equal the size of those from the largest graph
features = [
np.pad(
graph.node_features(),
pad_width=((0, max_nodes - graph.number_of_nodes()), (0, 0)),
)
for graph in graphs
]
features = np.stack(features)
for adj in adj_graphs:
adj.resize((max_nodes, max_nodes))
adj_graphs = np.stack([adj.toarray() for adj in adj_graphs])
masks = np.full((len(graphs), max_nodes), fill_value=False, dtype=np.bool)
for index, graph in enumerate(graphs):
masks[index, : graph.number_of_nodes()] = True
# features is array of dimensionality
# batch size x N x F
# masks is array of dimensionality
# batch size x N
# adj_graphs is array of dimensionality
# batch size x N x N
# graph_targets is array of dimensionality
# batch size x C
# where N is the maximum number of nodes for largest graph in the batch, F is
# the node feature dimensionality, and C is the number of | |
"""This file contains the network model and data that holds the results.
"""
__author__ = '<NAME>'
from dataclasses import dataclass, field
from typing import List, Callable, Tuple, Any, Union
import sys
import gc
import logging
import itertools as it
from itertools import chain
from io import TextIOBase, StringIO
import random as rand
from pathlib import Path
import torch
from torch import nn
from tqdm import tqdm
from zensols.util import time
from zensols.config import Configurable, ConfigFactory, Writable
from zensols.persist import (
Deallocatable,
persisted, PersistedWork, PersistableContainer,
Stash, UnionStash,
)
from zensols.dataset import SplitStashContainer, DatasetSplitStash
from zensols.deeplearn import (
ModelError, EarlyBailError,
TorchConfig, DatasetSplitType, NetworkSettings
)
from zensols.deeplearn.result import (
EpochResult, ModelResult, ModelSettings, ModelResultManager,
)
from zensols.deeplearn.batch import BatchStash, Batch
from . import (
ModelResourceFactory, BaseNetworkModule,
ModelManager, UpdateAction,
BatchIterator, TrainManager,
)
# default message logger
logger = logging.getLogger(__name__ + '.status')
# logger for messages, which is active when the progress bar is not
progress_logger = logging.getLogger(__name__ + '.progress')
@dataclass
class ModelExecutor(PersistableContainer, Deallocatable, Writable):
"""This class creates and uses a network to train, validate and test the model.
This class is either configured using a
:class:`~zensols.config.factory.ConfigFactory` or is unpickled with
:class:`.ModelManager`. If the latter, it's from a previously trained (and
possibly tested) state.
Typically, after creating a nascent instance, :meth:`train` is called to
train the model. This returns the results, but the results are also
available via the :class:`ResultManager` using the :obj:`model_manager`
property. To load previous results, use
``executor.result_manager.load()``.
During training, the training set is used to train the weights of the model
provided by the executor in the :obj:`model_settings`, then validated using
the validation set. When the validation loss is minimized, the following
is saved to disk:
* Settings: :obj:`net_settings`, :obj:`model_settings`,
* the model weights,
* the results of the training and validation thus far,
* the entire configuration (which is later used to restore the
executor),
* random seed information, which includes Python, Torch and GPU random
state.
After the model is trained, you can immediately test the model with
:meth:`test`. To be more certain of being able to reproduce the same
results, it is recommended to load the model with
``model_manager.load_executor()``, which loads the last instance of the
model that produced a minimum validation loss.
:see: :class:`.ModelExecutor`
:see: :class:`.NetworkSettings`
:see: :class:`zensols.deeplearn.model.ModelSettings`
"""
ATTR_EXP_META = ('model_settings',)
config_factory: ConfigFactory = field()
"""The configuration factory that created this instance."""
config: Configurable = field()
"""The configuration used in the configuration factory to create this
instance.
"""
name: str = field()
"""The name given in the configuration."""
model_name: str = field()
"""A human readable name for the model."""
model_settings: ModelSettings = field()
"""The configuration of the model."""
net_settings: NetworkSettings = field()
"""The settings used to configure the network."""
dataset_stash: DatasetSplitStash = field()
"""The split data set stash that contains the ``BatchStash``, which
contains the batches on which to train and test.
"""
dataset_split_names: List[str] = field()
"""The list of split names in the ``dataset_stash`` in the order: train,
validation, test (see :meth:`_get_dataset_splits`)
"""
result_path: Path = field(default=None)
"""If not ``None``, a path to a directory where the results are to be
dumped; the directory will be created if it doesn't exist when the results
are generated.
"""
update_path: Path = field(default=None)
"""The path to check for commands/updates to make while training. If this is
set, and the file exists, then it is parsed as a JSON file. If the file
cannot be parsed, or 0 size etc., then the training is (early) stopped.
If the file can be parsed, and there is a single ``epoch`` dict entry, then
the current epoch is set to that value.
"""
intermediate_results_path: Path = field(default=None)
"""If this is set, then save the model and results to this path after
validation for each training epoch.
"""
progress_bar: bool = field(default=False)
"""Create text/ASCII based progress bar if ``True``."""
progress_bar_cols: int = field(default=None)
"""The number of console columns to use for the text/ASCII based progress
bar.
"""
def __post_init__(self):
    """Initialize non-field state after dataclass field assignment: persisted
    workers, the (initially empty) model slot, and the batch cache.
    """
    super().__init__()
    # NOTE(review): the original guard here was
    # ``if not isinstance(self.dataset_stash, DatasetSplitStash) and False:``
    # whose trailing ``and False`` made the raise unreachable dead code; the
    # dead branch is removed.  Re-enabling the check would change behavior
    # for callers passing other stash types, so it is not restored.
    self._model = None
    self._dealloc_model = False
    self.model_result: ModelResult = None
    self.batch_stash.delegate_attr: bool = True
    self._criterion_optimizer_scheduler = PersistedWork(
        '_criterion_optimizer_scheduler', self)
    self._result_manager = PersistedWork('_result_manager', self)
    self._train_manager = PersistedWork('_train_manager', self)
    self.cached_batches = {}
    self.debug = False
@property
def batch_stash(self) -> DatasetSplitStash:
    """The stash from which training/testing data is obtained.  It is
    expected to provide training, validation and test splits whose names are
    listed in :obj:`dataset_split_names`.
    """
    stash: DatasetSplitStash = self.dataset_stash.split_container
    return stash
@property
def feature_stash(self) -> Stash:
    """The stash used to generate the features; distinct from the batch
    source stash :obj:`batch_stash`.
    """
    container = self.batch_stash
    return container.split_stash_container
@property
def torch_config(self) -> TorchConfig:
    """The PyTorch configuration used to convert models and data (usually to
    a GPU) during training and test.
    """
    stash = self.batch_stash
    return stash.model_torch_config
@property
@persisted('_result_manager')
def result_manager(self) -> ModelResultManager:
    """The manager controlling the life cycle of results generated by this
    executor, or ``None`` when no :obj:`result_path` is configured.
    """
    path = self.result_path
    if path is None:
        return None
    return self._create_result_manager(path)
def _create_result_manager(self, path: Path) -> ModelResultManager:
    """Factory for the :class:`.ModelResultManager` rooted at ``path``."""
    return ModelResultManager(
        name=self.model_name,
        path=path,
        model_path=self.model_settings.path)
@property
@persisted('_model_manager')
def model_manager(self) -> ModelManager:
    """The manager used for controlling the training of the model."""
    model_path = self.model_settings.path
    manager = ModelManager(model_path, self.config_factory, self.name)
    return manager
@property
@persisted('_batch_iterator')
def batch_iterator(self) -> BatchIterator:
    """The iterator used to traverse batches; its concrete class is
    configured by :obj:`.ModelSettings.batch_iteration_class_name`.

    .. note:: the original docstring ("the train manager that assists with
       the training process") was copy-pasted from :obj:`train_manager` and
       described the wrong object.
    """
    resolver = self.config_factory.class_resolver
    batch_iter_class_name = self.model_settings.batch_iteration_class_name
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(f'batch_iteration: {batch_iter_class_name}')
    batch_iter_class = resolver.find_class(batch_iter_class_name)
    batch_iter = batch_iter_class(self, logger)
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(f'batch_iter={batch_iter}')
    return batch_iter
@property
def debug(self) -> Union[bool, int]:
    """The debug flag (or integer debug level) set on this executor."""
    return self._debug
@debug.setter
def debug(self, debug: Union[bool, int]):
    """Set the debug flag/level on both this executor and its batch
    iterator.
    """
    self._debug = debug
    self.batch_iterator.debug = debug
@property
@persisted('_train_manager')
def train_manager(self) -> TrainManager:
    """The train manager that assists with the training process."""
    max_increased = self.model_settings.max_consecutive_increased_count
    return TrainManager(
        logger, progress_logger, self.update_path, max_increased)
def _weight_reset(self, m):
    """Reset ``m``'s parameters in place when it supports doing so."""
    reset_fn = getattr(m, 'reset_parameters', None)
    if callable(reset_fn):
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug(f'resetting parameters on {m}')
        reset_fn()
def reset(self):
    """Reset the executor to its nascent state: clear the persisted
    criterion/optimizer/scheduler and release the model.
    """
    if logger.isEnabledFor(logging.INFO):
        logger.info('resetting executor')
    self._criterion_optimizer_scheduler.clear()
    self._deallocate_model()
def load(self) -> nn.Module:
    """Discard any in-memory trained state and reload the last trained model
    from the file system.

    :return: the reloaded model, also registered on this executor
    """
    if logger.isEnabledFor(logging.INFO):
        logger.info('reloading model weights')
    self._deallocate_model()
    self.model_manager._load_model_optim_weights(self)
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(f'copied model to {self.model.device}')
    return self.model
def deallocate(self):
    """Release all resources held by this executor: the model, cached
    batches, the dataset stash, settings, and persisted workers.

    The teardown order matters: the model and batches are freed before the
    stash and settings they were built from.
    """
    super().deallocate()
    self._deallocate_model()
    self.deallocate_batches()
    self._try_deallocate(self.dataset_stash)
    self._deallocate_settings()
    self._criterion_optimizer_scheduler.deallocate()
    self._result_manager.deallocate()
    # drop the reference so results cannot outlive the deallocated executor
    self.model_result = None
def _deallocate_model(self):
    """Deallocate the wrapped model, but only when this executor owns it
    (see ``_dealloc_model``).
    """
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug('dealloc model: model exists/dealloc: ' +
                     f'{self._model is not None}/{self._dealloc_model}')
    owned = self._dealloc_model and self._model is not None
    if owned:
        self._try_deallocate(self._model)
        self._model = None
def _deallocate_settings(self):
    """Deallocate both the model settings and the network settings."""
    for settings in (self.model_settings, self.net_settings):
        settings.deallocate()
def deallocate_batches(self):
    """Deallocate every cached batch across all dataset splits, then clear
    the cache.
    """
    for ds_sets in self.cached_batches.values():
        for batches in ds_sets:
            for batch in batches:
                batch.deallocate()
    self.cached_batches.clear()
@property
def model_exists(self) -> bool:
    """``True`` when a model has been trained or loaded into this
    executor.
    """
    return self._model is not None
@property
def model(self) -> BaseNetworkModule:
    """The PyTorch module used for training and test.

    :raises ModelError: if no model has been trained or loaded yet
    """
    if self._model is None:
        # FIX: original message was garbled ("No model, is populated")
        raise ModelError("No model is populated; use 'load'")
    return self._model
@model.setter
def model(self, model: BaseNetworkModule):
    """Set the PyTorch module used for training and test.  The executor does
    not take ownership of the model set this way (``take_owner=False``).
    """
    self._set_model(model, False, True)
def _set_model(self, model: BaseNetworkModule,
               take_owner: bool, deallocate: bool):
    """Install ``model`` on this executor and invalidate the persisted
    criterion/optimizer/scheduler.

    :param take_owner: whether this executor deallocates the model later
    :param deallocate: whether to first deallocate any currently held model
    """
    if logger.isEnabledFor(level=logging.DEBUG):
        logger.debug(f'setting model: {type(model)}')
    if deallocate:
        self._deallocate_model()
    self._model = model
    self._dealloc_model = take_owner
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(f'setting dealloc model: {self._dealloc_model}')
    self._criterion_optimizer_scheduler.clear()
def _get_or_create_model(self) -> BaseNetworkModule:
    """Return the current model, creating one (and taking ownership of it)
    when none exists yet.
    """
    model = self._model
    if model is None:
        self._dealloc_model = True
        model = self._create_model()
        self._model = model
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(f'created model as dealloc: {self._dealloc_model}')
    return model
def _create_model(self) -> BaseNetworkModule:
    """Create the network model instance via the model manager."""
    manager: ModelManager = self.model_manager
    model = manager._create_module(self.net_settings, self.debug)
    if logger.isEnabledFor(logging.INFO):
        logger.info(f'created model on {model.device} ' +
                    f'with {self.torch_config}')
    return model
def _create_model_result(self) -> ModelResult:
    """Create a fresh :class:`.ModelResult` named after this model and its
    run count.
    """
    run_name = f'{self.model_name}: {ModelResult.get_num_runs()}'
    res = ModelResult(
        self.config, run_name,
        self.model_settings, self.net_settings,
        self.batch_stash.decoded_attributes)
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(f'creating model result ({id(res)}): {self.model_name}')
    return res
@property
@persisted('_criterion_optimizer_scheduler')
def criterion_optimizer_scheduler(self) -> \
        Tuple[nn.L1Loss, torch.optim.Optimizer, Any]:
    """The loss function, gradient descent optimizer, and learning rate
    scheduler used for training.
    """
    optimizer, scheduler = self._create_optimizer_scheduler()
    criterion = self._create_criterion()
    return criterion, optimizer, scheduler
def _create_criterion(self) -> torch.optim.Optimizer:
"""Factory method to create the loss function and optimizer.
"""
resolver = self.config_factory.class_resolver
criterion_class_name = self.model_settings.criterion_class_name
if logger.isEnabledFor(logging.DEBUG):
logger.debug(f'criterion: {criterion_class_name}')
criterion_class = resolver.find_class(criterion_class_name)
| |
size, field_parts, tmp_list, field_sgmnt_lst)
fout_header.write(";\n")
if break_point != (end_idx - 1):
fout_header.write("#elif (BYTE_ORDER == LITTLE_ENDIAN)\n")
fout_header.write(spaces(8) + "%s " + spaces(4) + " " %
(predict_type(s[1])))
for k in range(len(field_parts)):
segment = field_parts[k]
for j in reversed(range(len(segment))):
if k == 0 and j == (len(segment)-1):
fout_header.write("%s : %s" %
(segment[j][0], segment[j][1]))
else:
fout_header.write(
",\n" + spaces(12) + " " + spaces(4) + " %s : %s" % (segment[j][0], segment[j][1]))
fout_header.write(";\n")
fout_header.write("#endif\n")
field_parts = []
return
def make_header_struct(fout_header, check_points, cumulative_hdr_len, header_type):
    """Emit the C struct member declarations for a protocol header.

    Fields are grouped at each byte-aligned boundary (``check_points``).  A
    group consisting of a single field becomes a plain member; a group whose
    total width is 24/40/48 bits is delegated to ``handle_special_len``; any
    other group is emitted twice as bitfields (big- and little-endian order,
    guarded by ``BYTE_ORDER``), with fields wider than the remaining bits of
    the current byte split into segments named ``<field>_1``, ``<field>_2``...

    :param fout_header: open file object for the generated ``.h`` file
    :param check_points: indices into ``header_type["fields"]`` at which the
        cumulative bit length is a multiple of 8
    :param cumulative_hdr_len: cumulative bit length after each field
    :param header_type: header description holding a ``fields`` list of
        ``[name, bit_width]`` pairs
    :return: dict mapping each field name to its list of segment bit widths
    """
    init_idx = 0
    bias = 0   # bits already emitted before the current group
    cap = 8    # bits remaining in the current byte
    # NOTE(review): 'fields' is accumulated but never returned or read by
    # this function; kept for parity with the original implementation.
    fields = []
    field_sgmnt_lst = {}
    for i in range(len(check_points)):
        if i == 0:
            if check_points[i] == 0:
                # single byte-aligned field: plain struct member
                fout_header.write(spaces(8) + "%s " % (
                    predict_type(cumulative_hdr_len[check_points[i]] - bias)) +
                    spaces(4) + " ")
                fout_header.write("%s;\n" %
                                  (header_type["fields"][init_idx][0]))
                fields.append([(header_type["fields"][init_idx][0], predict_type(
                    cumulative_hdr_len[check_points[i]] - bias))])
                field_sgmnt_lst[header_type["fields"][init_idx][0]] = [
                    header_type["fields"][init_idx][1]]
            elif cumulative_hdr_len[check_points[i]] in (24, 40, 48):
                # widths with no native C type use the uintNN_t helpers
                handle_special_len(fout_header, field_sgmnt_lst,
                                   cumulative_hdr_len[check_points[i]],
                                   header_type["fields"], init_idx,
                                   check_points[i] + 1)
            else:
                # multi-field group: emit bitfields for both byte orders
                field_parts = []
                tmp_list = []
                fout_header.write("#if (BYTE_ORDER == BIG_ENDIAN)\n")
                fout_header.write(spaces(8) + "%s " % (
                    predict_type(cumulative_hdr_len[check_points[i]] - bias)) +
                    spaces(4) + " ")
                size = header_type["fields"][init_idx][1]
                if (size - cap) <= 0:
                    # first field fits in the remaining bits of this byte
                    fout_header.write("%s : %s" % (
                        header_type["fields"][init_idx][0],
                        header_type["fields"][init_idx][1]))
                    tmp_list.extend(
                        [(header_type["fields"][init_idx][0],
                          header_type["fields"][init_idx][1])])
                    field_sgmnt_lst[header_type["fields"][init_idx][0]] = [
                        header_type["fields"][init_idx][1]]
                    if (size - cap) == 0:
                        cap = 8
                        field_parts.append(tmp_list)
                        tmp_list = []
                    else:
                        cap = cap - size
                else:
                    # first field is wider than the current byte: split into
                    # numbered segments
                    offset = 1
                    fout_header.write("%s_%s : %s" % (
                        header_type["fields"][init_idx][0], offset, cap))
                    field_sgmnt_lst[header_type["fields"][init_idx][0]] = [cap]
                    tmp_list.extend(
                        [(header_type["fields"][init_idx][0] + ("_%s" % (offset)), cap)])
                    field_parts.append(tmp_list)
                    tmp_list = []
                    size = size - cap
                    cap = 8
                    offset += 1
                    while size >= cap:
                        fout_header.write(
                            ",\n" + spaces(12) + " " + spaces(4) + " %s_%s : %s" % (
                                header_type["fields"][init_idx][0], offset, cap))
                        field_sgmnt_lst[header_type["fields"][init_idx][0]].extend([
                            cap])
                        tmp_list.extend(
                            [(header_type["fields"][init_idx][0] + ("_%s" % (offset)), cap)])
                        field_parts.append(tmp_list)
                        tmp_list = []
                        size = size - cap
                        offset += 1
                    if size > 0:
                        fout_header.write(
                            ",\n" + spaces(12) + " " + spaces(4) + " %s_%s : %s" % (
                                header_type["fields"][init_idx][0], offset, size))
                        field_sgmnt_lst[header_type["fields"]
                                        [init_idx][0]].extend([size])
                        tmp_list.extend(
                            [(header_type["fields"][init_idx][0] + ("_%s" % (offset)), size)])
                        cap = cap - size
                for field in header_type["fields"][init_idx + 1: check_points[i] + 1]:
                    size = field[1]
                    cap = field_segmenter(
                        fout_header, field, cap, size, field_parts, tmp_list,
                        field_sgmnt_lst)
                fout_header.write(";\n")
                fields.append(field_parts)
                fout_header.write("#elif (BYTE_ORDER == LITTLE_ENDIAN)\n")
                fout_header.write(spaces(8) + "%s " % (
                    predict_type(cumulative_hdr_len[check_points[i]] - bias)) +
                    spaces(4) + " ")
                # replay the segments in byte-swapped order
                for k in range(len(field_parts)):
                    segment = field_parts[k]
                    for j in reversed(range(len(segment))):
                        if k == 0 and j == (len(segment) - 1):
                            fout_header.write("%s : %s" %
                                              (segment[j][0], segment[j][1]))
                        else:
                            fout_header.write(
                                ",\n" + spaces(12) + " " + spaces(4) + " %s : %s" % (
                                    segment[j][0], segment[j][1]))
                fout_header.write(";\n")
                fout_header.write("#endif\n")
                field_parts = []
        elif i > 0:
            if check_points[i] - check_points[i - 1] == 1:
                # single byte-aligned field: plain struct member
                fout_header.write(spaces(8) + "%s " % (
                    predict_type(cumulative_hdr_len[check_points[i]] - bias)) +
                    spaces(4) + " ")
                fout_header.write("%s;\n" %
                                  (header_type["fields"][init_idx][0]))
                # NOTE(review): appends a bare tuple here while the i == 0
                # branch appends a one-element list; 'fields' is never read,
                # so the inconsistency is harmless and kept as-is.
                fields.append((header_type["fields"][init_idx][0], predict_type(
                    cumulative_hdr_len[check_points[i]] - bias)))
                field_sgmnt_lst[header_type["fields"][init_idx][0]] = [
                    header_type["fields"][init_idx][1]]
            elif (cumulative_hdr_len[check_points[i]] - bias) in (24, 40, 48):
                handle_special_len(fout_header, field_sgmnt_lst,
                                   cumulative_hdr_len[check_points[i]] - bias,
                                   header_type["fields"], init_idx,
                                   check_points[i] + 1)
            else:
                field_parts = []
                tmp_list = []
                fout_header.write("#if (BYTE_ORDER == BIG_ENDIAN)\n")
                fout_header.write(spaces(8) + "%s " % (
                    predict_type(cumulative_hdr_len[check_points[i]] - bias)) +
                    spaces(4) + " ")
                size = header_type["fields"][init_idx][1]
                if (size - cap) <= 0:
                    fout_header.write("%s : %s" % (
                        header_type["fields"][init_idx][0],
                        header_type["fields"][init_idx][1]))
                    tmp_list.extend(
                        [(header_type["fields"][init_idx][0],
                          header_type["fields"][init_idx][1])])
                    field_sgmnt_lst[header_type["fields"][init_idx][0]] = [
                        header_type["fields"][init_idx][1]]
                    if (size - cap) == 0:
                        cap = 8
                        field_parts.append(tmp_list)
                        tmp_list = []
                    else:
                        cap = cap - size
                else:
                    offset = 1
                    fout_header.write("%s_%s : %s" % (
                        header_type["fields"][init_idx][0], offset, cap))
                    field_sgmnt_lst[header_type["fields"][init_idx][0]] = [cap]
                    tmp_list.extend(
                        [(header_type["fields"][init_idx][0] + ("_%s" % (offset)), cap)])
                    field_parts.append(tmp_list)
                    tmp_list = []
                    size = size - cap
                    cap = 8
                    offset += 1
                    while size >= cap:
                        # FIX: was "%s%s : %s" (missing underscore), which
                        # emitted segment names like 'field2' that disagreed
                        # with the 'field_2' names recorded in tmp_list and
                        # with the i == 0 branch
                        fout_header.write(
                            ",\n" + spaces(12) + " " + spaces(4) + " %s_%s : %s" % (
                                header_type["fields"][init_idx][0], offset, cap))
                        field_sgmnt_lst[header_type["fields"][init_idx][0]].extend([
                            cap])
                        tmp_list.extend(
                            [(header_type["fields"][init_idx][0] + ("_%s" % (offset)), cap)])
                        field_parts.append(tmp_list)
                        tmp_list = []
                        size = size - cap
                        offset += 1
                    if size > 0:
                        # FIX: same missing underscore as above
                        fout_header.write(
                            ",\n" + spaces(12) + " " + spaces(4) + " %s_%s : %s" % (
                                header_type["fields"][init_idx][0], offset, size))
                        field_sgmnt_lst[header_type["fields"]
                                        [init_idx][0]].extend([size])
                        tmp_list.extend(
                            [(header_type["fields"][init_idx][0] + ("_%s" % (offset)), size)])
                        cap = cap - size
                for field in header_type["fields"][init_idx + 1: check_points[i] + 1]:
                    size = field[1]
                    cap = field_segmenter(
                        fout_header, field, cap, size, field_parts, tmp_list,
                        field_sgmnt_lst)
                fout_header.write(";\n")
                fields.append(field_parts)
                fout_header.write("#elif (BYTE_ORDER == LITTLE_ENDIAN)\n")
                fout_header.write(spaces(8) + "%s " % (
                    predict_type(cumulative_hdr_len[check_points[i]] - bias)) +
                    spaces(4) + " ")
                for k in range(len(field_parts)):
                    segment = field_parts[k]
                    for j in reversed(range(len(segment))):
                        if k == 0 and j == (len(segment) - 1):
                            fout_header.write("%s : %s" %
                                              (segment[j][0], segment[j][1]))
                        else:
                            fout_header.write(
                                ",\n" + spaces(12) + " " + spaces(4) + " %s : %s" % (
                                    segment[j][0], segment[j][1]))
                fout_header.write(";\n")
                fout_header.write("#endif\n")
                field_parts = []
        # advance to the next group
        init_idx = check_points[i] + 1
        bias = cumulative_hdr_len[check_points[i]]
    return field_sgmnt_lst
def gen_hex_mask_cumulative(field_segments, total_len):
    """Build one hex mask per field segment, each offset by the cumulative
    width of the segments emitted so far.

    :param field_segments: bit widths of the consecutive segments of a field
    :param total_len: total bit width of the containing storage unit
    :return: list of masks (as produced by ``gen_hex_mask``), one per segment
    """
    # idiom: iterate the segments directly instead of indexing with
    # range(len(...))
    hex_masks = []
    consumed = 0
    for segment in field_segments:
        consumed += segment
        hex_masks.append(gen_hex_mask(total_len - consumed, segment))
    return hex_masks
def make_template(control_graph, header, header_type, destination, header_ports):
'''makes the actual lua script given the relevant header type and next and previous state transition information'''
fout_header = open(destination + ".h", "w")
fout_source = open(destination + ".cpp", "w")
fout_header.write(
"//Template for addition of new protocol '%s'\n\n" % header)
fout_header.write("#ifndef %s\n" % ("P4_" + header.upper() + "_LAYER"))
fout_header.write("#define %s\n\n" % ("P4_" + header.upper() + "_LAYER"))
fout_header.write("#include <cstring>\n")
fout_header.write("#include \"Layer.h\"\n")
fout_header.write(
'#include "uint24_t.h"\n#include "uint40_t.h"\n#include "uint48_t.h"\n')
fout_header.write(
"#if defined(WIN32) || defined(WINx64)\n#include <winsock2.h>\n#elif LINUX\n#include <in.h>\n#endif\n\n")
fout_header.write("namespace pcpp{\n" +
spaces(4) + "#pragma pack(push,1)\n")
fout_header.write(spaces(4) + "struct %s{\n" % (header.lower() + "hdr"))
cumulative_hdr_len = [None] * len(header_type["fields"])
check_points = []
total_len = 0
i = 0
for field in header_type["fields"]:
try:
total_len += field[1]
cumulative_hdr_len[i] = total_len
if total_len % 8 == 0:
check_points.append(i)
i += 1
except TypeError:
field[1] = int(input('Variable length field "' + field[0] +
'" detected in "' + header + '". Enter its length\n'))
total_len += field[1]
cumulative_hdr_len[i] = total_len
if total_len % 8 == 0:
check_points.append(i)
i += 1
field_sgmnt_lst = make_header_struct(
fout_header, check_points, cumulative_hdr_len, header_type)
fout_header.write(spaces(4) + "};\n\n")
fout_header.write(spaces(4) + "#pragma pack(pop)\n")
fout_header.write(
spaces(4) + "class %sLayer: public Layer{\n" % (header.capitalize()))
fout_header.write(spaces(8) + "public:\n")
# constructor to constuct packet from raw data
fout_header.write(
spaces(8) + "%sLayer(uint8_t* data, size_t dataLen, Layer* prevLayer, Packet* packet): Layer(data, dataLen, prevLayer, packet) {m_Protocol = P4_%s;}\n" % (
header.capitalize(), header.upper()))
# default constructor for packet with empty raw data
fout_header.write(
spaces(8) + "%sLayer(){\n" % (header.capitalize()) + spaces(12) + "m_DataLen = sizeof(%shdr);\n" % (header.lower()) + spaces(12) + "m_Data = new uint8_t[m_DataLen];\n" + spaces(12) + "memset(m_Data, 0, m_DataLen);\n" + spaces(12) + "m_Protocol = P4_%s;\n" % (header.upper()) + spaces(8) + "}\n")
fout_header.write("\n" + spaces(8) +
" // Getters and Setters for fields\n")
for field in header_type["fields"]:
fout_header.write(spaces(8) + " %s get%s();\n" %
(predict_input_type(field[1]), str(field[0]).capitalize()))
fout_header.write(spaces(8) + " void set%s(%s value);\n" %
(str(field[0]).capitalize(), predict_input_type(field[1])))
fout_header.write("\n" + spaces(8) + " inline %shdr* get%sHeader() { return (%shdr*)m_Data; }\n\n" % (
header.lower(), header.capitalize(), header.lower()))
fout_header.write(spaces(8) + " void parseNextLayer();\n\n")
fout_header.write(spaces(
8) + " inline size_t getHeaderLen() { return sizeof(%shdr); }\n\n" % (header.lower()))
fout_header.write(spaces(8) + " void computeCalculateFields() {}\n\n")
fout_header.write(spaces(8) + " std::string toString();\n\n")
fout_header.write(spaces(
8) + " OsiModelLayer getOsiModelLayer() { return OsiModelApplicationLayer; }\n\n")
fout_header.write(spaces(4) + "};\n")
fout_header.write("}\n#endif")
fout_header.close()
default_next_transition = None
transition_key = None
next_transitions = []
for edge in control_graph:
if (header == edge[0]):
if (edge[1] != None):
transition_key = edge[1]
next_transitions.append((edge[-1], edge[-2]))
else:
default_next_transition = edge[-1]
fout_source.write(
"#define LOG_MODULE PacketLogModule%sLayer\n\n" % (header.capitalize()))
fout_source.write("#include \"%s.h\"\n" %
(destination[destination.rfind('/') + 1:]))
if (len(next_transitions) > 0):
for transition in next_transitions:
fout_source.write("#include \"%s.h\"\n" %
(local_name+'_'+transition[0]))
fout_source.write(
"#include \"PayloadLayer.h\"\n#include \"IpUtils.h\"\n#include \"Logger.h\"\n")
fout_source.write(
"#include <string.h>\n#include <sstream>\n#include <endian.h>\n\n")
fout_source.write("namespace pcpp{\n")
for field in header_type["fields"]:
field_segments = field_sgmnt_lst[field[0]]
fout_source.write(spaces(4) + "%s %sLayer::get%s(){\n" % (
predict_input_type(field[1]), header.capitalize(), str(field[0]).capitalize()))
if len(field_segments) == 1:
fout_source.write(spaces(8) + "%s %s;\n" %
(predict_type(field[1]), field[0]))
fout_source.write(spaces(
8) + "%shdr* hdrdata = (%shdr*)m_Data;\n" % (header.lower(), header.lower()))
if (field[1] == 24 or field[1] == 40 or field[1] == 48):
fout_source.write(
spaces(8) + "UINT%d_HTON(%s,hdrdata->%s);\n" % (field[1], field[0], field[0]))
fout_source.write(
spaces(8) + "%s return_val = UINT%d_GET(%s);\n" % (predict_input_type(field[1]), field[1], field[0]))
fout_source.write(
spaces(8) + "return return_val;\n" + spaces(4) + "}\n\n")
else:
fout_source.write(spaces(8) + "%s = %s(hdrdata->%s);\n" %
(field[0], host_network_conversion(field), field[0]))
fout_source.write(spaces(8) + "return %s;\n" %
(field[0]) + spaces(4) + "}\n\n")
elif len(field_segments) > 1:
fout_source.write(spaces(8) + "%s %s;\n" %
(predict_input_type(field[1]), field[0]))
fout_source.write(spaces(
8) + | |
# Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Power flow data for IEEE 300 bus test case.
"""
from numpy import array
def case300():
"""Power flow data for IEEE 300 bus test case.
Please see L{caseformat} for details on the case file format.
This data was converted from IEEE Common Data Format
(ieee300cdf.txt) on 20-Sep-2004 by cdf2matp, rev. 1.11
See end of file for warnings generated during conversion.
Converted from IEEE CDF file from:
U{http://www.ee.washington.edu/research/pstca/}
13/05/91 CYME INTERNATIONAL 100.0 1991 S IEEE 300-BUS TEST SYSTEM
@return: Power flow data for IEEE 300 bus test case.
"""
ppc = {"version": '2'}
##----- Power Flow Data -----##
## system MVA base
ppc["baseMVA"] = 100.0
## bus data
# bus_i type Pd Qd Gs Bs area Vm Va baseKV zone Vmax Vmin
ppc["bus"] = array([
[1, 1, 90, 49, 0, 0, 1, 1.0284, 5.95, 115, 1, 1.06, 0.94],
[2, 1, 56, 15, 0, 0, 1, 1.0354, 7.74, 115, 1, 1.06, 0.94],
[3, 1, 20, 0, 0, 0, 1, 0.9971, 6.64, 230, 1, 1.06, 0.94],
[4, 1, 0, 0, 0, 0, 1, 1.0308, 4.71, 345, 1, 1.06, 0.94],
[5, 1, 353, 130, 0, 0, 1, 1.0191, 4.68, 115, 1, 1.06, 0.94],
[6, 1, 120, 41, 0, 0, 1, 1.0312, 6.99, 115, 1, 1.06, 0.94],
[7, 1, 0, 0, 0, 0, 1, 0.9934, 6.19, 230, 1, 1.06, 0.94],
[8, 2, 63, 14, 0, 0, 1, 1.0153, 2.4, 115, 1, 1.06, 0.94],
[9, 1, 96, 43, 0, 0, 1, 1.0034, 2.85, 115, 1, 1.06, 0.94],
[10, 2, 153, 33, 0, 0, 1, 1.0205, 1.35, 230, 1, 1.06, 0.94],
[11, 1, 83, 21, 0, 0, 1, 1.0057, 2.46, 115, 1, 1.06, 0.94],
[12, 1, 0, 0, 0, 0, 1, 0.9974, 5.21, 230, 1, 1.06, 0.94],
[13, 1, 58, 10, 0, 0, 1, 0.9977, -0.55, 115, 1, 1.06, 0.94],
[14, 1, 160, 60, 0, 0, 1, 0.9991, -4.81, 115, 1, 1.06, 0.94],
[15, 1, 126.7, 23, 0, 0, 1, 1.0343, -8.59, 115, 1, 1.06, 0.94],
[16, 1, 0, 0, 0, 0, 1, 1.0315, -2.65, 345, 1, 1.06, 0.94],
[17, 1, 561, 220, 0, 0, 1, 1.0649, -13.1, 115, 1, 1.06, 0.94],
[19, 1, 0, 0, 0, 0, 1, 0.982, 1.08, 230, 1, 1.06, 0.94],
[20, 2, 605, 120, 0, 0, 1, 1.001, -2.46, 115, 1, 1.06, 0.94],
[21, 1, 77, 1, 0, 0, 1, 0.9752, 1.62, 230, 1, 1.06, 0.94],
[22, 1, 81, 23, 0, 0, 1, 0.9963, -1.97, 115, 1, 1.06, 0.94],
[23, 1, 21, 7, 0, 0, 1, 1.0501, 3.94, 115, 1, 1.06, 0.94],
[24, 1, 0, 0, 0, 0, 1, 1.0057, 6.02, 230, 1, 1.06, 0.94],
[25, 1, 45, 12, 0, 0, 1, 1.0234, 1.44, 115, 1, 1.06, 0.94],
[26, 1, 28, 9, 0, 0, 1, 0.9986, -1.73, 115, 1, 1.06, 0.94],
[27, 1, 69, 13, 0, 0, 1, 0.975, -4.9, 115, 1, 1.06, 0.94],
[33, 1, 55, 6, 0, 0, 1, 1.0244, -12.02, 115, 1, 1.06, 0.94],
[34, 1, 0, 0, 0, 0, 1, 1.0414, -7.94, 345, 1, 1.06, 0.94],
[35, 1, 0, 0, 0, 0, 1, 0.9757, -25.72, 115, 1, 1.06, 0.94],
[36, 1, 0, 0, 0, 0, 1, 1.0011, -22.59, 230, 1, 1.06, 0.94],
[37, 1, 85, 32, 0, 0, 1, 1.0201, -11.23, 115, 1, 1.06, 0.94],
[38, 1, 155, 18, 0, 0, 1, 1.0202, -12.56, 115, 1, 1.06, 0.94],
[39, 1, 0, 0, 0, 0, 1, 1.0535, -5.81, 345, 1, 1.06, 0.94],
[40, 1, 46, -21, 0, 0, 1, 1.0216, -12.78, 115, 1, 1.06, 0.94],
[41, 1, 86, 0, 0, 0, 1, 1.0292, -10.45, 115, 1, 1.06, 0.94],
[42, 1, 0, 0, 0, 0, 1, 1.0448, -7.44, 345, 1, 1.06, 0.94],
[43, 1, 39, 9, 0, 0, 1, 1.0006, -16.79, 115, 1, 1.06, 0.94],
[44, 1, 195, 29, 0, 0, 1, 1.0086, -17.47, 115, 1, 1.06, 0.94],
[45, 1, 0, 0, 0, 0, 1, 1.0215, -14.74, 230, 1, 1.06, 0.94],
[46, 1, 0, 0, 0, 0, 1, 1.0344, -11.75, 345, 1, 1.06, 0.94],
[47, 1, 58, 11.8, 0, 0, 1, 0.9777, -23.17, 115, 1, 1.06, 0.94],
[48, 1, 41, 19, 0, 0, 1, 1.0019, -16.09, 115, 1, 1.06, 0.94],
[49, 1, 92, 26, 0, 0, 1, 1.0475, -2.95, 115, 1, 1.06, 0.94],
[51, 1, -5, 5, 0, 0, 1, 1.0253, -8.15, 115, 1, 1.06, 0.94],
[52, 1, 61, 28, 0, 0, 1, 0.9979, -11.86, 115, 1, 1.06, 0.94],
[53, 1, 69, 3, 0, 0, 1, 0.9959, -17.6, 115, 1, 1.06, 0.94],
[54, 1, 10, 1, 0, 0, 1, 1.005, -16.25, 115, 1, 1.06, 0.94],
[55, 1, 22, 10, 0, 0, 1, 1.015, -12.21, 115, 1, 1.06, 0.94],
[57, 1, 98, 20, 0, 0, 1, 1.0335, -8, 115, 1, 1.06, 0.94],
[58, 1, 14, 1, 0, 0, 1, 0.9918, -5.99, 115, 1, 1.06, 0.94],
[59, 1, 218, 106, 0, 0, 1, 0.9789, -5.29, 115, 1, 1.06, 0.94],
[60, 1, 0, 0, 0, 0, 1, 1.0246, -9.56, 230, 1, 1.06, 0.94],
[61, 1, 227, 110, 0, 0, 1, 0.9906, -3.47, 115, 1, 1.06, 0.94],
[62, 1, 0, 0, 0, 0, 1, 1.016, -1.1, 230, 1, 1.06, 0.94],
[63, 2, 70, 30, 0, 0, 1, 0.9583, -17.62, 115, 1, 1.06, 0.94],
[64, 1, 0, 0, 0, 0, 1, 0.948, -12.97, 230, 1, 1.06, 0.94],
[69, 1, 0, 0, 0, 0, 1, 0.963, -25.66, 115, 1, 1.06, 0.94],
[70, 1, 56, 20, 0, 0, 1, 0.9513, -35.16, 115, 1, 1.06, 0.94],
[71, 1, 116, 38, 0, 0, 1, 0.9793, -29.88, 115, 1, 1.06, 0.94],
[72, 1, 57, 19, 0, 0, 1, 0.9696, -27.48, 115, 1, 1.06, 0.94],
[73, 1, 224, 71, 0, 0, 1, 0.9775, -25.77, 115, 1, 1.06, 0.94],
[74, 1, 0, 0, 0, 0, 1, 0.9964, -22, 230, 1, 1.06, 0.94],
[76, 2, 208, 107, 0, 0, 1, 0.9632, -26.54, 115, 1, 1.06, 0.94],
[77, 1, 74, 28, 0, 0, 1, 0.9837, -24.94, 115, 1, 1.06, 0.94],
[78, 1, 0, 0, 0, 0, 1, 0.99, -24.05, 115, 1, 1.06, 0.94],
[79, 1, 48, 14, 0, 0, 1, 0.982, -24.97, 115, 1, 1.06, 0.94],
[80, 1, 28, 7, 0, 0, 1, 0.9872, -24.97, 115, 1, 1.06, 0.94],
[81, 1, 0, 0, 0, 0, 1, 1.034, -18.89, 345, 1, 1.06, 0.94],
[84, 2, 37, 13, 0, 0, 1, 1.025, -17.16, 115, 1, 1.06, 0.94],
[85, 1, 0, 0, 0, 0, 1, 0.9872, -17.68, 230, 1, 1.06, 0.94],
[86, 1, 0, 0, 0, 0, 1, 0.9909, -14.19, 230, 1, 1.06, 0.94],
[87, 1, 0, 0, 0, 0, 1, 0.9921, -7.77, 230, 1, 1.06, 0.94],
[88, 1, 0, 0, 0, 0, 1, 1.0151, -20.96, 230, 1, 1.06, 0.94],
[89, 1, 44.2, 0, 0, 0, 1, 1.0317, -11.13, 115, 1, 1.06, 0.94],
[90, 1, 66, 0, 0, 0, 1, 1.0272, -11.23, 115, 1, 1.06, 0.94],
[91, 2, 17.4, 0, 0, 0, 1, 1.052, -9.4, 115, 1, 1.06, 0.94],
[92, 2, 15.8, 0, 0, 0, 1, 1.052, -6.2, 115, 1, 1.06, 0.94],
[94, 1, 60.3, 0, 0, 0, 1, 0.993, -9.42, 115, 1, 1.06, 0.94],
[97, 1, 39.9, 0, 0, 0, 1, 1.0183, -13.24, 115, 1, 1.06, 0.94],
[98, 2, 66.7, 0, 0, 0, 1, 1, -14.6, 115, 1, 1.06, 0.94],
[99, 1, 83.5, 0, 0, 0, 1, 0.9894, -20.27, 115, 1, 1.06, 0.94],
[100, 1, 0, 0, 0, 0, 1, 1.006, -14.45, 115, 1, 1.06, 0.94],
[102, 1, 77.8, 0, 0, 0, 1, 1.0008, -15.23, 115, 1, 1.06, 0.94],
[103, 1, 32, 0, 0, 0, 1, 1.0288, -12.06, 115, 1, 1.06, 0.94],
[104, 1, 8.6, 0, 0, 0, 1, 0.9958, -17.33, 115, 1, 1.06, 0.94],
[105, 1, 49.6, 0, 0, 0, 1, 1.0223, -12.94, 115, 1, 1.06, 0.94],
[107, 1, 4.6, 0, 0, 0, 1, 1.0095, -16.03, 115, 1, 1.06, 0.94],
[108, 2, | |
self["Back Side Drape Beam-Diffuse Solar Transmittance"]
@back_side_drape_beamdiffuse_solar_transmittance.setter
def back_side_drape_beamdiffuse_solar_transmittance(self, value=None):
""" Corresponds to IDD field `Back Side Drape Beam-Diffuse Solar Transmittance`
"""
self["Back Side Drape Beam-Diffuse Solar Transmittance"] = value
@property
def front_side_drape_beamdiffuse_solar_reflectance(self):
    """IDD field `Front Side Drape Beam-Diffuse Solar Reflectance`.

    Front-side beam-diffuse solar reflectance at normal incidence,
    averaged over the entire solar spectrum.

    | Units: dimensionless
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Front Side Drape Beam-Diffuse Solar Reflectance"
    return self[key]

@front_side_drape_beamdiffuse_solar_reflectance.setter
def front_side_drape_beamdiffuse_solar_reflectance(self, value=None):
    """Set IDD field `Front Side Drape Beam-Diffuse Solar Reflectance`.

    Args:
        value (float): new field value

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Front Side Drape Beam-Diffuse Solar Reflectance"
    self[key] = value
@property
def back_side_drape_beamdiffuse_solar_reflectance(self):
    """IDD field `Back Side Drape Beam-Diffuse Solar Reflectance`.

    Back-side beam-diffuse solar reflectance at normal incidence,
    averaged over the entire solar spectrum.

    | Units: dimensionless
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Back Side Drape Beam-Diffuse Solar Reflectance"
    return self[key]

@back_side_drape_beamdiffuse_solar_reflectance.setter
def back_side_drape_beamdiffuse_solar_reflectance(self, value=None):
    """Set IDD field `Back Side Drape Beam-Diffuse Solar Reflectance`.

    Args:
        value (float): new field value

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Back Side Drape Beam-Diffuse Solar Reflectance"
    self[key] = value
@property
def drape_beambeam_visible_transmittance(self):
    """IDD field `Drape Beam-Beam Visible Transmittance`.

    Beam-beam visible transmittance at normal incidence, averaged over
    the visible spectrum.  Assumed identical for front and back sides.

    | Units: dimensionless
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Drape Beam-Beam Visible Transmittance"
    return self[key]

@drape_beambeam_visible_transmittance.setter
def drape_beambeam_visible_transmittance(self, value=None):
    """Set IDD field `Drape Beam-Beam Visible Transmittance`.

    Args:
        value (float): new field value

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Drape Beam-Beam Visible Transmittance"
    self[key] = value
@property
def drape_beamdiffuse_visible_transmittance(self):
    """IDD field `Drape Beam-Diffuse Visible Transmittance`.

    Beam-diffuse visible transmittance at normal incidence, averaged over
    the visible spectrum range.  Assumed identical for front and back sides.

    | Units: dimensionless
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Drape Beam-Diffuse Visible Transmittance"
    return self[key]

@drape_beamdiffuse_visible_transmittance.setter
def drape_beamdiffuse_visible_transmittance(self, value=None):
    """Set IDD field `Drape Beam-Diffuse Visible Transmittance`.

    Args:
        value (float): new field value

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Drape Beam-Diffuse Visible Transmittance"
    self[key] = value
@property
def drape_beamdiffuse_visible_reflectance(self):
    """IDD field `Drape Beam-Diffuse Visible Reflectance`.

    Beam-diffuse visible reflectance at normal incidence, averaged over
    the visible spectrum range.  Assumed identical for front and back sides.

    | Units: dimensionless
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Drape Beam-Diffuse Visible Reflectance"
    return self[key]

@drape_beamdiffuse_visible_reflectance.setter
def drape_beamdiffuse_visible_reflectance(self, value=None):
    """Set IDD field `Drape Beam-Diffuse Visible Reflectance`.

    Args:
        value (float): new field value

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Drape Beam-Diffuse Visible Reflectance"
    self[key] = value
@property
def drape_material_infrared_transmittance(self):
    """IDD field `Drape Material Infrared Transmittance`.

    Long-wave transmittance of the drape fabric at zero openness fraction.
    Assumed identical for front and back sides.

    | Units: dimensionless
    | Default value: 0.05
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Drape Material Infrared Transmittance"
    return self[key]

@drape_material_infrared_transmittance.setter
def drape_material_infrared_transmittance(self, value=0.05):
    """Set IDD field `Drape Material Infrared Transmittance`.

    Args:
        value (float): new field value (default 0.05)

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Drape Material Infrared Transmittance"
    self[key] = value
@property
def front_side_drape_material_infrared_emissivity(self):
    """IDD field `Front Side Drape Material Infrared Emissivity`.

    Front-side long-wave emissivity of the drape fabric at zero shade
    openness.  The openness fraction specified above is used to compute
    the effective emissivity value.

    | Units: dimensionless
    | Default value: 0.87
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Front Side Drape Material Infrared Emissivity"
    return self[key]

@front_side_drape_material_infrared_emissivity.setter
def front_side_drape_material_infrared_emissivity(self, value=0.87):
    """Set IDD field `Front Side Drape Material Infrared Emissivity`.

    Args:
        value (float): new field value (default 0.87)

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Front Side Drape Material Infrared Emissivity"
    self[key] = value
@property
def back_side_drape_material_infrared_emissivity(self):
    """IDD field `Back Side Drape Material Infrared Emissivity`.

    Back-side long-wave emissivity of the drape fabric at zero shade
    openness.  The openness fraction specified above is used to compute
    the effective emissivity value.

    | Units: dimensionless
    | Default value: 0.87
    | value < 1.0

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Back Side Drape Material Infrared Emissivity"
    return self[key]

@back_side_drape_material_infrared_emissivity.setter
def back_side_drape_material_infrared_emissivity(self, value=0.87):
    """Set IDD field `Back Side Drape Material Infrared Emissivity`.

    Args:
        value (float): new field value (default 0.87)

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Back Side Drape Material Infrared Emissivity"
    self[key] = value
@property
def width_of_pleated_fabric(self):
    """IDD field `Width of Pleated Fabric`.

    Width of the pleated section of the draped fabric.  If the drape
    fabric is unpleated or flat, this width is set to zero.

    | Units: m
    | IP-Units: in

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Width of Pleated Fabric"
    return self[key]

@width_of_pleated_fabric.setter
def width_of_pleated_fabric(self, value=None):
    """Set IDD field `Width of Pleated Fabric`.

    Args:
        value (float): new field value, in meters

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Width of Pleated Fabric"
    self[key] = value
@property
def length_of_pleated_fabric(self):
    """IDD field `Length of Pleated Fabric`.

    Length of the pleated section of the draped fabric.  If the drape
    fabric is unpleated or flat, this length is set to zero.

    | Units: m
    | IP-Units: in

    Returns:
        float: the stored value, or None if the field is not set
    """
    key = "Length of Pleated Fabric"
    return self[key]

@length_of_pleated_fabric.setter
def length_of_pleated_fabric(self, value=None):
    """Set IDD field `Length of Pleated Fabric`.

    Args:
        value (float): new field value, in meters

    Raises:
        ValueError: if `value` is not a valid value
    """
    key = "Length of Pleated Fabric"
    self[key] = value
class WindowMaterialBlindEquivalentLayer(DataObject):
""" Corresponds to IDD object `WindowMaterial:Blind:EquivalentLayer`
Window equivalent layer blind slat optical and thermal properties.
The model assumes that slats are thin and flat, applies correction
empirical correlation to account for curvature effect. Slats are
assumed to transmit and reflect diffusely.
"""
_schema = {'extensible-fields': OrderedDict(),
'fields': OrderedDict([(u'name',
{'name': u'Name',
'pyname': u'name',
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'alpha'}),
(u'slat orientation',
{'name': u'Slat Orientation',
'pyname': u'slat_orientation',
'default': u'Horizontal',
'required-field': False,
'autosizable': False,
'accepted-values': [u'Horizontal',
u'Vertical'],
'autocalculatable': False,
'type': 'alpha'}),
(u'slat width',
{'name': u'Slat Width',
'pyname': u'slat_width',
'minimum>': 0.0,
'maximum': 0.025,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'slat separation',
{'name': u'Slat Separation',
'pyname': u'slat_separation',
'minimum>': 0.0,
'maximum': 0.025,
'required-field': True,
'autosizable': False,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'slat crown',
{'name': u'Slat Crown',
'pyname': u'slat_crown',
'default': 0.0015,
'maximum': 0.00156,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'm'}),
(u'slat angle',
{'name': u'Slat Angle',
'pyname': u'slat_angle',
'default': 45.0,
'maximum': 180.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'deg'}),
(u'front side slat beam-diffuse solar transmittance',
{'name': u'Front Side Slat Beam-Diffuse Solar Transmittance',
'pyname': u'front_side_slat_beamdiffuse_solar_transmittance',
'default': 0.0,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real'}),
(u'back side slat beam-diffuse solar transmittance',
{'name': u'Back Side Slat Beam-Diffuse Solar Transmittance',
'pyname': u'back_side_slat_beamdiffuse_solar_transmittance',
'default': 0.0,
'maximum<': 1.0,
'required-field': False,
'autosizable': False,
'minimum': 0.0,
'autocalculatable': False,
'type': u'real',
'unit': u'dimensionless'}),
(u'front side slat beam-diffuse solar reflectance',
{'name': u'Front Side Slat | |
"""
This module produces the strain versus strain rate populations, with bivariate
histograms.
Example:
> cd ~/sibl/cli/process/exodus
> conda activate siblenv
> python visualization.py
"""
import os
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rc
# import pandas as pd
import seaborn as sns
# Fixed seed so any stochastic plotting behavior is reproducible run-to-run
np.random.seed(0)
sns.set(style="white", color_codes=True)

# Script-level feature toggles (1 = on, 0 = off)
EXEMPLAR = 0  # turn on or off the exemplar problem
TEST = 0  # turn on or off Bob test with small data set
TRANSLATION = (
    1  # turns on or off translational case (Bob-063f), else does rotation (Bob-066b)
)
INJURY_0 = 0  # turn on or off cellular injury curve, original
INJURY_1 = 1  # updated Summey injury curves

# Figure output configuration
FIG_NAME = os.path.basename(__file__).split(".")[0]  # remove the .py extension
FIG_FORMAT = "png"  # "pdf" or "png", but "tiff" doesn't look good
DPI = 600  # raster export resolution
LATEX = 1  # render figure text with LaTeX (requires a TeX installation)
SERIALIZE = 1  # turn on or off write figure to disk

# sns.axes_style("darkgrid")
sns.set(style="darkgrid")

# Box style for in-axes text annotations (time stamp, percentile labels)
bbox_props = dict(boxstyle="square, pad=0.2", fc="white", ec="black", lw=1)
if LATEX:
# rc('font', **{'family': 'serif', 'serif': ['Computer Modern Roman']})
rc("text", usetex=True)
rc("font", family="serif")
# matplotlib.rcParams.update({'font.size': 22})
# rcParams.update({"font.size": 16})
# 2021-05-10: Increase base font size. Process for smaller pdf files:
# 1. Generate original pdf file (about 19 MB).
# 2. Open original pdf file in Preview, save as tiff, at 600 dpi (about 56 MB)
# 3. Open tiff, export as pdf (results in 1.7 MB)
SMALL_SIZE = 8
MEDIUM_SIZE = 10
BIG_SIZE = 14
plt.rc("font", size=BIG_SIZE) # controls default text sizes
# plt.rc("axes", titlesize=SMALL_SIZE) # fontsize of the axes title
plt.rc("axes", labelsize=BIG_SIZE) # fontsize of the x and y labels
plt.rc("xtick", labelsize=BIG_SIZE) # fontsize of the tick labels
plt.rc("ytick", labelsize=BIG_SIZE) # fontsize of the tick labels
plt.rc("legend", fontsize=BIG_SIZE) # legend fontsize
# plt.rc("figure", titlesize=BIGGER_SIZE) # fontsize of the figure title
def cell_death_strain_rate_to_strain(x):
    """Return the cell-death threshold strain for strain rate ``x``.

    Power-law fit (Summey cell death curve) used for the production-ready
    figures; works element-wise on scalars or numpy arrays.
    """
    return 0.128 * x ** (-0.156)
# Exemplar joint distribution plot - begin
if EXEMPLAR:
tips = sns.load_dataset("tips")
tip_data = np.array(tips["tip"])
bill_data = np.array(tips["total_bill"])
# legend_txt = 'hello'
# legend_properties = {'weight': 'bold', 'size': 12}
g = sns.JointGrid(x=bill_data, y=tip_data)
# g = g.plot_joint(plt.scatter, s=10, linewidths=0.05, edgecolors='blue', marker='o', alpha=0.3, label=legend_txt)
g = g.plot_joint(
plt.scatter, s=10, linewidths=0.05, edgecolors="blue", marker="o", alpha=0.3
)
_ = g.ax_marg_x.hist(bill_data, color="b", bins=np.arange(0, 60, 5))
_ = g.ax_marg_y.hist(
tip_data, color="g", orientation="horizontal", bins=np.arange(0, 12, 1)
)
# _ = g.ax_joint.legend(prop=legend_properties, loc='upper left')
_ = g.ax_joint.text(20, 10, "hello", ha="left", va="bottom", bbox=bbox_props)
axis_txt = f"exemplar"
plt.xlabel("total bill")
plt.ylabel("tip")
plt.show()
# Exemplar joint distribution plot - end
else:
# -------------------------------- ##
# Client application initialization - begin
script_pth = os.getcwd()
# Client application initialization - end
# -------------------------------- ##
if TEST:
simulation_path = "." # here, in same location as visualization.py
idx = 0 # index for the probes
probes = {
"steps": [0],
"time": [0.00],
"strain_p95": [0.015],
"strain_rate_p95": [30],
}
axis_txt = f'time = {probes["time"][idx]*1000:.3f} ms (Bob-TEST-1000-pts)'
blocks = [7]
labels = ["white matter"]
colors = ["C1"] # white plotted as orange, gray -> green
strain_files = [["test_ebe_max_principal_log_strain_51_small.txt"]]
strain_rate_files = [
["test_ebe_max_principal_rate_of_deformation_51_small.txt"]
]
marker_dict = {"linestyle": "", "marker": ".", "markersize": 10, "alpha": 0.2}
else:
# not Bob TEST data subset, is the actual full data set, either translation or rotation
# block 7 is white matter is 504,505 data points
# block 8 is gray matter is 790,102 data points
# combined white + gray = 1,294,607 data points
# markers are very small and light to cope with the large data set
marker_dict = {"linestyle": "", "marker": ",", "markersize": 0.7, "alpha": 0.2}
blocks = [7, 8]
labels = ["white matter", "gray matter"]
colors = ["C1", "C2"] # white plotted as orange, gray -> green
if TRANSLATION:
# relative to this script, location of the particular simulation
simulation_path = (
"../../../../casco_sim/bob-1mm-5kg-helmet2-0305-hemi-063f/"
)
idx = 0 # index for the probes
probes = {
"steps": [30, 51, 57],
"time": [
0.00580000428262166,
0.010000030740917116,
0.011200009903610695,
],
"strain_p95": [
0.013038920686082887,
0.007864328738051788,
0.009356105757136385,
],
"strain_rate_p95": [
26.62451150429535,
45.64035758617126,
47.167653798895905,
],
}
# axis_txt = f'time = {probes["time"][idx]*1000:.3f} ms (Bob-063f)'
axis_txt = f'time = {probes["time"][idx]*1000:.2f} ms'
strain_files = [
[
"ts_30_block_7_max_principal_green_lagrange_strain.txt",
"ts_30_block_8_max_principal_green_lagrange_strain.txt",
],
[
"ts_51_block_7_max_principal_green_lagrange_strain.txt",
"ts_51_block_8_max_principal_green_lagrange_strain.txt",
],
[
"ts_57_block_7_max_principal_green_lagrange_strain.txt",
"ts_57_block_8_max_principal_green_lagrange_strain.txt",
],
]
strain_rate_files = [
[
"ts_30_block_7_max_principal_green_lagrange_strain_rate.txt",
"ts_30_block_8_max_principal_green_lagrange_strain_rate.txt",
],
[
"ts_51_block_7_max_principal_green_lagrange_strain_rate.txt",
"ts_51_block_8_max_principal_green_lagrange_strain_rate.txt",
],
[
"ts_57_block_7_max_principal_green_lagrange_strain_rate.txt",
"ts_57_block_8_max_principal_green_lagrange_strain_rate.txt",
],
]
else: # not a TRANSLATION, then the rotation case
simulation_path = (
"../../../../casco_sim/bob-1mm-5kg-helmet2-0305-hemi-066b/"
)
idx = 1 # index for the probes
probes = {
"steps": [43, 69],
"time": [0.00840000000000000, 0.013600000000000000],
"strain_p95": [0.021800000000000000, 0.056370000000000000],
"strain_rate_p95": [10.60000000000000, 5.190000000000000],
}
# axis_txt = f'time = {probes["time"][idx]*1000:.3f} ms (Bob-066b)'
axis_txt = f'time = {probes["time"][idx]*1000:.1f} ms'
strain_files = [
["max_principal_green_lagrange_strain_ts_43.csv"],
["max_principal_green_lagrange_strain_ts_69.csv"],
]
strain_rate_files = [
["max_principal_green_lagrange_strain_rate_ts_43.csv"],
["max_principal_green_lagrange_strain_rate_ts_69.csv"],
]
# User Input Deck, simulation-specific input - end
# -------------------------------- ##
# fig, ax = plt.subplots(figsize=(8,8))
# ax.set_aspect("equal")
strain = np.array([])
strain_rate = np.array([])
# for i, (s, sr) in enumerate(zip(strain_files, strain_rate_files)):
for s, sr in zip(
strain_files[idx], strain_rate_files[idx]
): # collect over all blocks
block_strain = np.genfromtxt(os.path.join(simulation_path, s))
block_strain_rate = np.genfromtxt(os.path.join(simulation_path, sr))
strain = np.concatenate((strain, block_strain))
strain_rate = np.concatenate((strain_rate, block_strain_rate))
g = sns.JointGrid(x=strain_rate, y=strain)
# g = g.plot_joint(plt.plot, linestyle='', marker=',', markersize=0.7, alpha=0.2)
g = g.plot_joint(plt.plot, **marker_dict)
exp_min = -1 # x-domain minimum 10^exp_min
exp_max = 3 # x-domain maximum 10^exp_max
npts = 24 # number of points
strain_rate_095th = np.percentile(strain_rate, 95.0) # 95th percentile strain rate
x_bins = np.logspace(exp_min, exp_max, 2 * npts)
_ = g.ax_marg_x.hist(strain_rate, bins=x_bins)
strain_095th = np.percentile(strain, 95.0) # 95th percentile strain
strain_min = np.amin(strain)
strain_max = np.amax(strain)
y_bins = np.linspace(strain_min, strain_max, npts)
_ = g.ax_marg_y.hist(strain, orientation="horizontal", bins=y_bins)
g.ax_joint.set_xscale("log")
g.ax_marg_x.set_xscale("log")
g.ax_joint.set_xlim([0.01, 10000])
# g.ax_joint.set_xlim([10**exp_min, 10**exp_max])
g.ax_joint.set_ylim([-0.02, 0.10])
# g.ax_joint.text(0.02, 0.09, axis_txt, ha='left', va='bottom', bbox=bbox_props)
time_label_x = 0.02 # strain rate
time_label_y = -0.015 # strain
g.ax_joint.text(
time_label_x, time_label_y, axis_txt, ha="left", va="bottom", bbox=bbox_props
)
# draw 95th percentile boundaries
line_prop = dict(color="orange", linewidth=1)
# vertical line on joint plot
g.ax_joint.plot(
[strain_rate_095th, strain_rate_095th], g.ax_joint.get_ylim(), **line_prop
)
# horizontal line on the joint plot
g.ax_joint.plot(g.ax_joint.get_xlim(), [strain_095th, strain_095th], **line_prop)
# vertical line across marginal strain rate plot
y0_log_sr, y1_log_sr = g.ax_marg_x.get_ylim()
g.ax_marg_x.plot(
[strain_rate_095th, strain_rate_095th], [y0_log_sr, y1_log_sr], **line_prop
)
# marginal strain rate text
if TRANSLATION:
# strain_rate_txt = r" 95\% = " + str(round(strain_rate_095th, 1)) # 26.6
strain_rate_txt = "{:.{}f}".format(strain_rate_095th, 1) # 26.6
else: # then rotation
# strain_rate_txt = r" 95\% = " + str(round(strain_rate_095th, 2)) # 5.2, not 5.20 as desired
strain_rate_txt = "{:.{}f}".format(strain_rate_095th, 2) # 5.20
# g.ax_marg_x.text(strain_rate_095th, (y0_log_sr + y1_log_sr) / 2.0, ' 95% = ' + str(round(strain_rate_095th, 1)), ha='left', va='bottom')
g.ax_marg_x.text(
strain_rate_095th,
(y0_log_sr + y1_log_sr) / 2.0,
r" 95\% = " + strain_rate_txt,
ha="left",
va="bottom",
)
# horizontal line on the marginal strain plot
x0_strain, x1_strain = g.ax_marg_y.get_xlim()
g.ax_marg_y.plot([x0_strain, x1_strain], [strain_095th, strain_095th], **line_prop)
# marginal strain text
if TRANSLATION:
# strain_txt = r"95\% = " + str(round(strain_095th, 4)) # 0.0130
strain_txt = "{:.{}f}".format(strain_095th, 4) # 0.0130
else: # then rotation
# strain_txt = r"95\% = " + str(round(strain_095th, 4)) # 0.0564
strain_txt = "{:.{}f}".format(strain_095th, 4) # 0.0564
g.ax_marg_y.text(
(x0_strain + x1_strain) / 2.0,
strain_095th,
# strain_txt,
r" 95\% = " + strain_txt,
ha="center",
va="bottom",
)
# 2021-05-10: These seem not to work with new library, so just accept defaults.
# g.ax_joint.grid(color="gray")
# # g.ax_joint.grid(color="red")
# # g.ax_joint(grid_color="red")
# g.ax_marg_x.grid(color="green", axis="x")
# g.ax_marg_y.grid(color="gray", axis="y")
# plt.xlabel("max(eig(GL strain rate)) (1/s)")
plt.xlabel("maximum principal strain rate (1/s)")
# plt.ylabel("max(eig(GL strain)) (cm/cm)")
plt.ylabel("maximum principal strain (cm/cm)")
if INJURY_0 or INJURY_1:
exp_min = -2 # x-domain minimum 10^exp_min
exp_max = 4 # x-domain maximum 10^exp_max
npts = 100 # number of points
# x = np.linspace(-4, 4, npts)
x = np.logspace(exp_min, exp_max, npts)
# injury curves
if INJURY_0:
# pathway-induced injury
# y_pathway = 0.2589 * np.arctan(-0.5789 * np.log(10**x) - 1.83) + 0.4192
y_pathway = 0.2589 * np.arctan(-0.5789 * np.log(x) - 1.83) + 0.4192
# mechanical injury
# y_mechanical = 0.345 * np.arctan(-0.2923 * np.log(10**x) - 0.1617) + 0.5033
y_mechanical = 0.345 * np.arctan(-0.2923 * np.log(x) - 0.1617) + 0.5033
g.ax_joint.plot(
x,
y_pathway,
linestyle="--",
color="green",
linewidth=2,
alpha=0.8,
label="pathway induced injury",
)
# g.ax_joint.legend()
g.ax_joint.legend(loc="upper right")
if INJURY_1:
# y_cell_death = 0.128 * x ** (-0.156)
y_cell_death = cell_death_strain_rate_to_strain(x)
g.ax_joint.plot(
x,
y_cell_death,
linestyle="--",
color="black",
linewidth=2,
alpha=0.8,
| |
= None # type: FolderBase # DOT_META/.cache subfolder
cli = None # type: TrackedSettings # Tracks any custom CLI cfg flags given, such as --index, --python or --delivery
configs = None # type: list
program_path = get_program_path()
_pickley_dev_path = None
def __init__(self):
    """New, not-yet-configured instance; set_base() establishes the config chain."""
    self.config_path = None
    self.configs = []
    # Honor any index configured in pip.conf, fall back to the public pypi
    self.pip_conf, self.pip_conf_index = get_default_index("~/.config/pip/pip.conf", "/etc/pip.conf")
    self.default_index = self.pip_conf_index or DEFAULT_PYPI
    self._explored = set()
def __repr__(self):
    """Short base folder path, or a placeholder before set_base() was called."""
    if self.base is None:
        return "<not-configured>"

    return runez.short(self.base)
@runez.cached_property
def available_pythons(self):
    """PythonDepot: scanned python installations, computed once per run.

    A pyenv-aware scanner is used when a pyenv installation is configured.
    NOTE(review): find_preferred_python() appears to be called for its side
    effect of marking the preferred python on the depot -- its return value
    is unused here; confirm against PythonDepot's API.
    """
    pyenv = self.pyenv()
    scanner = PythonInstallationScanner(pyenv) if pyenv else None
    depot = PythonDepot(scanner=scanner)
    depot.find_preferred_python(
        self.get_value("preferred_pythons"),
        min_python=self.get_value("min_python"),
        preferred_min_python=self.get_value("preferred_min_python")
    )
    return depot
@classmethod
def pickley_dev_path(cls):
    """str: Folder of pickley's own project checkout (determined once, cached on the class)."""
    cached = cls._pickley_dev_path
    if cached is None:
        cached = cls._pickley_dev_path = runez.DEV.project_folder

    return cached
def set_base(self, base_path):
    """Set the base installation folder and (re)build the layered configuration.

    Configs end up in lookup-priority order: CLI overrides first, then the
    explicit --config file (if any), then <meta>/config.json (each config may
    pull in more files via 'include'), and hardcoded defaults last.

    Args:
        base_path (str): Path to pickley base installation
    """
    self.configs = []
    self.base = FolderBase("base", base_path)
    self.meta = FolderBase("meta", os.path.join(self.base.path, DOT_META))
    self.cache = FolderBase("cache", os.path.join(self.meta.path, ".cache"))
    if self.cli:
        # CLI flags rank highest; serialize them like any other config layer
        cli = runez.serialize.json_sanitized(self.cli.to_dict())
        self.configs.append(RawConfig(self, "cli", cli))

    self._add_config_file(self.config_path)
    self._add_config_file(self.meta.full_path("config.json"))
    # Hardcoded defaults rank lowest
    defaults = dict(
        delivery="wrap",
        install_timeout=1800,
        min_python="3.6",
        preferred_min_python="3.7",
        preferred_pythons="/usr/bin/python3,/usr/bin/python",
        version_check_delay=300
    )
    self.configs.append(RawConfig(self, "defaults", defaults))
def set_cli(self, config_path, delivery, index, python, virtualenv):
    """Remember flags given on the command line, to be layered in by set_base().

    Args:
        config_path (str | None): Optional configuration to use
        delivery (str | None): Optional delivery method to use
        index (str | None): Optional pypi index to use
        python (str | None): Optional python interpreter to use
        virtualenv (str | None): Optional virtualenv version to use
    """
    self.config_path = config_path
    self.cli = TrackedSettings(delivery, index, python, virtualenv)
def _add_config_file(self, path, base=None):
    """Register config file at 'path' (resolved against 'base') if new, then follow its 'include's."""
    path = runez.resolved_path(path, base=base)
    if not path or not os.path.exists(path):
        return

    if any(c.source == path for c in self.configs):
        return  # Already registered

    values = runez.read_json(path, logger=LOG.warning)
    if not values:
        return

    self.configs.append(RawConfig(self, path, values))
    included = values.get("include")
    if included:
        folder = os.path.dirname(path)
        for additional in runez.flattened(included):
            self._add_config_file(additional, base=folder)
def _expand_bundle(self, result, seen, bundle_name):
    """Recursively expand 'bundle_name' into 'result', using 'seen' to guard against cycles."""
    if not bundle_name or bundle_name in seen:
        return

    seen.add(bundle_name)
    if bundle_name.startswith("bundle:"):
        # Nested bundle reference: expand each of its member names
        names = self.get_nested("bundle", bundle_name[7:])
        if names:
            for name in runez.flattened(names, split=" "):
                self._expand_bundle(result, seen, name)

    else:
        result.append(bundle_name)
def find_python(self, pspec=None, fatal=True):
    """
    Args:
        pspec (PackageSpec | None): Package spec, when applicable
        fatal (bool): If True, abort execution if no valid python could be found

    Returns:
        (runez.pyenv.PythonInstallation): Object representing python installation
            (when nothing valid was found and fatal is False, this is the LAST
            examined installation, with its '.problem' set)
    """
    desired = self.get_value("python", pspec=pspec)
    if not desired:
        # Most common case: use configured preferred python (will be 'invoker' by default)
        return self.available_pythons.find_python(None)

    issues = []
    python = None
    # 'python' may be configured as a comma-separated list of candidates
    desired = runez.flattened(desired, split=",")
    for d in desired:
        python = self.available_pythons.find_python(d)
        if not python.problem:
            return python  # First usable candidate wins

        issues.append(f"Python '{runez.bold(runez.short(d))}' skipped: {runez.red(python.problem)}")

    for i in issues:  # Warn only if no python could be found at all
        LOG.warning(i)

    if fatal:
        abort("No suitable python installation found")

    return python
def package_specs(self, names=None, include_pickley=False):
    """
    Args:
        names (list | None): Package names, if empty: all installed
        include_pickley (bool): If True, include pickley itself in the result

    Returns:
        (list[PackageSpec]): Corresponding PackageSpec-s
    """
    if names:
        names = runez.flattened(names, split=" ")
        if include_pickley and PICKLEY not in names:
            names.append(PICKLEY)

        # Expand any 'bundle:...' names, de-duplicating while preserving order
        result = [self.resolved_bundle(name) for name in names]
        result = runez.flattened(result, unique=True)
        return [PackageSpec(self, name) for name in result]

    # No names given: report all installed packages, i.e. folders in the
    # meta folder that hold a .manifest.json
    result = []
    if os.path.isdir(self.meta.path):
        for fname in sorted(os.listdir(self.meta.path)):
            if include_pickley or fname != PICKLEY:
                fpath = os.path.join(self.meta.path, fname)
                if os.path.isdir(fpath):
                    if os.path.exists(os.path.join(fpath, ".manifest.json")):
                        result.append(PackageSpec(self, fname))

    return result
def get_nested(self, section, key):
    """Look up 'key' within nested 'section', scanning configs in priority order.

    Args:
        section (str): Nested section to examine
        key (str): Key to look up in nested section

    Returns:
        Nested value from the first config that defines it (None otherwise)
    """
    for config in self.configs:
        found = config.get_nested(section, key)
        if found:
            return found

    return None
def get_value(self, key, pspec=None, validator=None):
    """Look up 'key', scanning configs in priority order.

    Args:
        key (str): Key to look up
        pspec (PackageSpec | None): Package spec, when applicable
        validator (callable | None): Validator to use

    Returns:
        Value from the first config that defines it (None otherwise)
    """
    for config in self.configs:
        found = config.get_value(key, pspec, validator)
        if found:
            return found

    return None
def delivery_method(self, pspec=None):
    """str: Configured delivery method for 'pspec' (first config layer that defines 'delivery' wins)."""
    return self.get_value("delivery", pspec=pspec)
def facultative(self, pspec):
    """bool: Is installation facultative for 'pspec'?

    When True, pre-existing non-pickley installs are left as-is.
    """
    return self.get_value("facultative", pspec=pspec, validator=runez.to_boolean)
def index(self, pspec=None):
    """str | None: Optional pypi index to use for 'pspec'."""
    return self.get_value("index", pspec=pspec)
def install_timeout(self, pspec=None):
    """int: Seconds to give an installation of 'pspec' to complete before assuming it failed."""
    return self.get_value("install_timeout", pspec=pspec, validator=runez.to_int)
def pinned_version(self, pspec):
    """Configured pinned version for 'pspec', if any.

    A pin may be given either as a plain version string, or as a dict
    with a 'version' entry.

    Args:
        pspec (PackageSpec | None): Package spec, when applicable

    Returns:
        (str | None): Pinned version, when one is configured
    """
    if not pspec:
        return None

    pinned = self.get_nested("pinned", pspec.dashed)
    if isinstance(pinned, str):
        return pinned

    if isinstance(pinned, dict):
        return pinned.get("version")
def pyenv(self):
    """str: Configured path to a pyenv installation, if any."""
    return self.get_value("pyenv")
def resolved_bundle(self, name):
    """Expand bundle 'name' into the package names it stands for.

    Args:
        name (str): Name of bundle to resolve

    Returns:
        (list): List of expanded package names included in the bundle
    """
    expanded = []
    self._expand_bundle(expanded, set(), name)
    return expanded
def version_check_delay(self, pspec=None):
    """int: Seconds to wait before checking the latest version of 'pspec' again."""
    return self.get_value("version_check_delay", pspec=pspec, validator=runez.to_int)
def get_virtualenv(self, pspec):
    """str: Virtualenv version to use for 'pspec' (default: stdlib venv module)."""
    return self.get_value("virtualenv", pspec=pspec)
@staticmethod
def colored_key(key, indent):
    """Color 'key' for represented() output, based on the key's kind and nesting depth."""
    if indent in (1, 3) and (key in K_CLI or key in K_LEAVES):
        return runez.teal(key)

    if indent == 1:
        if key in K_DIRECTIVES:
            return runez.dim(key)

        if key in K_GROUPS:
            return runez.purple(key)

    if indent == 2:
        return runez.bold(key)

    return runez.red(key)
def represented(self):
    """str: Human readable representation of this configuration"""
    lines = [f"{runez.bold('base')}: {self}", ""]
    lines.extend(c.represented() for c in self.configs)
    return "\n".join(lines).strip()
class TrackedVersion:
    """A version, together with how and where it was determined."""

    index = None  # type: str # Associated pypi url, if any
    install_info = None  # type: TrackedInstallInfo
    problem = None  # type: str # Problem that occurred during pypi lookup, if any
    source = None  # type: str # How 'version' was determined (can be: latest, pinned, ...)
    version = None  # type: str

    def __init__(self, index=None, install_info=None, problem=None, source=None, version=None):
        self.index = index
        self.install_info = install_info or TrackedInstallInfo.current()
        self.problem = problem
        self.source = source
        self.version = version

    def __repr__(self):
        return self.version

    @classmethod
    def from_pypi(cls, pspec, index=None, include_prerelease=False):
        """Latest version of 'pspec' as reported by the pypi index.

        Args:
            pspec (PackageSpec): Pypi package name to lookup
            index (str | None): URL to pypi index to use (default: pypi.org)
            include_prerelease (bool): If True, include latest pre-release

        Returns:
            (TrackedVersion): Carries either the found version, or the lookup problem
        """
        index = index or pspec.index or pspec.cfg.default_index
        version = PypiStd.latest_pypi_version(pspec.dashed, index=index, include_prerelease=include_prerelease)
        if version:
            return cls(index=index, source="latest", version=version.text)

        return cls(index=index, problem=f"does not exist on {index}")

    @classmethod
    def from_manifest(cls, manifest, source="installed"):
        """TrackedVersion: version recorded in an installation 'manifest'."""
        return cls(index=manifest.index, install_info=manifest.install_info, source=source, version=manifest.version)

    @classmethod
    def from_file(cls, path):
        """TrackedVersion: deserialized from json at 'path', None if file is missing or invalid."""
        data = runez.read_json(path)
        if not data:
            return None

        return cls(
            index=data.get("index"),
            install_info=TrackedInstallInfo.from_manifest_data(data),
            problem=data.get("problem"),
            source=data.get("source"),
            version=data.get("version"),
        )

    def to_dict(self):
        """dict: Serializable representation of this tracked version."""
        return dict(
            index=self.index,
            install_info=self.install_info.to_dict(),
            problem=self.problem,
            source=self.source,
            version=self.version,
        )
class TrackedManifest:
    """Info stored in .manifest.json for each installation"""

    path = None  # type: str # Path to this manifest
    settings = None  # type: TrackedSettings
    entrypoints = None  # type: dict
    install_info = None  # type: TrackedInstallInfo
    pinned = None  # type: str
    version = None  # type: str

    def __init__(self, path, settings, entrypoints, install_info=None, pinned=None, version=None):
        self.path = path
        self.settings = settings
        self.entrypoints = entrypoints if entrypoints else {}
        self.install_info = install_info if install_info else TrackedInstallInfo.current()
        self.pinned = pinned
        self.version = version

    def __repr__(self):
        return "{} [p: {}]".format(self.version, self.python)

    @classmethod
    def from_file(cls, path):
        """Deserialized from manifest json at `path`; None when file is missing/empty"""
        data = runez.read_json(path)
        if not data:
            return None
        return cls(
            path,
            TrackedSettings.from_manifest_data(data),
            data.get("entrypoints"),
            install_info=TrackedInstallInfo.from_manifest_data(data),
            pinned=data.get("pinned"),
            version=data.get("version"),
        )

    @property
    def delivery(self):
        """Delivery mode recorded in settings, if any"""
        return self.settings.delivery if self.settings else None

    @property
    def index(self):
        """Pypi index recorded in settings, if any"""
        return self.settings.index if self.settings else None

    @property
    def python(self):
        """Python recorded in settings, if any"""
        return self.settings.python if self.settings else None

    def to_dict(self):
        """dict: Serializable representation, inverse of `from_file`"""
        return {
            "settings": self.settings.to_dict(),
            "entrypoints": self.entrypoints,
            "install_info": self.install_info.to_dict(),
            "pinned": self.pinned,
            "version": self.version,
        }
class TrackedInstallInfo:
"""Info on which pickley run performed the installation"""
args = None # type: str # CLI args with which pickley was invoked
timestamp = None # type: datetime
vpickley = None # type: str # Version of pickley that performed the installation
def __init__(self, args, timestamp, vpickley):
self.args = args
self.timestamp = | |
<filename>src/sage/misc/explain_pickle.py
"""
A tool for inspecting Python pickles
AUTHORS:
- <NAME> (2009-03)
The explain_pickle function takes a pickle and produces Sage code that
will evaluate to the contents of the pickle. Ideally, the combination
of explain_pickle to produce Sage code and sage_eval to evaluate the code
would be a 100% compatible implementation of cPickle's unpickler; this
is almost the case now.
EXAMPLES::
sage: explain_pickle(dumps(12345))
pg_make_integer = unpickle_global('sage.rings.integer', 'make_integer')
pg_make_integer('c1p')
sage: explain_pickle(dumps(polygen(QQ)))
pg_Polynomial_rational_flint = unpickle_global('sage.rings.polynomial.polynomial_rational_flint', 'Polynomial_rational_flint')
pg_unpickle_PolynomialRing = unpickle_global('sage.rings.polynomial.polynomial_ring_constructor', 'unpickle_PolynomialRing')
pg_RationalField = unpickle_global('sage.rings.rational_field', 'RationalField')
pg = unpickle_instantiate(pg_RationalField, ())
pg_make_rational = unpickle_global('sage.rings.rational', 'make_rational')
pg_Polynomial_rational_flint(pg_unpickle_PolynomialRing(pg, ('x',), None, False), [pg_make_rational('0'), pg_make_rational('1')], False, True)
sage: sage_eval(explain_pickle(dumps(polygen(QQ)))) == polygen(QQ)
True
By default (as above) the code produced contains calls to several
utility functions (unpickle_global, etc.); this is done so that the
code is truly equivalent to the pickle. If the pickle can be loaded
into a future version of Sage, then the code that explain_pickle
produces today should work in that future Sage as well.
It is also possible to produce simpler code, that is tied to the current
version of Sage; here are the above two examples again::
sage: explain_pickle(dumps(12345), in_current_sage=True)
from sage.rings.integer import make_integer
make_integer('c1p')
sage: explain_pickle(dumps(polygen(QQ)), in_current_sage=True)
from sage.rings.polynomial.polynomial_rational_flint import Polynomial_rational_flint
from sage.rings.polynomial.polynomial_ring_constructor import unpickle_PolynomialRing
from sage.rings.rational import make_rational
Polynomial_rational_flint(unpickle_PolynomialRing(RationalField(), ('x',), None, False), [make_rational('0'), make_rational('1')], False, True)
The explain_pickle function has several use cases.
- Write pickling support for your classes
You can use explain_pickle to see what will happen when a pickle
is unpickled. Consider: is this sequence of commands something
that can be easily supported in all future Sage versions, or does
it expose internal design decisions that are subject to change?
- Debug old pickles
If you have a pickle from an old version of Sage that no longer
unpickles, you can use explain_pickle to see what it is trying to
do, to figure out how to fix it.
- Use explain_pickle in doctests to help maintenance
If you have a ``loads(dumps(S))`` doctest, you could also add an
``explain_pickle(dumps(S))`` doctest. Then if something changes
in a way that would invalidate old pickles, the output of
``explain_pickle`` will also change. At that point, you can add
the previous output of :obj:`explain_pickle` as a new set of
doctests (and then update the :obj:`explain_pickle` doctest to use
the new output), to ensure that old pickles will continue to work.
As mentioned above, there are several output modes for :obj:`explain_pickle`,
that control fidelity versus simplicity of the output. For example,
the GLOBAL instruction takes a module name and a class name and
produces the corresponding class. So GLOBAL of ``sage.rings.integer``,
``Integer`` is approximately equivalent to ``sage.rings.integer.Integer``.
However, this class lookup process can be customized (using
sage.misc.persist.register_unpickle_override). For instance,
if some future version of Sage renamed ``sage/rings/integer.pyx`` to
``sage/rings/knuth_was_here.pyx``, old pickles would no longer work unless
register_unpickle_override was used; in that case, GLOBAL of
'sage.rings.integer', 'integer' would mean
``sage.rings.knuth_was_here.integer``.
By default, ``explain_pickle`` will map this GLOBAL instruction to
``unpickle_global('sage.rings.integer', 'integer')``. Then when this code
is evaluated, unpickle_global will look up the current mapping in the
register_unpickle_override table, so the generated code will continue
to work even in hypothetical future versions of Sage where integer.pyx
has been renamed.
If you pass the flag ``in_current_sage=True``, then
:obj:`explain_pickle` will generate code that may only work in the
current version of Sage, not in future versions. In this case, it
would generate::
from sage.rings.integer import integer
and if you ran explain_pickle in hypothetical future sage, it would generate:
from sage.rings.knuth_was_here import integer
but the current code wouldn't work in the future sage.
If you pass the flag ``default_assumptions=True``, then
:obj:`explain_pickle` will generate code that would work in the
absence of any special unpickling information. That is, in either
current Sage or hypothetical future Sage, it would generate::
from sage.rings.integer import integer
The intention is that ``default_assumptions`` output is prettier (more
human-readable), but may not actually work; so it is only intended for
human reading.
There are several functions used in the output of :obj:`explain_pickle`.
Here I give a brief description of what they usually do, as well as
how to modify their operation (for instance, if you're trying to get
old pickles to work).
- ``unpickle_global(module, classname)``:
unpickle_global('sage.foo.bar', 'baz') is usually equivalent to
sage.foo.bar.baz, but this can be customized with
register_unpickle_override.
- ``unpickle_newobj(klass, args)``:
Usually equivalent to ``klass.__new__(klass, *args)``. If
``klass`` is a Python class, then you can define :meth:`__new__`
to control the result (this result actually need not be an
instance of klass). (This doesn't work for Cython classes.)
- ``unpickle_build(obj, state)``:
If ``obj`` has a :meth:`__setstate__` method, then this is equivalent to
``obj.__setstate__(state)``. Otherwise uses state to set the attributes
of ``obj``. Customize by defining :meth:`__setstate__`.
- ``unpickle_instantiate(klass, args)``:
Usually equivalent to ``klass(*args)``. Cannot be customized.
- ``unpickle_appends(lst, vals)``:
Appends the values in vals to lst. If not ``isinstance(lst, list)``,
can be customized by defining a :meth:`append` method.
"""
#*****************************************************************************
# Copyright (C) 2009 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from __future__ import absolute_import, print_function
import pickletools
import re
import sys
import types
import zlib as comp
import bz2 as comp_other
from pickletools import genops
import sage.all
from sage.misc.sage_input import SageInputBuilder, SageInputExpression
from sage.misc.sage_eval import sage_eval
from sage.misc.persist import (unpickle_override, unpickle_global, dumps,
register_unpickle_override, SageUnpickler)
try:
from types import ClassType
except ImportError:
# Python 3 does not have a "ClassType". Instead, we ensure that
# isinstance(foo, ClassType) will always return False.
ClassType = ()
def explain_pickle(pickle=None, file=None, compress=True, **kwargs):
    r"""
    Explain a pickle. That is, produce source code such that evaluating
    the code is equivalent to loading the pickle.  Feeding the result
    of ``explain_pickle`` to ``sage_eval`` should be totally equivalent to
    loading the ``pickle`` with ``cPickle``.

    INPUT:

    - ``pickle`` -- the pickle to explain, as a string (default: None)
    - ``file`` -- a filename of a pickle (default: None)
    - ``compress`` -- if False, don't attempt to decompress the pickle
      (default: True)
    - ``in_current_sage`` -- if True, produce potentially simpler code that is
      tied to the current version of Sage. (default: False)
    - ``default_assumptions`` -- if True, produce potentially simpler code that
      assumes that generic unpickling code will be used. This code may
      not actually work. (default: False)
    - ``eval`` -- if True, then evaluate the resulting code and return the
      evaluated result. (default: False)
    - ``preparse`` -- if True, then produce code to be evaluated with
      Sage's preparser; if False, then produce standard Python code; if
      None, then produce code that will work either with or without the
      preparser. (default: True)
    - ``pedantic`` -- if True, then carefully ensures that the result has
      at least as much sharing as the result of cPickle (it may have
      more, for immutable objects). (default: False)

    Exactly one of ``pickle`` (a string containing a pickle) or
    ``file`` (the filename of a pickle) must be provided.

    EXAMPLES::

        sage: explain_pickle(dumps({('a', 'b'): [1r, 2r]}))
        {('a', 'b'):[1r, 2r]}
        sage: explain_pickle(dumps(RR(pi)), in_current_sage=True)
        from sage.rings.real_mpfr import __create__RealNumber_version0
        from sage.rings.real_mpfr import __create__RealField_version0
        __create__RealNumber_version0(__create__RealField_version0(53r, False, 'RNDN'), '3.4gvml245kc0@0', 32r)
        sage: s = 'hi'
        sage: explain_pickle(dumps((s, s)))
        ('hi', 'hi')
        sage: explain_pickle(dumps((s, s)), pedantic=True)
        si = 'hi'
        (si, si)
        sage: explain_pickle(dumps(5r))
        5r
        sage: explain_pickle(dumps(5r), preparse=False)
        5
        sage: explain_pickle(dumps(5r), preparse=None)
        int(5)
        sage: explain_pickle(dumps(22/7))
        pg_make_rational = unpickle_global('sage.rings.rational', 'make_rational')
        pg_make_rational('m/7')
        sage: explain_pickle(dumps(22/7), in_current_sage=True)
        from sage.rings.rational import make_rational
        make_rational('m/7')
        sage: explain_pickle(dumps(22/7), default_assumptions=True)
        from sage.rings.rational import make_rational
        make_rational('m/7')
    """
    if pickle is not None:
        p = pickle
    elif file is not None:
        with open(file) as f:
            p = f.read()
    else:
        raise ValueError("Either pickle or file must be specified")
    if compress:
        # Best-effort decompression: try zlib, then bz2; if neither
        # recognizes the data, assume it was already uncompressed.
        for decompress in (comp.decompress, comp_other.decompress):
            try:
                p = decompress(p)
                break
            except Exception:
                pass
    return explain_pickle_string(p, **kwargs)
def explain_pickle_string(pickle, in_current_sage=False,
default_assumptions=False, eval=False, preparse=True,
pedantic=False):
r"""
This is a helper function for explain_pickle. It takes a decompressed
pickle string as input; other than that, its options are all the same
as explain_pickle.
EXAMPLES::
sage: sage.misc.explain_pickle.explain_pickle_string(dumps("Hello, world", compress=False))
'Hello, world'
(See the documentation for ``explain_pickle`` for many more examples.)
"""
sib = SageInputBuilder(preparse=preparse)
pe = PickleExplainer(sib, in_current_sage=in_current_sage,
default_assumptions=default_assumptions,
pedantic=pedantic)
v = pe.run_pickle(pickle)
ans = sib.result(sib(v))
if eval:
if default_assumptions:
| |
10,
(71, '1'): 10,
(71, '2'): 10,
(71, '3'): 10,
(71, '4'): 10,
(71, '5'): 10,
(71, '6'): 10,
(71, '7'): 10,
(71, '8'): 10,
(71, '9'): 10,
(71, 'A'): 10,
(71, 'B'): 10,
(71, 'C'): 10,
(71, 'D'): 10,
(71, 'E'): 10,
(71, 'F'): 10,
(71, 'G'): 10,
(71, 'H'): 10,
(71, 'I'): 10,
(71, 'J'): 10,
(71, 'K'): 10,
(71, 'L'): 10,
(71, 'M'): 10,
(71, 'N'): 10,
(71, 'O'): 10,
(71, 'P'): 10,
(71, 'Q'): 10,
(71, 'R'): 10,
(71, 'S'): 10,
(71, 'T'): 10,
(71, 'U'): 10,
(71, 'V'): 10,
(71, 'W'): 10,
(71, 'X'): 10,
(71, 'Y'): 10,
(71, 'Z'): 10,
(71, '_'): 10,
(71, 'a'): 10,
(71, 'b'): 10,
(71, 'c'): 10,
(71, 'd'): 10,
(71, 'e'): 10,
(71, 'f'): 10,
(71, 'g'): 10,
(71, 'h'): 10,
(71, 'i'): 10,
(71, 'j'): 10,
(71, 'k'): 72,
(71, 'l'): 10,
(71, 'm'): 10,
(71, 'n'): 10,
(71, 'o'): 10,
(71, 'p'): 10,
(71, 'q'): 10,
(71, 'r'): 10,
(71, 's'): 10,
(71, 't'): 10,
(71, 'u'): 10,
(71, 'v'): 10,
(71, 'w'): 10,
(71, 'x'): 10,
(71, 'y'): 10,
(71, 'z'): 10,
(72, '0'): 10,
(72, '1'): 10,
(72, '2'): 10,
(72, '3'): 10,
(72, '4'): 10,
(72, '5'): 10,
(72, '6'): 10,
(72, '7'): 10,
(72, '8'): 10,
(72, '9'): 10,
(72, 'A'): 10,
(72, 'B'): 10,
(72, 'C'): 10,
(72, 'D'): 10,
(72, 'E'): 10,
(72, 'F'): 10,
(72, 'G'): 10,
(72, 'H'): 10,
(72, 'I'): 10,
(72, 'J'): 10,
(72, 'K'): 10,
(72, 'L'): 10,
(72, 'M'): 10,
(72, 'N'): 10,
(72, 'O'): 10,
(72, 'P'): 10,
(72, 'Q'): 10,
(72, 'R'): 10,
(72, 'S'): 10,
(72, 'T'): 10,
(72, 'U'): 10,
(72, 'V'): 10,
(72, 'W'): 10,
(72, 'X'): 10,
(72, 'Y'): 10,
(72, 'Z'): 10,
(72, '_'): 10,
(72, 'a'): 10,
(72, 'b'): 10,
(72, 'c'): 10,
(72, 'd'): 10,
(72, 'e'): 10,
(72, 'f'): 10,
(72, 'g'): 10,
(72, 'h'): 10,
(72, 'i'): 10,
(72, 'j'): 10,
(72, 'k'): 10,
(72, 'l'): 10,
(72, 'm'): 10,
(72, 'n'): 10,
(72, 'o'): 10,
(72, 'p'): 10,
(72, 'q'): 10,
(72, 'r'): 10,
(72, 's'): 10,
(72, 't'): 10,
(72, 'u'): 10,
(72, 'v'): 10,
(72, 'w'): 10,
(72, 'x'): 10,
(72, 'y'): 10,
(72, 'z'): 10,
(80, '\x00'): 80,
(80, '\x01'): 80,
(80, '\x02'): 80,
(80, '\x03'): 80,
(80, '\x04'): 80,
(80, '\x05'): 80,
(80, '\x06'): 80,
(80, '\x07'): 80,
(80, '\x08'): 80,
(80, '\t'): 80,
(80, '\n'): 80,
(80, '\x0b'): 80,
(80, '\x0c'): 80,
(80, '\r'): 80,
(80, '\x0e'): 80,
(80, '\x0f'): 80,
(80, '\x10'): 80,
(80, '\x11'): 80,
(80, '\x12'): 80,
(80, '\x13'): 80,
(80, '\x14'): 80,
(80, '\x15'): 80,
(80, '\x16'): 80,
(80, '\x17'): 80,
(80, '\x18'): 80,
(80, '\x19'): 80,
(80, '\x1a'): 80,
(80, '\x1b'): 80,
(80, '\x1c'): 80,
(80, '\x1d'): 80,
(80, '\x1e'): 80,
(80, '\x1f'): 80,
(80, ' '): 80,
(80, '!'): 80,
(80, '"'): 80,
(80, '#'): 80,
(80, '$'): 80,
(80, '%'): 80,
(80, '&'): 80,
(80, "'"): 80,
(80, '('): 80,
(80, ')'): 80,
(80, '*'): 83,
(80, '+'): 80,
(80, ','): 80,
(80, '-'): 80,
(80, '.'): 80,
(80, '/'): 80,
(80, '0'): 80,
(80, '1'): 80,
(80, '2'): 80,
(80, '3'): 80,
(80, '4'): 80,
(80, '5'): 80,
(80, '6'): 80,
(80, '7'): 80,
(80, '8'): 80,
(80, '9'): 80,
(80, ':'): 80,
(80, ';'): 80,
(80, '<'): 80,
(80, '='): 80,
(80, '>'): 80,
(80, '?'): 80,
(80, '@'): 80,
(80, 'A'): 80,
(80, 'B'): 80,
(80, 'C'): 80,
(80, 'D'): 80,
(80, 'E'): 80,
(80, 'F'): 80,
(80, 'G'): 80,
(80, 'H'): 80,
(80, 'I'): 80,
(80, 'J'): 80,
(80, 'K'): 80,
(80, 'L'): 80,
(80, 'M'): 80,
(80, 'N'): 80,
(80, 'O'): 80,
(80, 'P'): 80,
(80, 'Q'): 80,
(80, 'R'): 80,
(80, 'S'): 80,
(80, 'T'): 80,
(80, 'U'): 80,
(80, 'V'): 80,
(80, 'W'): 80,
(80, 'X'): 80,
(80, 'Y'): 80,
(80, 'Z'): 80,
(80, '['): 80,
(80, '\\'): 80,
(80, ']'): 80,
(80, '^'): 80,
(80, '_'): 80,
(80, '`'): 80,
(80, 'a'): 80,
(80, 'b'): 80,
(80, 'c'): 80,
(80, 'd'): 80,
(80, 'e'): 80,
(80, 'f'): 80,
(80, 'g'): 80,
(80, 'h'): 80,
(80, 'i'): 80,
(80, 'j'): 80,
(80, 'k'): 80,
(80, 'l'): 80,
(80, 'm'): 80,
(80, 'n'): 80,
(80, 'o'): 80,
(80, 'p'): 80,
(80, 'q'): 80,
(80, 'r'): 80,
(80, 's'): 80,
(80, 't'): 80,
(80, 'u'): 80,
(80, 'v'): 80,
(80, 'w'): 80,
(80, 'x'): 80,
(80, 'y'): 80,
(80, 'z'): 80,
(80, '{'): 80,
(80, '|'): 80,
(80, '}'): 80,
(80, '~'): 80,
(80, '\x7f'): 80,
(80, '\x80'): 80,
(80, '\x81'): 80,
(80, '\x82'): 80,
(80, '\x83'): 80,
(80, '\x84'): 80,
(80, '\x85'): 80,
(80, '\x86'): 80,
(80, '\x87'): 80,
(80, '\x88'): 80,
(80, '\x89'): 80,
(80, '\x8a'): 80,
(80, '\x8b'): 80,
(80, '\x8c'): 80,
(80, '\x8d'): 80,
(80, '\x8e'): 80,
(80, '\x8f'): 80,
(80, '\x90'): 80,
(80, '\x91'): 80,
(80, '\x92'): 80,
(80, '\x93'): 80,
(80, '\x94'): 80,
(80, '\x95'): 80,
(80, '\x96'): 80,
(80, '\x97'): 80,
(80, '\x98'): 80,
(80, '\x99'): 80,
(80, '\x9a'): 80,
(80, '\x9b'): 80,
(80, '\x9c'): 80,
(80, '\x9d'): 80,
(80, '\x9e'): 80,
(80, '\x9f'): 80,
(80, '\xa0'): 80,
(80, '\xa1'): 80,
(80, '\xa2'): 80,
(80, '\xa3'): 80,
(80, '\xa4'): 80,
(80, '\xa5'): 80,
(80, '\xa6'): 80,
(80, '\xa7'): 80,
(80, '\xa8'): 80,
(80, '\xa9'): 80,
(80, '\xaa'): 80,
(80, '\xab'): 80,
(80, '\xac'): 80,
(80, '\xad'): 80,
(80, '\xae'): 80,
(80, '\xaf'): 80,
(80, '\xb0'): 80,
(80, '\xb1'): 80,
(80, '\xb2'): 80,
(80, '\xb3'): 80,
(80, '\xb4'): 80,
(80, '\xb5'): 80,
(80, '\xb6'): 80,
(80, '\xb7'): 80,
(80, '\xb8'): 80,
(80, '\xb9'): 80,
(80, '\xba'): 80,
(80, '\xbb'): 80,
(80, '\xbc'): 80,
(80, '\xbd'): 80,
(80, '\xbe'): 80,
(80, '\xbf'): 80,
(80, '\xc0'): 80,
(80, '\xc1'): 80,
(80, '\xc2'): 80,
(80, '\xc3'): 80,
(80, '\xc4'): 80,
(80, '\xc5'): 80,
(80, '\xc6'): 80,
(80, '\xc7'): 80,
(80, '\xc8'): 80,
(80, '\xc9'): 80,
(80, '\xca'): 80,
(80, '\xcb'): 80,
(80, '\xcc'): 80,
(80, '\xcd'): 80,
(80, '\xce'): 80,
(80, '\xcf'): 80,
(80, '\xd0'): 80,
(80, '\xd1'): 80,
(80, '\xd2'): 80,
(80, '\xd3'): 80,
(80, '\xd4'): 80,
(80, '\xd5'): 80,
(80, '\xd6'): 80,
(80, '\xd7'): 80,
(80, '\xd8'): 80,
(80, '\xd9'): 80,
(80, '\xda'): 80,
(80, '\xdb'): 80,
(80, '\xdc'): 80,
(80, '\xdd'): 80,
(80, '\xde'): 80,
(80, '\xdf'): 80,
(80, '\xe0'): 80,
(80, '\xe1'): 80,
(80, '\xe2'): 80,
(80, '\xe3'): 80,
(80, '\xe4'): 80,
(80, '\xe5'): 80,
(80, '\xe6'): 80,
(80, '\xe7'): 80,
(80, '\xe8'): 80,
(80, '\xe9'): 80,
(80, '\xea'): 80,
(80, '\xeb'): 80,
(80, '\xec'): 80,
(80, '\xed'): 80,
(80, '\xee'): 80,
(80, '\xef'): 80,
(80, '\xf0'): 80,
(80, '\xf1'): 80,
(80, '\xf2'): 80,
(80, '\xf3'): 80,
(80, '\xf4'): 80,
(80, '\xf5'): 80,
(80, '\xf6'): 80,
(80, '\xf7'): 80,
(80, '\xf8'): 80,
(80, '\xf9'): 80,
(80, '\xfa'): 80,
(80, '\xfb'): 80,
(80, '\xfc'): 80,
(80, '\xfd'): 80,
(80, '\xfe'): 80,
(80, '\xff'): 80,
(83, '\x00'): 80,
(83, '\x01'): 80,
(83, '\x02'): 80,
(83, '\x03'): 80,
(83, '\x04'): 80,
(83, '\x05'): 80,
(83, '\x06'): 80,
(83, '\x07'): 80,
(83, '\x08'): 80,
(83, '\t'): 80,
(83, '\n'): 80,
(83, '\x0b'): 80,
(83, '\x0c'): 80,
(83, '\r'): 80,
(83, '\x0e'): 80,
(83, '\x0f'): 80,
(83, '\x10'): 80,
(83, '\x11'): 80,
(83, '\x12'): 80,
(83, '\x13'): 80,
(83, '\x14'): 80,
(83, '\x15'): 80,
(83, '\x16'): 80,
(83, '\x17'): 80,
(83, '\x18'): 80,
(83, '\x19'): 80,
(83, '\x1a'): 80,
(83, '\x1b'): 80,
(83, '\x1c'): 80,
(83, '\x1d'): 80,
(83, '\x1e'): 80,
(83, '\x1f'): 80,
(83, ' '): 80,
(83, '!'): 80,
(83, '"'): 80,
(83, '#'): 80,
(83, '$'): 80,
(83, '%'): 80,
(83, '&'): 80,
(83, "'"): 80,
(83, '('): 80,
(83, ')'): 80,
(83, '*'): 84,
(83, '+'): 80,
(83, ','): | |
import tensorflow as tf
from tensorflow.python.keras import activations
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import initializers, regularizers, constraints
from tensorflow.python.keras.backend import _preprocess_padding
from tensorflow.python.keras.layers import Conv2D, Add
from tensorflow.python.keras.layers import Layer
from tensorflow.python.keras.utils import conv_utils
from utils import he_init, glorot_init
class ConditionalCenterScale(Layer):
    """Class-conditional affine transform: multiply by a per-class ``gamma``
    and add a per-class ``beta`` along ``axis``.

    ``call`` expects a pair ``[x, class_labels]``; ``class_labels`` is
    squeezed on axis 1, so it is expected to have shape (batch, 1) —
    integer labels indexing the per-class gamma/beta rows.
    """

    def __init__(self,
                 number_of_classes,
                 axis=-1,
                 center=True,
                 scale=True,
                 beta_initializer='zeros',
                 gamma_initializer='ones',
                 beta_regularizer=None,
                 gamma_regularizer=None,
                 beta_constraint=None,
                 gamma_constraint=None,
                 **kwargs):
        """
        Args:
            number_of_classes: Number of gamma/beta rows to learn.
            axis: Feature axis to scale/shift; None means a single scalar
                per class. Must not be 0 (the batch axis).
            center: If True, add a learned ``beta``.
            scale: If True, multiply by a learned ``gamma``.
        """
        super(ConditionalCenterScale, self).__init__(**kwargs)
        self.number_of_classes = number_of_classes
        self.supports_masking = True
        self.axis = axis
        self.center = center
        self.scale = scale
        self.beta_initializer = initializers.get(beta_initializer)
        self.gamma_initializer = initializers.get(gamma_initializer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.beta_constraint = constraints.get(beta_constraint)
        self.gamma_constraint = constraints.get(gamma_constraint)

    def build(self, input_shape):
        # input_shape is a list: [x_shape, class_labels_shape]
        ndim = len(input_shape[0])
        cls = input_shape[1]
        if len(cls) != 2:
            raise ValueError("Classes should be one dimensional")
        if self.axis == 0:
            raise ValueError('Axis cannot be zero')
        if (self.axis is not None) and (ndim == 2):
            raise ValueError('Cannot specify axis for rank 1 tensor')
        # One gamma/beta row per class; each row is scalar (axis=None) or
        # sized to the feature axis.
        if self.axis is None:
            shape = (self.number_of_classes, 1)
        else:
            shape = (self.number_of_classes, input_shape[0][self.axis])
        if self.scale:
            self.gamma = self.add_weight(shape=shape,
                                         name='gamma',
                                         initializer=self.gamma_initializer,
                                         regularizer=self.gamma_regularizer,
                                         constraint=self.gamma_constraint)
        else:
            self.gamma = None
        if self.center:
            self.beta = self.add_weight(shape=shape,
                                        name='beta',
                                        initializer=self.beta_initializer,
                                        regularizer=self.beta_regularizer,
                                        constraint=self.beta_constraint)
        else:
            self.beta = None
        super(ConditionalCenterScale, self).build(input_shape)

    def call(self, inputs, training=None):
        # (batch, 1) -> (batch,) integer labels
        class_labels = K.squeeze(inputs[1], axis=1)
        inputs = inputs[0]
        input_shape = K.int_shape(inputs)
        # NOTE(review): reduction_axes is computed but never used below —
        # it looks like leftover from a normalization layer this was
        # adapted from; confirm before removing.
        reduction_axes = list(range(0, len(input_shape)))
        if self.axis is not None:
            del reduction_axes[self.axis]
        del reduction_axes[0]
        normed = inputs
        # Broadcast shape keeps the (dynamic) batch dim — gamma/beta are
        # per-sample after the gather — and the feature axis; every other
        # axis broadcasts.
        broadcast_shape = [1] * len(input_shape)
        broadcast_shape[0] = K.shape(inputs)[0]
        if self.axis is not None:
            broadcast_shape[self.axis] = input_shape[self.axis]
        if self.scale:
            broadcast_gamma = K.reshape(K.gather(self.gamma, class_labels), broadcast_shape)
            normed = normed * broadcast_gamma
        if self.center:
            broadcast_beta = K.reshape(K.gather(self.beta, class_labels), broadcast_shape)
            normed = normed + broadcast_beta
        return normed

    def compute_output_shape(self, input_shape):
        # Output matches the x input; the labels input does not change shape.
        return input_shape[0]

    def get_config(self):
        """Serializable config, for layer deserialization."""
        config = {
            'number_of_classes': self.number_of_classes,
            'axis': self.axis,
            'center': self.center,
            'scale': self.scale,
            'beta_initializer': initializers.serialize(self.beta_initializer),
            'gamma_initializer': initializers.serialize(self.gamma_initializer),
            'beta_regularizer': regularizers.serialize(self.beta_regularizer),
            'gamma_regularizer': regularizers.serialize(self.gamma_regularizer),
            'beta_constraint': constraints.serialize(self.beta_constraint),
            'gamma_constraint': constraints.serialize(self.gamma_constraint)
        }
        base_config = super(ConditionalCenterScale, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class CenterScale(Layer):
    """Affine transform layer: multiply by a learned ``gamma`` and add a
    learned ``beta`` along ``axis`` (the scale/shift step of batch
    normalization, without the normalization).

    Unconditional counterpart of ``ConditionalCenterScale``: a single
    shared gamma/beta instead of one row per class.
    """

    def __init__(self,
                 axis=-1,
                 center=True,
                 scale=True,
                 beta_initializer='zeros',
                 gamma_initializer='ones',
                 beta_regularizer=None,
                 gamma_regularizer=None,
                 beta_constraint=None,
                 gamma_constraint=None,
                 **kwargs):
        """
        Args:
            axis: Feature axis to scale/shift; None means a single scalar
                pair. Must not be 0 (the batch axis).
            center: If True, add a learned ``beta``.
            scale: If True, multiply by a learned ``gamma``.
        """
        super(CenterScale, self).__init__(**kwargs)
        self.axis = axis
        self.center = center
        self.scale = scale
        self.beta_initializer = initializers.get(beta_initializer)
        self.gamma_initializer = initializers.get(gamma_initializer)
        self.beta_regularizer = regularizers.get(beta_regularizer)
        self.gamma_regularizer = regularizers.get(gamma_regularizer)
        self.beta_constraint = constraints.get(beta_constraint)
        self.gamma_constraint = constraints.get(gamma_constraint)

    def build(self, input_shape):
        # Bug fix: `ndim` was previously assigned the shape tuple itself
        # (``ndim = input_shape``), so the rank guard below compared a tuple
        # to 2 and could never fire.  Use the rank, mirroring the sibling
        # ConditionalCenterScale layer.
        ndim = len(input_shape)
        if self.axis == 0:
            raise ValueError('Axis cannot be zero')
        if (self.axis is not None) and (ndim == 2):
            raise ValueError('Cannot specify axis for rank 1 tensor')
        # Scalar gamma/beta when axis is None, otherwise one per feature.
        if self.axis is None:
            shape = (1, )
        else:
            shape = (input_shape[self.axis], )
        if self.scale:
            self.gamma = self.add_weight(shape=shape,
                                         name='gamma',
                                         initializer=self.gamma_initializer,
                                         regularizer=self.gamma_regularizer,
                                         constraint=self.gamma_constraint)
        else:
            self.gamma = None
        if self.center:
            self.beta = self.add_weight(shape=shape,
                                        name='beta',
                                        initializer=self.beta_initializer,
                                        regularizer=self.beta_regularizer,
                                        constraint=self.beta_constraint)
        else:
            self.beta = None
        super(CenterScale, self).build(input_shape)

    def call(self, inputs, training=None):
        input_shape = K.int_shape(inputs)
        # Broadcast gamma/beta against every axis except the feature axis.
        # (Removed a dead `reduction_axes` computation and a no-op
        # `inputs = inputs` left over from an earlier normalization layer.)
        broadcast_shape = [1] * len(input_shape)
        if self.axis is not None:
            broadcast_shape[self.axis] = input_shape[self.axis]
        normed = inputs
        if self.scale:
            broadcast_gamma = K.reshape(self.gamma, broadcast_shape)
            normed = normed * broadcast_gamma
        if self.center:
            broadcast_beta = K.reshape(self.beta, broadcast_shape)
            normed = normed + broadcast_beta
        return normed

    def get_config(self):
        """Serializable config, for layer deserialization."""
        config = {
            'axis': self.axis,
            'center': self.center,
            'scale': self.scale,
            'beta_initializer': initializers.serialize(self.beta_initializer),
            'gamma_initializer': initializers.serialize(self.gamma_initializer),
            'beta_regularizer': regularizers.serialize(self.beta_regularizer),
            'gamma_regularizer': regularizers.serialize(self.gamma_regularizer),
            'beta_constraint': constraints.serialize(self.beta_constraint),
            'gamma_constraint': constraints.serialize(self.gamma_constraint)
        }
        base_config = super(CenterScale, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class ConditionalConv11(Layer):
    """Class-conditional 1x1 convolution.

    ``call`` expects a pair ``[x, class_labels]`` with ``class_labels``
    shaped (batch, 1).  A separate (1, 1, in, out) kernel (and bias) is
    learned per class, gathered per sample, and applied as a batched
    matmul over the flattened spatial positions (equivalent to a 1x1
    conv with a per-sample kernel).
    """

    def __init__(self, filters,
                 number_of_classes,
                 strides=1,
                 group=1,
                 data_format=None,
                 activation=None,
                 use_bias=True,
                 kernel_initializer='glorot_uniform',
                 bias_initializer='zeros',
                 kernel_regularizer=None,
                 bias_regularizer=None,
                 activity_regularizer=None,
                 kernel_constraint=None,
                 bias_constraint=None,
                 triangular=False,
                 **kwargs):
        """
        Args:
            filters: Number of output channels.
            number_of_classes: Number of per-class kernels/biases.
            triangular: If True, only the upper-triangular part of each
                (in, out) kernel is used (see ``call``).
        """
        super(ConditionalConv11, self).__init__(**kwargs)
        self.filters = filters
        # Kernel size is fixed to 1x1 by construction.
        self.kernel_size = conv_utils.normalize_tuple((1, 1), 2, 'kernel_size')
        self.number_of_classes = number_of_classes
        self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
        self.padding = conv_utils.normalize_padding('same')
        self.group = group
        self.data_format = conv_utils.normalize_data_format(data_format)
        self.dilation_rate = conv_utils.normalize_tuple(1, 2, 'dilation_rate')
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.bias_regularizer = regularizers.get(bias_regularizer)
        self.activity_regularizer = regularizers.get(activity_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.bias_constraint = constraints.get(bias_constraint)
        self.triangular = triangular

    def build(self, input_shape):
        # input_shape is a list: [x_shape, class_labels_shape]
        if self.data_format == 'channels_first':
            channel_axis = 1
        else:
            channel_axis = -1
        # NOTE(review): this indexes the *list* of input shapes, not the x
        # shape — with channel_axis == -1 it inspects the labels shape, so
        # the guard likely never fires.  The intended check is probably
        # ``input_shape[0][channel_axis]`` — confirm.
        if input_shape[channel_axis] is None:
            raise ValueError('The channel dimension of the inputs '
                             'should be defined. Found `None`.')
        # ``.value`` is the TF1 Dimension API (pre-TF2 shape objects).
        input_dim = input_shape[0][channel_axis].value
        assert (input_dim % self.group == 0), 'group incorrect!'
        # NOTE(review): m_per_group is stored but not referenced in call().
        self.m_per_group = input_dim // self.group
        self.input_dim = input_dim
        # One (1, 1, in, out) kernel per class.
        kernel_shape = (self.number_of_classes,) + self.kernel_size + (input_dim, self.filters)
        self.kernel = self.add_weight(shape=kernel_shape,
                                      initializer=self.kernel_initializer,
                                      name='kernel',
                                      regularizer=self.kernel_regularizer,
                                      constraint=self.kernel_constraint)
        if self.use_bias:
            # One bias vector per class.
            self.bias = self.add_weight(shape=(self.number_of_classes, self.filters),
                                        initializer=self.bias_initializer,
                                        name='bias',
                                        regularizer=self.bias_regularizer,
                                        constraint=self.bias_constraint)
        else:
            self.bias = None
        super(ConditionalConv11, self).build(input_shape)

    def call(self, inputs):
        cls = inputs[1]
        x = inputs[0]
        ### Preprocess input
        # (bs, w, h, c)
        if self.data_format != 'channels_first':
            # channels_last input: move channels to the front first.
            x = tf.transpose(x, [0, 3, 1, 2])
            _, in_c, w, h = K.int_shape(x)
        else:
            # NOTE(review): for channels_first input the shape is
            # (bs, c, w, h), but this unpacks it as (bs, w, h, c) — the
            # two unpack patterns look swapped; confirm with a
            # channels_first caller.
            _, w, h, in_c = K.int_shape(x)
        # (bs, c, w, h)
        x = tf.reshape(x, (-1, in_c, w * h))
        # (bs, c, w*h)
        x = tf.transpose(x, [0, 2, 1])
        # (bs, w*h, c)
        ### Preprocess filter
        # (batch, 1) -> (batch,) integer labels
        cls = tf.squeeze(cls, axis=1)
        # (num_cls, 1, 1, in, out)
        if self.triangular:
            # Keep only the upper-triangular part of each (in, out) slab.
            kernel = tf.matrix_band_part(self.kernel, 0, -1)
        else:
            kernel = self.kernel
        # Select each sample's kernel by its class label.
        kernel = tf.gather(kernel, cls)
        # (bs, 1, 1, in, out)
        kernel = tf.squeeze(kernel, axis=1)
        kernel = tf.squeeze(kernel, axis=1)
        # print (K.int_shape(kernel))
        # (in, 1, bs, out)
        # print (K.int_shape(kernel))
        # Batched matmul: (bs, w*h, in) x (bs, in, out)
        output = tf.matmul(x, kernel)
        # (bs, w*h, out)
        ### Deprocess output
        output = tf.transpose(output, [0, 2, 1])
        # (bs, out, w * h)
        output = tf.reshape(output, (-1, self.filters, w, h))
        # (bs, out, w, h)
        if self.bias is not None:
            # (num_cls, out)
            bias = tf.gather(self.bias, cls)
            # (bs, bias)
            bias = tf.expand_dims(bias, axis=-1)
            bias = tf.expand_dims(bias, axis=-1)
            # (bs, bias, 1, 1)
            output += bias
        if self.data_format != 'channels_first':
            # (bs, out, w, h) -> back to channels_last
            output = tf.transpose(output, [0, 2, 3, 1])
        if self.activation is not None:
            return self.activation(output)
        return output

    def compute_output_shape(self, input_shape):
        # Standard Conv2D output-shape arithmetic on the x input; with a
        # 1x1 kernel, 'same' padding and stride 1 this preserves spatial dims.
        input_shape = input_shape[0]
        if self.data_format == 'channels_last':
            space = input_shape[1:-1]
            new_space = []
            for i in range(len(space)):
                new_dim = conv_utils.conv_output_length(
                    space[i],
                    self.kernel_size[i],
                    padding=self.padding,
                    stride=self.strides[i],
                    dilation=self.dilation_rate[i])
                new_space.append(new_dim)
            return (input_shape[0],) + tuple(new_space) + (self.filters,)
        if self.data_format == 'channels_first':
            space = input_shape[2:]
            new_space = []
            for i in range(len(space)):
                new_dim = conv_utils.conv_output_length(
                    space[i],
                    self.kernel_size[i],
                    padding=self.padding,
                    stride=self.strides[i],
                    dilation=self.dilation_rate[i])
                new_space.append(new_dim)
            return (input_shape[0], self.filters) + tuple(new_space)

    def get_config(self):
        """Serializable config, for layer deserialization."""
        config = {
            'number_of_classes': self.number_of_classes,
            'rank': 2,
            'filters': self.filters,
            'kernel_size': self.kernel_size,
            'strides': self.strides,
            'padding': self.padding,
            'data_format': self.data_format,
            'dilation_rate': self.dilation_rate,
            'activation': activations.serialize(self.activation),
            'use_bias': self.use_bias,
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'bias_regularizer': regularizers.serialize(self.bias_regularizer),
            'activity_regularizer': regularizers.serialize(self.activity_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
            'bias_constraint': constraints.serialize(self.bias_constraint)
        }
        base_config = super(ConditionalConv11, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
class FactorizedConv11(Layer):
def __init__(self, filters,
number_of_classes,
filters_emb,
strides=1,
data_format=None,
activation=None,
use_bias=True,
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
kernel_constraint=None,
bias_constraint=None,
**kwargs):
super(FactorizedConv11, self).__init__(**kwargs)
self.filters = filters
self.filters_emb = filters_emb
self.kernel_size = conv_utils.normalize_tuple((1, 1), 2, 'kernel_size')
self.number_of_classes = number_of_classes
self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
self.padding = conv_utils.normalize_padding('same')
self.data_format = conv_utils.normalize_data_format(data_format)
self.dilation_rate = conv_utils.normalize_tuple(1, 2, 'dilation_rate')
self.activation = activations.get(activation)
self.use_bias = use_bias
self.kernel_initializer = initializers.get(kernel_initializer)
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.bias_regularizer = regularizers.get(bias_regularizer)
self.activity_regularizer = regularizers.get(activity_regularizer)
self.kernel_constraint = constraints.get(kernel_constraint)
self.bias_constraint = constraints.get(bias_constraint)
def build(self, input_shape):
if self.data_format == 'channels_first':
channel_axis = 1
else:
channel_axis = -1
if input_shape[channel_axis] is None:
raise ValueError('The channel dimension of the inputs '
'should be defined. Found `None`.')
input_dim = input_shape[0][channel_axis].value
self.input_dim = input_dim
class_matrix_shape = (self.number_of_classes, self.filters_emb)
kernel_shape = (self.filters_emb, ) + self.kernel_size + (input_dim, self.filters)
self.class_matrix = self.add_weight(shape=class_matrix_shape,
initializer=self.kernel_initializer,
name='class_matrix')
self.kernel = self.add_weight(shape=kernel_shape,
initializer=self.kernel_initializer,
name='kernel',
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint)
if self.use_bias:
self.bias = self.add_weight(shape=(self.number_of_classes, self.filters),
initializer=self.bias_initializer,
name='bias',
regularizer=self.bias_regularizer,
constraint=self.bias_constraint)
else:
self.bias = None
super(FactorizedConv11, self).build(input_shape)
def call(self, inputs):
cls = inputs[1]
x = inputs[0]
### Preprocess input
#(bs, w, h, c)
if self.data_format != 'channels_first':
x = tf.transpose(x, [0, 3, 1, 2])
_, in_c, w, h = K.int_shape(x)
else:
_, w, h, in_c = K.int_shape(x)
#(bs, c, w, h)
x = tf.reshape(x, (-1, in_c, w * h))
#(bs, c, w*h)
x = tf.transpose(x, [0, 2, 1])
#(bs, w*h, c)
### Preprocess filter
cls = tf.squeeze(cls, axis=1)
#(num_cls, 1, 1, in, out)
cls_emb = tf.gather(self.class_matrix, cls)
cls_emb = K.l2_normalize(cls_emb, axis=1)
#(bs, filters_emb)
kernel = tf.reshape(self.kernel, (self.filters_emb, -1))
#(filters_emb, 1 * 1 * in * out)
kernel = tf.matmul(cls_emb, kernel)
#(bs, 1 * 1 * in * out)
kernel = tf.reshape(kernel, (-1, 1, 1, in_c, self.filters))
#(bs, 1, 1, in, out)
kernel = tf.squeeze(kernel, axis=1)
kernel = tf.squeeze(kernel, axis=1)
#print (K.int_shape(kernel))
#(in, 1, bs, out)
#print (K.int_shape(kernel))
output = tf.matmul(x, kernel)
#(bs, w*h, out)
### | |
# -*- coding: utf-8 -*-
#
# testfish.py
#
# Copyright (c) <NAME> 2009-2012 and other authors specified
# in the AUTHOR
# Licence terms in LICENCE.
#
import sys
import types
import unittest
import urllib
import fishbase
import fishlib
import cli
class TestFluidinfo(unittest.TestCase):
    """Integration tests for the core fishlib.Fluidinfo API.

    NOTE(review): these tests run against a live Fluidinfo host using the
    globally configured credentials; they are network-dependent, not
    isolated unit tests.
    """
    def setUp(self):
        # Fresh client per test; short timeout keeps hung connections brief.
        self.db = fishlib.Fluidinfo()
        self.user = self.db.credentials.username # UNICODE
        self.db.set_connection_from_global()
        self.db.set_debug_timeout(5.0)
        self.dadgadID = fishlib.id(u'DADGAD', self.db.host)
    def testCreateObject(self):
        # Creating an object by about-tag returns its known id and URI.
        db = self.db
        o = db.create_object(u'DADGAD')
        self.assertEqual(o.id, self.dadgadID)
        self.assertEqual(o.tags[u'URI'], fishlib.object_uri(self.dadgadID))
    def testCreateObjectNoAbout(self):
        # Without an about value we still get an object, not an int status.
        db = self.db
        o = db.create_object()
        self.assertEqual(type(o) not in (int, long), True)
    def testCreateObjectFail(self):
        # Bad credentials surface as STATUS.UNAUTHORIZED.
        bad = fishlib.Credentials(u'doesnotexist',
                                  u'certainlywiththispassword')
        db = fishlib.Fluidinfo(bad)
        o = db.create_object(u'DADGAD')
        self.assertEqual(o, fishlib.STATUS.UNAUTHORIZED)
    def testCreateTag(self):
        # Created tags report a string id and the expected (unquoted) URI.
        db = self.db
        o = db.delete_abstract_tag(u'test-fish/testrating')
        # doesn't really matter if this works or not
        o = db.create_abstract_tag(u'test-fish/testrating',
                u"%s's test-fish/testrating (0-10; more is better)"
                % self.user)
        self.assertEqual(type(o.id) in types.StringTypes, True)
        self.assertEqual(unicode(urllib.unquote(o.tags['URI'].encode('UTF-8')),
                                 'UTF-8'),
                         fishlib.tag_uri(db.credentials.username,
                                         u'test-fish/testrating'))
    def testTags(self):
        # Unicode tag names and values must round-trip correctly.
        db = self.db
        user = db.credentials.username
        o = db.tag_object_by_about(u'αβγδε', u'test-fish/ζηθικ', u'φχψω')
        o = db.tag_object_by_about(u'αβγδε', u'test-fish/λμνξο', u'πρστυ')
        # check tags function OK
        tags = db.get_object_tags_by_about(u'αβγδε')
        self.assertEqual(u'%s/test-fish/ζηθικ' % user in tags, True)
        self.assertEqual(u'%s/test-fish/λμνξο' % user in tags, True)
        # check tag values are OK
        status, v = db.get_tag_value_by_about(u'αβγδε', u'test-fish/ζηθικ')
        self.assertEqual(v, u'φχψω')
        # clean up
        o = db.untag_object_by_about(u'αβγδε', u'test-fish/ζηθικ')
        o = db.untag_object_by_about(u'αβγδε', u'test-fish/λμνξο')
    def testValuesAPISetGet(self):
        # Set several typed values via the /values API and read them back.
        db = self.db
        user = db.credentials.username
        pairs = {
            u'test-fish/αβγδε': u'αβγδε',
            u'test-fish/ζηθικ': 1,
            u'test-fish/φχψω': 2.5,
            u'test-fish/λμνξο': True,
            u'test-fish/πρστυ': None,
            u'test-fish/testrating': u'αβγδε',
            u'test-fish/testrating2': 1,
            u'test-fish/testrating3': 2.5,
            u'test-fish/testrating4': True,
            u'test-fish/testrating5': None,
        }
        tagsToSet = {}
        object_about = u'ΔΑΔΓΑΔ'
        for tag in pairs:
            db.tag_object_by_about(object_about, tag, None) # make sure
                                                            # tag exists
            tagsToSet[db.abs_tag_path(tag)[1:]] = pairs[tag]
        query = u'fluiddb/about = "%s"' % object_about
        db.tag_by_query(query, tagsToSet)
        objects = db.get_values_by_query(query, tagsToSet.keys())
        self.assertEqual(len(objects), 1)
        o = objects[0]
        for key in tagsToSet:
            self.assertEqual(o.tags[key], tagsToSet[key])
            # NOTE(review): `tag` here is the leftover loop variable from the
            # `for tag in pairs` loop above, so this deletes the same single
            # tag on every iteration -- it looks like the intent was to
            # delete each tag in `pairs`; confirm before changing.
            db.delete_abstract_tag(u'/' + tag)
    def testSetTagByID(self):
        # Tag an object addressed by id and read the value back.
        db = self.db
        user = db.credentials.username
        o = db.delete_abstract_tag(u'test-fish/testrating')
        o = db.create_abstract_tag(u'test-fish/testrating',
               u"%s's test-fish/testrating (0-10; more is better)" % self.user)
        o = db.tag_object_by_id(self.dadgadID,
                                u'/%s/test-fish/testrating' % user, 5)
        self.assertEqual(o, 0)
        _status, v = db.get_tag_value_by_id(self.dadgadID,
                                            u'test-fish/testrating')
        self.assertEqual(v, 5)
    def testSetTagByAbout(self):
        # Tag objects addressed by about value (including spaces and '+').
        db = self.db
        user = db.credentials.username
        o = db.delete_abstract_tag(u'test-fish/testrating')
        o = db.tag_object_by_about(u'http://dadgad.com',
                                   u'/%s/test-fish/testrating' % user, u'five')
        o = db.tag_object_by_about('DAD +GAD',
                                   u'/%s/test-fish/testrating' % user, u'five')
        self.assertEqual(o, 0)
        _status, v = db.get_tag_value_by_about(u'http://dadgad.com',
                                               u'test-fish/testrating')
        _status, v = db.get_tag_value_by_about(u'DAD +GAD',
                                               u'test-fish/testrating')
        self.assertEqual(v, u'five')
    def testDeleteNonExistentTag(self):
        # Deleting an absent tag must not blow up.
        db = self.db
        o = db.delete_abstract_tag(u'test-fish/testrating')
        o = db.delete_abstract_tag(u'test-fish/testrating') # definitely
                                                            # doesn't exist
    def testSetNonExistentTag(self):
        # Tagging with a tag that does not exist implicitly creates it.
        db = self.db
        o = db.delete_abstract_tag(u'test-fish/testrating')
        o = db.tag_object_by_id(self.dadgadID, u'test-fish/testrating', 5)
        self.assertEqual(o, 0)
        status, v = db.get_tag_value_by_id(self.dadgadID,
                                           u'test-fish/testrating')
        self.assertEqual(v, 5)
    def testUntagObjectByID(self):
        db = self.db
        # First tag something
        o = db.tag_object_by_id(self.dadgadID, u'test-fish/testrating', 5)
        self.assertEqual(o, 0)
        # Now untag it
        error = db.untag_object_by_id(self.dadgadID, u'test-fish/testrating')
        self.assertEqual(error, 0)
        status, v = db.get_tag_value_by_id(self.dadgadID,
                                           u'test-fish/testrating')
        self.assertEqual(status, fishlib.STATUS.NOT_FOUND)
        # Now untag it again (should be OK)
        error = db.untag_object_by_id(self.dadgadID, u'test-fish/testrating')
        self.assertEqual(error, 0)
        # And again, but this time asking for error if untagged
        error = db.untag_object_by_id(self.dadgadID, u'test-fish/testrating',
                                      False)
        self.assertEqual(error, 0) # The API has changed so that in fact
                                   # a 204 (NO CONTENT) is always returned,
                                   # so this test and the flag are now
                                   # less meaningful.
                                   # For now, just updated to be consistent
                                   # with the latest API.
    def testUntagObjectByAbout(self):
        db = self.db
        # First tag something
        o = db.tag_object_by_id(self.dadgadID, u'test-fish/testrating', 5)
        self.assertEqual(o, 0)
        # Now untag it
        error = db.untag_object_by_about(u'DADGAD', u'test-fish/testrating')
        self.assertEqual(error, 0)
        status, v = db.get_tag_value_by_about(u'DADGAD',
                                              u'test-fish/testrating')
        self.assertEqual(status, fishlib.STATUS.NOT_FOUND)
    def testAddValuelessTag(self):
        # A tag set with no value reads back as None.
        db = self.db
        o = db.delete_abstract_tag(u'test-fish/testconvtag')
        o = db.create_abstract_tag(u'test-fish/testconvtag',
                                   u"a conventional (valueless) tag")
        o = db.tag_object_by_id(self.dadgadID, u'test-fish/testconvtag')
        self.assertEqual(o, 0)
        status, v = db.get_tag_value_by_id(self.dadgadID,
                                           u'test-fish/testconvtag')
        self.assertEqual(v, None)
class TestFDBUtilityFunctions(unittest.TestCase):
    """Tests for tag-path manipulation and typed-value parsing helpers."""
    def setUp(self):
        self.db = fishlib.Fluidinfo()
        self.user = self.db.credentials.username
        self.db.set_connection_from_global()
        self.db.set_debug_timeout(5.0)
        self.dadgadID = fishlib.id(u'DADGAD', self.db.host)
    def testFullTagPath(self):
        # full_tag_path normalizes every input form to /tags/<user>/...
        db = self.db
        user = db.credentials.username
        self.assertEqual(db.full_tag_path(u'rating'),
                         u'/tags/%s/rating' % user)
        self.assertEqual(db.full_tag_path(u'/%s/rating' % user),
                         u'/tags/%s/rating' % user)
        self.assertEqual(db.full_tag_path(u'/tags/%s/rating' % user),
                         u'/tags/%s/rating' % user)
        self.assertEqual(db.full_tag_path(u'foo/rating'),
                         u'/tags/%s/foo/rating' % user)
        self.assertEqual(db.full_tag_path(u'/%s/foo/rating' % user),
                         u'/tags/%s/foo/rating' % user)
        self.assertEqual(db.full_tag_path(u'/tags/%s/foo/rating' % user),
                         u'/tags/%s/foo/rating' % user)
    def testAbsTagPath(self):
        # abs_tag_path normalizes every input form to /<user>/...
        db = self.db
        user = db.credentials.username
        self.assertEqual(db.abs_tag_path(u'rating'), u'/%s/rating' % user)
        self.assertEqual(db.abs_tag_path(u'/%s/rating' % user),
                         u'/%s/rating' % user)
        self.assertEqual(db.abs_tag_path(u'/tags/%s/rating' % user),
                         u'/%s/rating' % user)
        self.assertEqual(db.abs_tag_path('foo/rating'),
                         u'/%s/foo/rating' % user)
        self.assertEqual(db.abs_tag_path('/%s/foo/rating' % user),
                         u'/%s/foo/rating' % user)
        self.assertEqual(db.abs_tag_path('/tags/%s/foo/rating' % user),
                         u'/%s/foo/rating' % user)
    def testTagPathSplit(self):
        # tag_path_split returns (user, namespace, tag); paths without a tag
        # component raise TagPathError.
        db = self.db
        user = db.credentials.username
        self.assertEqual(db.tag_path_split(u'rating'), (user, u'', u'rating'))
        self.assertEqual(db.tag_path_split(u'/%s/rating' % user),
                         (user, u'', u'rating'))
        self.assertEqual(db.tag_path_split('/tags/%s/rating' % user),
                         (user, u'', u'rating'))
        self.assertEqual(db.tag_path_split(u'foo/rating'),
                         (user, u'foo', u'rating'))
        self.assertEqual(db.tag_path_split('/%s/foo/rating' % user),
                         (user, u'foo', u'rating'))
        self.assertEqual(db.tag_path_split(u'/tags/%s/foo/rating' % user),
                         (user, u'foo', u'rating'))
        self.assertEqual(db.tag_path_split(u'foo/bar/rating'),
                         (user, u'foo/bar', u'rating'))
        self.assertEqual(db.tag_path_split(u'/%s/foo/bar/rating' % user),
                         (user, u'foo/bar', u'rating'))
        self.assertEqual(db.tag_path_split('/tags/%s/foo/bar/rating' % user),
                         (user, u'foo/bar', u'rating'))
        self.assertRaises(fishlib.TagPathError, db.tag_path_split, u'')
        self.assertRaises(fishlib.TagPathError, db.tag_path_split, u'/')
        self.assertRaises(fishlib.TagPathError, db.tag_path_split, u'/foo')
    def testTypedValueInterpretation(self):
        # get_typed_tag_value maps CLI strings onto bool/int/float/unicode.
        corrects = {
            u'TRUE': (True, bool),
            u'tRuE': (True, bool),
            u't': (True, bool),
            u'T': (True, bool),
            u'f': (False, bool),
            u'false': (False, bool),
            u'1': (1, int),
            u'+1': (1, int),
            u'-1': (-1, int),
            u'0': (0, int),
            u'+0': (0, int),
            u'-0': (0, int),
            u'123456789': (123456789, int),
            u'-987654321': (-987654321, int),
            u'011': (11, int),
            u'-011': (-11, int),
            u'3.14159': (float('3.14159'), float),
            u'-3.14159': (float('-3.14159'), float),
            u'.14159': (float('.14159'), float),
            u'-.14159': (float('-.14159'), float),
            u'"1"': ('1', unicode),
            u'DADGAD': ('DADGAD', unicode),
            u'': ('', unicode),
            u'1,300': ('1,300', unicode),
            u'.': ('.', unicode),
            u'+.': ('+.', unicode),
            u'-.': ('-.', unicode),
            u'+': ('+', unicode),
            u'-': ('-', unicode),
        }
        for s in corrects:
            target, targetType = corrects[s]
            v = fishlib.get_typed_tag_value(s)
            self.assertEqual((s, v), (s, target))
            self.assertEqual((s, type(v)), (s, targetType))
def specify_DADGAD(mode, host):
    """Return (cli_flag, specifier, description_object) addressing the
    DADGAD test object in the given mode: 'about', 'id' or 'query'.

    Raises ValueError for an unknown mode.  (The original raised the
    undefined name ModeError, which would surface as a NameError instead.)
    """
    if mode == 'about':
        return ('-a', 'DADGAD', fishbase.O(about=u'DADGAD'))
    elif mode == 'id':
        return ('-i', fishlib.id('DADGAD', host),
                fishbase.O(id=fishlib.id('DADGAD', host)))
    elif mode == 'query':
        return ('-q', 'fluiddb/about="DADGAD"', fishbase.O(about=u'DADGAD'))
    else:
        raise ValueError('Bad mode')
class TestCLI(unittest.TestCase):
    def setUp(self):
        """Create a client and start capturing stdout/stderr for CLI runs."""
        self.db = fishlib.Fluidinfo()
        self.user = self.db.credentials.username
        self.db.set_connection_from_global()
        self.db.set_debug_timeout(5.0)
        self.dadgadID = fishlib.id('DADGAD', self.db.host)
        # Keep the real streams so reset() can restore them.
        self.stdout = sys.stdout
        self.stderr = sys.stderr
        self.stealOutput()
        self.hostname = ['--hostname', fishlib.choose_host()]
    def stealOutput(self):
        """Redirect stdout/stderr into SaveOut buffers for later assertions."""
        self.out = fishlib.SaveOut()
        self.err = fishlib.SaveOut()
        sys.stdout = self.out
        sys.stderr = self.err
    def reset(self):
        """Restore the real stdout/stderr captured in setUp."""
        sys.stdout = self.stdout
        sys.stderr = self.stderr
    def Print(self, msg):
        """Write directly to the real stdout, bypassing the capture buffers."""
        # NOTE(review): `toStr` is not defined or imported in this module --
        # presumably a fishbase/fishlib helper; verify it is in scope.
        self.stdout.write(toStr(msg) + '\n')
    def testOutputManipulation(self):
        # Sanity-check the capture machinery itself (Python 2 print statement).
        print 'one'
        sys.stderr.write('two')
        self.reset()
        self.assertEqual(self.out.buffer, ['one', '\n'])
        self.assertEqual(self.err.buffer, ['two'])
    def tagTest(self, mode, verbose=True):
        """Run `fish tag` addressing DADGAD in `mode` and check its output."""
        self.stealOutput()
        (flag, spec, o) = specify_DADGAD(mode, self.db.host)
        description = cli.describe_by_mode(o)
        flags = ['-v', flag] if verbose else [flag]
        args = ['tag'] + ['-U'] + flags + [spec, 'rating=10'] + self.hostname
        cli.execute_command_line(*cli.parse_args(args))
        self.reset()
        if verbose:
            target = ['Tagged object %s with rating = 10' % description, '\n']
        else:
            # Non-verbose: only query mode reports the match count.
            if mode == 'query':
                target = ['1 object matched', '\n']
            else:
                target = []
        self.assertEqual(self.out.buffer, target)
        self.assertEqual(self.err.buffer, [])
    def untagTest(self, mode, verbose=True):
        """Run `fish untag` addressing DADGAD in `mode` and check its output."""
        self.stealOutput()
        (flag, spec, o) = specify_DADGAD(mode, self.db.host)
        description = cli.describe_by_mode(o)
        flags = ['-v', flag] if verbose else [flag]
        args = ['untag'] + ['-U'] + flags + [spec, 'rating'] + self.hostname
        cli.execute_command_line(*cli.parse_args(args))
        self.reset()
        if verbose:
            target = ['Removed tag rating from object %s\n' % description,
                      '\n']
        else:
            target = []
        self.assertEqual(self.out.buffer, target)
        self.assertEqual(self.err.buffer, [])
    def showTaggedSuccessTest(self, mode):
        """Run `fish show` and expect rating = 10 plus the about tag."""
        self.stealOutput()
        (flag, spec, o) = specify_DADGAD(mode, self.db.host)
        description = cli.describe_by_mode(o)
        args = (['show', '-U', '-v', flag, spec, 'rating', '/fluiddb/about']
                + self.hostname)
        cli.execute_command_line(*cli.parse_args(args))
        self.reset()
        self.assertEqual(self.out.buffer,
                ['Object %s:' % description, '\n',
                 '  /%s/rating = 10' % self.user.encode('UTF-8'), '\n',
                 '  /fluiddb/about = "DADGAD"', '\n'])
        self.assertEqual(self.err.buffer, [])
    def showUntagSuccessTest(self, mode):
        """Run `fish show` and expect the rating tag to be reported absent."""
        self.stealOutput()
        (flag, spec, o) = specify_DADGAD(mode, self.db.host)
        description = cli.describe_by_mode(o)
        args = (['show', '-U', '-v', flag, spec, 'rating', '/fluiddb/about']
                + self.hostname)
        cli.execute_command_line(*cli.parse_args(args))
        self.reset()
        # NOTE(review): `cli_bracket` is not defined or imported in this
        # module's visible code -- verify it is in scope (cli helper?).
        self.assertEqual(self.out.buffer,
                ['Object %s:' % description, '\n',
                 '  %s' % cli_bracket('tag /%s/rating not present' % self.user),
                 '\n', '  /fluiddb/about = "DADGAD"', '\n'])
        self.assertEqual(self.err.buffer, [])
    def testTagByAboutVerboseShow(self):
        # Verbose tag by about-address, then verify via show.
        self.tagTest('about')
        self.showTaggedSuccessTest('about')
    def testTagByIDVerboseShow(self):
        # Verbose tag by object id, then verify via show.
        self.tagTest('id')
        self.showTaggedSuccessTest('id')
    def testTagByQueryVerboseShow(self):
        # Tag via query (non-verbose), then verify by id.
        # NOTE(review): the show step uses 'id' rather than 'query' --
        # confirm that is intentional.
        self.tagTest('query', verbose=False)
        self.showTaggedSuccessTest('id')
    def testTagSilent(self):
        # Non-verbose tagging produces no output but still tags.
        self.tagTest('about', verbose=False)
        self.showTaggedSuccessTest('about')
    # The 'atest' prefix keeps this out of unittest's default 'test*'
    # discovery, i.e. the test is deliberately disabled.
    def atestUntagByAboutVerboseShow(self):
        self.untagTest('about')
        self.showUntagSuccessTest('about')
    # Disabled (prefix does not match unittest's 'test*' discovery pattern).
    def atestUntagByIDVerboseShow(self):
        self.untagTest('id')
        self.showUntagSuccessTest('id')
def strip_list(self, list_):
return [L.strip() for L in list_ if L.strip()]
    def command_sequence_test(self, commands, output):
        """Run a sequence of CLI command lines, then compare the stripped,
        non-blank captured output lines against the expected output."""
        self.stealOutput()
        expected = self.strip_list(output if type(output) in (list, tuple)
                                          else [output])
        for command in commands:
            # Commands may be given as whitespace-joined strings or arg lists.
            if type(command) == type(''):
                args = command.split(' ')
            else:
                args = command
            cli.execute_command_line(*cli.parse_args(args))
        self.reset()
        self.assertEqual(self.strip_list(self.out.buffer), expected)
    def test_simple_rm(self):
        # Create a namespace, remove it, then verify `ls` reports it missing.
        commands = ('-U mkns test-fish/testns',
                    '-U rm test-fish/testns',
                    '-U ls -d test-fish/testns',)
        output = u'/%s/test-fish/testns not found' % self.user
        self.command_sequence_test(commands, output.encode('UTF-8'))
def test_perms_simples(self):
# Tests the simple permissions settings for namespaces
# --- private, default, lock, unlock
commands = ('-U mkns test-fish/testns',
'-U perms private test-fish/testns',
'-U ls -ld test-fish/testns',
'-U perms default test-fish/testns',
'-U ls -ld test-fish/testns',
'-U perms lock test-fish/testns',
'-U | |
t1.x > ?) AS alias1
JOIN t2 ON alias1.x = t2.a) AS alias2"""
assert (normalize(str(result)) == normalize(expected1) or
normalize(str(result)) == normalize(expected2))
def test_transform_where():
    # transform() after a filter keeps the WHERE clause in the emitted SQL.
    t2 = t[t.id == 1]
    expr = transform(t2, abs_amt=abs(t2.amount), sine=sin(t2.id))
    result = compute(expr, s, return_type='native')
    expected = """SELECT
    accounts.name,
    accounts.amount,
    accounts.id,
    abs(accounts.amount) as abs_amt,
    sin(accounts.id) as sine
    FROM accounts
    WHERE accounts.id = :id_1
    """
    assert normalize(str(result)) == normalize(expected)
def test_merge():
    # merge() of a plain column and a labelled expression selects both.
    col = (t['amount'] * 2).label('new')
    expr = merge(t['name'], col)
    result = str(compute(expr, s, return_type='native'))
    assert 'amount * ' in result
    assert 'FROM accounts' in result
    assert 'SELECT accounts.name' in result
    assert 'new' in result
def test_merge_where():
    # merge() over a filtered table keeps the WHERE clause.
    t2 = t[t.id == 1]
    expr = merge(t2[['amount', 'name']], t2.id)
    result = compute(expr, s, return_type='native')
    expected = normalize("""SELECT
    accounts.amount,
    accounts.name,
    accounts.id
    FROM accounts
    WHERE accounts.id = :id_1
    """)
    assert normalize(str(result)) == expected
def test_transform_filter_by_single_column():
    # by() over a transformed, filtered table aggregates a derived column.
    t2 = t[t.amount < 0]
    tr = transform(t2, abs_amt=abs(t2.amount), sine=sin(t2.id))
    expr = by(tr.name, avg_amt=tr.abs_amt.mean())
    result = compute(expr, s, return_type='native')
    expected = normalize("""SELECT
    accounts.name,
    avg(abs(accounts.amount)) AS avg_amt
    FROM accounts
    WHERE accounts.amount < :amount_1
    GROUP BY accounts.name
    """)
    assert normalize(str(result)) == expected
def test_transform_filter_by_multiple_columns():
    # Multiple aggregations of derived columns in one grouped query.
    t2 = t[t.amount < 0]
    tr = transform(t2, abs_amt=abs(t2.amount), sine=sin(t2.id))
    expr = by(tr.name, avg_amt=tr.abs_amt.mean(), sum_sine=tr.sine.sum())
    result = compute(expr, s, return_type='native')
    expected = normalize("""SELECT
    accounts.name,
    avg(abs(accounts.amount)) AS avg_amt,
    sum(sin(accounts.id)) AS sum_sine
    FROM accounts
    WHERE accounts.amount < :amount_1
    GROUP BY accounts.name
    """)
    assert normalize(str(result)) == expected
def test_transform_filter_by_different_order():
    # Same as above, but with transform applied before the filter.
    t2 = transform(t, abs_amt=abs(t.amount), sine=sin(t.id))
    tr = t2[t2.amount < 0]
    expr = by(tr.name,
              avg_amt=tr.abs_amt.mean(),
              avg_sine=tr.sine.sum() / tr.sine.count())
    result = compute(expr, s, return_type='native')
    expected = normalize("""SELECT
    accounts.name,
    avg(abs(accounts.amount)) AS avg_amt,
    sum(sin(accounts.id)) / count(sin(accounts.id)) AS avg_sine
    FROM accounts
    WHERE accounts.amount < :amount_1
    GROUP BY accounts.name
    """)
    assert normalize(str(result)) == expected
def test_transform_filter_by_projection():
    # Grouping by a projection of multiple columns.
    t2 = transform(t, abs_amt=abs(t.amount), sine=sin(t.id))
    tr = t2[t2.amount < 0]
    expr = by(tr[['name', 'id']],
              avg_amt=tr.abs_amt.mean(),
              avg_sine=tr.sine.sum() / tr.sine.count())
    result = compute(expr, s, return_type='native')
    expected = normalize("""SELECT
    accounts.name,
    accounts.id,
    avg(abs(accounts.amount)) AS avg_amt,
    sum(sin(accounts.id)) / count(sin(accounts.id)) AS avg_sine
    FROM accounts
    WHERE accounts.amount < :amount_1
    GROUP BY accounts.name, accounts.id
    """)
    assert normalize(str(result)) == expected
def test_merge_compute():
    # transform() over a plain Python list backend appends the new column.
    # NOTE(review): the sqlite table created via `into(uri + '::table', ...)`
    # is never used -- compute() runs against the raw list `dta`; confirm
    # whether the uri round-trip was meant to be exercised.
    dta = [(1, 'Alice', 100),
           (2, 'Bob', 200),
           (4, 'Dennis', 400)]
    ds = datashape.dshape('var * {id: int, name: string, amount: real}')
    s = symbol('s', ds)
    with tmpfile('db') as fn:
        uri = 'sqlite:///' + fn
        into(uri + '::table', dta, dshape=ds)
        expr = transform(s, amount10=s.amount * 10)
        result = into(list, compute(expr, {s: dta}, return_type='native'))
        assert result == [(1, 'Alice', 100, 1000),
                          (2, 'Bob', 200, 2000),
                          (4, 'Dennis', 400, 4000)]
def test_notnull():
    # notnull() compiles to an `is not null` predicate.
    result = compute(nt[nt.name.notnull()], ns, return_type='native')
    expected = """SELECT
    nullaccounts.name,
    nullaccounts.amount,
    nullaccounts.id
    FROM nullaccounts
    WHERE nullaccounts.name is not null
    """
    assert normalize(str(result)) == normalize(expected)
def test_head_limit():
    # Chained head() calls keep the smaller of the two limits.
    for first, second, expected in [(5, 10, 5), (10, 5, 5), (10, 10, 10)]:
        limited = compute(t.head(first).head(second), s, return_type='native')
        assert limited._limit == expected
def test_no_extraneous_join():
    # A join of a filtered table with a projection should compile to exactly
    # one JOIN of two subselects -- no redundant join of the base tables.
    ds = """ {event: var * {name: ?string,
                            operation: ?string,
                            datetime_nearest_receiver: ?datetime,
                            aircraft: ?string,
                            temperature_2m: ?float64,
                            temperature_5cm: ?float64,
                            humidity: ?float64,
                            windspeed: ?float64,
                            pressure: ?float64,
                            include: int64},
             operation: var * {name: ?string,
                               runway: int64,
                               takeoff: bool,
                               datetime_nearest_close: ?string}}
    """
    db = bz_data('sqlite:///:memory:', dshape=ds)
    d = symbol('db', dshape=ds)
    expr = join(d.event[d.event.include == True],
                d.operation[['name', 'datetime_nearest_close']],
                'operation', 'name')
    result = compute(expr, db, return_type='native')
    assert normalize(str(result)) == normalize("""
    SELECT
        alias.operation,
        alias.name as name_left,
        alias.datetime_nearest_receiver,
        alias.aircraft,
        alias.temperature_2m,
        alias.temperature_5cm,
        alias.humidity,
        alias.windspeed,
        alias.pressure,
        alias.include,
        alias.datetime_nearest_close
    FROM
        (SELECT
             event.name AS name,
             event.operation AS operation,
             event.datetime_nearest_receiver AS datetime_nearest_receiver,
             event.aircraft AS aircraft,
             event.temperature_2m AS temperature_2m,
             event.temperature_5cm AS temperature_5cm,
             event.humidity AS humidity,
             event.windspeed AS windspeed,
             event.pressure AS pressure,
             event.include AS include
         FROM
             event WHERE event.include = 1) AS alias1
    JOIN
        (SELECT
             operation.name AS name,
             operation.datetime_nearest_close as datetime_nearest_close
         FROM operation) AS alias2
    ON
        alias1.operation = alias2.name
    """)
def test_math():
    # Math expressions map to the corresponding SQL functions; floor
    # division becomes floor(a / b).
    result = compute(sin(t.amount), s, return_type='native')
    assert normalize(str(result)) == normalize("""
    SELECT sin(accounts.amount) as amount
    FROM accounts""")
    result = compute(floor(t.amount), s, return_type='native')
    assert normalize(str(result)) == normalize("""
    SELECT floor(accounts.amount) as amount
    FROM accounts""")
    result = compute(t.amount // 2, s, return_type='native')
    assert normalize(str(result)) == normalize("""
    SELECT floor(accounts.amount / :amount_1) AS amount
    FROM accounts""")
def test_transform_order():
    # transform() emits the original columns first, then the new columns.
    r = transform(t, sin_amount=sin(t.amount), cos_id=cos(t.id))
    result = compute(r, s, return_type='native')
    expected = """SELECT
    accounts.name,
    accounts.amount,
    accounts.id,
    cos(accounts.id) as cos_id,
    sin(accounts.amount) as sin_amount
    FROM accounts
    """
    assert normalize(str(result)) == normalize(expected)
def test_isin():
    # isin() compiles to a parameterized IN clause.
    result = t[t.name.isin(['foo', 'bar'])]
    result_sql_expr = str(compute(result, s, return_type='native'))
    expected = """
    SELECT
        accounts.name,
        accounts.amount,
        accounts.id
    FROM
        accounts
    WHERE
        accounts.name
    IN
        (:name_1,
        :name_2)
    """
    assert normalize(result_sql_expr) == normalize(expected)
@pytest.mark.skipif('1.0.0' <= LooseVersion(sa.__version__) <= '1.0.1',
                    reason=("SQLAlchemy generates different code in 1.0.0"
                            " and 1.0.1"))
def test_date_grouper_repeats_not_one_point_oh():
    # Outside SA 1.0.0/1.0.1 the GROUP BY repeats the EXTRACT expression.
    columns = [sa.Column('amount', sa.REAL),
               sa.Column('ds', sa.TIMESTAMP)]
    dta = sa.Table('t', sa.MetaData(), *columns)
    t = symbol('t', discover(dta))
    expr = by(t.ds.year, avg_amt=t.amount.mean())
    result = str(compute(expr, dta, return_type='native'))
    # FYI spark sql isn't able to parse this correctly
    expected = """SELECT
    EXTRACT(year FROM t.ds) as ds_year,
    AVG(t.amount) as avg_amt
    FROM t
    GROUP BY EXTRACT(year FROM t.ds)
    """
    assert normalize(result) == normalize(expected)
@pytest.mark.skipif(LooseVersion(sa.__version__) < '1.0.0' or
                    LooseVersion(sa.__version__) >= '1.0.2',
                    reason=("SQLAlchemy generates different code in < 1.0.0 "
                            "and >= 1.0.2"))
def test_date_grouper_repeats():
    # SA 1.0.0/1.0.1 groups by the label instead of repeating EXTRACT.
    columns = [sa.Column('amount', sa.REAL),
               sa.Column('ds', sa.TIMESTAMP)]
    dta = sa.Table('t', sa.MetaData(), *columns)
    t = symbol('t', discover(dta))
    expr = by(t.ds.year, avg_amt=t.amount.mean())
    result = str(compute(expr, dta, return_type='native'))
    # FYI spark sql isn't able to parse this correctly
    expected = """SELECT
    EXTRACT(year FROM t.ds) as ds_year,
    AVG(t.amount) as avg_amt
    FROM t
    GROUP BY ds_year
    """
    assert normalize(result) == normalize(expected)
def test_transform_then_project_single_column():
    # Projecting after transform keeps only the requested columns.
    expr = transform(t, foo=t.id + 1)[['foo', 'id']]
    result = normalize(str(compute(expr, s, return_type='native')))
    expected = normalize("""SELECT
    accounts.id + :id_1 as foo,
    accounts.id
    FROM accounts""")
    assert result == expected
def test_transform_then_project():
    # Same as above, with the projection list passed as a variable.
    proj = ['foo', 'id']
    expr = transform(t, foo=t.id + 1)[proj]
    result = normalize(str(compute(expr, s, return_type='native')))
    expected = normalize("""SELECT
    accounts.id + :id_1 as foo,
    accounts.id
    FROM accounts""")
    assert result == expected
def test_reduce_does_not_compose():
    # A reduction of a grouped reduction becomes a subselect, not a
    # composed aggregate.
    expr = by(t.name, counts=t.count()).counts.max()
    result = str(compute(expr, s, return_type='native'))
    expected = """
    SELECT max(alias.counts) AS counts_max
    FROM
    (SELECT count(accounts.id) AS counts
    FROM accounts GROUP BY accounts.name) as alias"""
    assert normalize(result) == normalize(expected)
@pytest.mark.xfail(raises=NotImplementedError)
def test_normalize_reduction():
    # Normalizing a grouped count by its max would need a CTE; not
    # implemented yet, hence the xfail.
    expr = by(t.name, counts=t.count())
    expr = transform(expr, normed_counts=expr.counts / expr.counts.max())
    result = str(compute(expr, s, return_type='native'))
    expected = """WITH alias AS
    (SELECT count(accounts.id) AS counts
    FROM accounts GROUP BY accounts.name)
    SELECT alias.counts / max(alias.counts) AS normed_counts
    FROM alias"""
    assert normalize(result) == normalize(expected)
def test_do_not_erase_group_by_functions_with_datetime():
    # Grouping by a date() extraction keeps the function in GROUP BY.
    t, s = tdate, sdate
    expr = by(t[t.amount < 0].occurred_on.date,
              avg_amount=t[t.amount < 0].amount.mean())
    result = str(compute(expr, s, return_type='native'))
    expected = """SELECT
    date(accdate.occurred_on) as occurred_on_date,
    avg(accdate.amount) as avg_amount
    FROM
    accdate
    WHERE
    accdate.amount < :amount_1
    GROUP BY
    date(accdate.occurred_on)
    """
    assert normalize(result) == normalize(expected)
def test_not():
    # ~isin(...) compiles to NOT IN.
    expr = t.amount[~t.name.isin(('Billy', 'Bob'))]
    result = str(compute(expr, s, return_type='native'))
    expected = """SELECT
    accounts.amount
    FROM
    accounts
    WHERE
    accounts.name not in (:name_1, :name_2)
    """
    assert normalize(result) == normalize(expected)
def test_slice():
    # A slice maps to LIMIT/OFFSET; step 1 is accepted.
    start, stop, step = 50, 100, 1
    result = str(compute(t[start:stop], s, return_type='native'))
    # Verifies that compute is translating the query correctly
    assert result == str(select(s).offset(start).limit(stop))
    # Verifies the query against expected SQL query
    expected = """
    SELECT accounts.name, accounts.amount, accounts.id FROM accounts
    LIMIT :param_1 OFFSET :param_2
    """
    assert normalize(str(result)) == normalize(str(expected))
    # Step size of 1 should be alright
    compute(t[start:stop:step], s, return_type='native')
@pytest.mark.xfail(raises=ValueError)
def test_slice_step():
    # A step other than 1 cannot be expressed in SQL and must raise.
    start, stop, step = 50, 100, 2
    compute(t[start:stop:step], s, return_type='native')
def test_datetime_to_date():
    # .date on a datetime column compiles to the SQL DATE() function.
    expr = tdate.occurred_on.date
    result = str(compute(expr, sdate, return_type='native'))
    expected = """SELECT
    DATE(accdate.occurred_on) as occurred_on_date
    FROM
    accdate
    """
    assert normalize(result) == normalize(expected)
def test_sort_compose():
    # sort-after-slice must wrap the slice in a subselect; it is NOT the
    # same query as slice-after-sort.
    expr = t.name[:5].sort()
    result = compute(expr, s, return_type='native')
    expected = """select
    anon_1.name
    from (select
    accounts.name as name
    from
    accounts
    limit :param_1
    offset :param_2) as anon_1
    order by
    anon_1.name asc"""
    assert normalize(str(result)) == normalize(expected)
    assert (normalize(str(compute(t.sort('name').name[:5], s, return_type='native'))) !=
            normalize(expected))
def test_coerce():
    # coerce() compiles to a CAST.
    expr = t.amount.coerce(to='int64')
    expected = """SELECT
    cast(accounts.amount AS BIGINT) AS amount
    FROM accounts"""
    result = compute(expr, s, return_type='native')
    assert normalize(str(result)) == normalize(expected)
def test_multi_column_by_after_transform():
    # Grouping on a derived column repeats its expression in GROUP BY.
    tbl = transform(t, new_amount=t.amount + 1, one_two=t.amount * 2)
    expr = by(tbl[['name', 'one_two']], avg_amt=tbl.new_amount.mean())
    result = compute(expr, s, return_type='native')
    expected = """SELECT
    accounts.name,
    accounts.amount * :amount_1 as one_two,
    avg(accounts.amount + :amount_2) as avg_amt
    FROM
    accounts
    GROUP BY
    accounts.name, accounts.amount * :amount_1
    """
    assert normalize(str(result)) == normalize(expected)
def test_multi_column_by_after_transform_and_filter():
    # As above, but with a preceding filter preserved as WHERE.
    tbl = t[t.name == 'Alice']
    tbl = transform(tbl, new_amount=tbl.amount + 1, one_two=tbl.amount * 2)
    expr = by(tbl[['name', 'one_two']], avg_amt=tbl.new_amount.mean())
    result = compute(expr, s, return_type='native')
    expected = """SELECT
    accounts.name,
    accounts.amount * :amount_1 as one_two,
    avg(accounts.amount + :amount_2) as avg_amt
    FROM
    accounts
    WHERE
    accounts.name = :name_1
    GROUP BY
    accounts.name, accounts.amount * :amount_1
    """
    assert normalize(str(result)) == normalize(expected)
def test_attribute_access_on_transform_filter():
tbl = | |
stability_value
location = (rows[i], columns[i])
return location
def parity_player(self, board):
"""
Parity player uses the parity characteristic of the stones
:param board: the current state of the board
:return: A tuple representing the location of parity player's move
"""
valid_moves = self.game.find_valid_moves(self.computer_color, board, self.board_size)
rows, columns = np.where(valid_moves == 1)
max_parity = -200
location = (-2, -2)
for i in range(len(rows)):
temp_board = np.copy(board)
temp_board = self.game.flip_opponent_stones((rows[i], columns[i]), temp_board, self.board_size,
self.computer_num, self.opponent_num)
parity_value = self.stone_parity(temp_board)
if parity_value > max_parity:
max_parity = parity_value
location = (rows[i], columns[i])
return location
def mobility_player(self, board):
"""
Mobility player uses the mobility characteristic of the stones
:param board: the current state of the board
:return: A tuple representing the location of mobility player's move
"""
valid_moves = self.game.find_valid_moves(self.computer_color, board, self.board_size)
rows, columns = np.where(valid_moves == 1)
max_mobility = -200
location = (-2, -2)
for i in range(len(rows)):
temp_board = np.copy(board)
temp_board = self.game.flip_opponent_stones((rows[i], columns[i]), temp_board, self.board_size,
self.computer_num, self.opponent_num)
mobility_value = self.stone_parity(temp_board)
if mobility_value > max_mobility:
max_mobility = mobility_value
location = (rows[i], columns[i])
return location
def potential_mobility_player(self, board):
"""
Potential mobility player uses the potential mobility characteristic of the stones
:param board: the current state of the board
:return: A tuple representing the location of potential mobility player's move
"""
valid_moves = self.game.find_valid_moves(self.computer_color, board, self.board_size)
rows, columns = np.where(valid_moves == 1)
max_potential_mobility = -200
location = (-2, -2)
for i in range(len(rows)):
temp_board = np.copy(board)
temp_board = self.game.flip_opponent_stones((rows[i], columns[i]), temp_board, self.board_size,
self.computer_num, self.opponent_num)
potential_mobility_value = self.stone_parity(temp_board)
if potential_mobility_value > max_potential_mobility:
max_potential_mobility = potential_mobility_value
location = (rows[i], columns[i])
return location
def corners_player(self, board):
"""
Corners player uses the corners characteristic of the stones
:param board: the current state of the board
:return: A tuple representing the location of corners player's move
"""
valid_moves = self.game.find_valid_moves(self.computer_color, board, self.board_size)
rows, columns = np.where(valid_moves == 1)
max_corners = -200
location = (-2, -2)
for i in range(len(rows)):
temp_board = np.copy(board)
temp_board = self.game.flip_opponent_stones((rows[i], columns[i]), temp_board, self.board_size,
self.computer_num, self.opponent_num)
corners_value = self.stone_parity(temp_board)
if corners_value > max_corners:
max_corners = corners_value
location = (rows[i], columns[i])
return location
def combination(self, board):
    """
    Weighted sum of all six board heuristics, using the weights stored in
    ``self.heuristic_weights`` (stability, corners, mobility, potential
    mobility, stone parity, static stone score — in that order).

    :param board: the current state of the board
    :return: the combined heuristic value of the board
    """
    scores = (
        self.stability(board),
        self.corners(board),
        self.mobility(board),
        self.potential_mobility(board),
        self.stone_parity(board),
        self.stone_score_static(board),
    )
    return sum(weight * score for weight, score in zip(self.heuristic_weights, scores))
def stone_score_static(self, board):
    """
    Sum of the static positional weights over every square currently
    occupied by the computer's stones.

    :param board: the current state of the board (2-D numpy array)
    :return: the total static weight of the computer's stones
    """
    rows, columns = np.where(board == self.computer_num)
    return sum(self.static_weight[r][c] for r, c in zip(rows, columns))
def stone_parity(self, board):
    """
    The stone parity value is based on the players' immediate score after a
    specific move.

    :param board: the current state of the board (2-D numpy array)
    :return: 100 * (computer - opponent) / (computer + opponent), or 0 when
             neither player has a stone on the board
    """
    computer_score = sum(sum(board == self.computer_num))
    opponent_score = sum(sum(board == self.opponent_num))
    total = computer_score + opponent_score
    # Guard against an empty board; previously this raised ZeroDivisionError.
    # Matches the zero-denominator handling of mobility()/potential_mobility().
    if total == 0:
        return 0
    return 100 * (computer_score - opponent_score) / total
def mobility(self, board):
    """
    Relative mobility: compares how many immediate valid moves each player
    has on the given board.

    :param board: the current state of the board
    :return: 100 * (computer_moves - opponent_moves) / total_moves, or 0
             when neither player can move
    """
    own_moves = sum(sum(self.game.find_valid_moves(self.computer_color, board, self.board_size)))
    rival_moves = sum(sum(self.game.find_valid_moves(self.opponent_color, board, self.board_size)))
    total_moves = own_moves + rival_moves
    return 0 if total_moves == 0 else 100 * (own_moves - rival_moves) / total_moves
def potential_mobility(self, board):
"""
The potential mobility value is based on the players' potential possible moves after a specific
move in the near future.
:param board: the current state of the board
:return: potential mobility value
"""
valid_moves_computer = self.game.find_valid_moves(self.computer_color, board, self.board_size)
computer_counter = 0
temp = np.zeros(board.shape)
for i in range(self.board_size):
for j in range(self.board_size):
if board[i][j] == 0 and valid_moves_computer[i][j] == 0:
list_of_i = [i, i + 1, i - 1]
list_of_j = [j, j + 1, j - 1]
for k in list_of_i:
for l in list_of_j:
if (k, l) != (i, j):
if -1 < k < 8 and -1 < l < 8:
if board[k][l] == self.opponent_num and temp[i][j] == 0:
computer_counter += 1
temp[i][j] = 1
break
valid_moves_opponent = self.game.find_valid_moves(self.opponent_color, board, self.board_size)
opponent_counter = 0
temp = np.zeros(board.shape)
for i in range(self.board_size):
for j in range(self.board_size):
if board[i][j] == 0 and valid_moves_opponent[i][j] == 0:
list_of_i = [i, i + 1, i - 1]
list_of_j = [j, j + 1, j - 1]
for k in list_of_i:
for l in list_of_j:
if (k, l) != (i, j):
if -1 < k < 8 and -1 < l < 8:
if board[k][l] == self.computer_num and temp[i][j] == 0:
opponent_counter += 1
temp[i][j] = 1
break
if computer_counter + opponent_counter == 0:
return 0
return 100 * (computer_counter - opponent_counter) / (computer_counter + opponent_counter)
def corners(self, board):
    """
    The corners value is based on the players' current and potential captured
    corners after a specific move. A "common" potential corner is one that is
    currently a valid move for both players.

    :param board: the current state of the board (2-D numpy array)
    :return: 100 * numerator / denominator (see below), or 0 when the
             denominator is zero
    """
    last = self.board_size - 1
    corner_cells = ((0, 0), (0, last), (last, 0), (last, last))

    # Already captured corners for each player.
    computer_corners = sum(1 for r, c in corner_cells if board[r][c] == self.computer_num)
    opponent_corners = sum(1 for r, c in corner_cells if board[r][c] == self.opponent_num)

    # Corners reachable on the very next move for each player.
    valid_moves_computer = self.game.find_valid_moves(self.computer_color, board, self.board_size)
    valid_moves_opponent = self.game.find_valid_moves(self.opponent_color, board, self.board_size)
    computer_potential = sum(1 for r, c in corner_cells if valid_moves_computer[r][c] == 1)
    opponent_potential = sum(1 for r, c in corner_cells if valid_moves_opponent[r][c] == 1)

    # Corners reachable by BOTH players: the elementwise sum is 2 there.
    # BUG FIX: the (0, board_size-1) corner was previously compared against 1
    # instead of 2, unlike the other three corners.
    combined = valid_moves_opponent + valid_moves_computer
    common_potential = sum(1 for r, c in corner_cells if combined[r][c] == 2)

    computer_potential -= common_potential
    opponent_potential -= common_potential
    numerator = computer_corners + computer_potential - common_potential \
        - opponent_corners - opponent_potential
    denominator = computer_corners + computer_potential + common_potential \
        + opponent_corners + opponent_potential
    if denominator == 0:
        return 0
    return 100 * numerator / denominator
def stability(self, board):
"""
The stability value is based on the players' stable and unstable stones after a specific move.
A stable stone is a stone that cannot be replaced by the opponent's stone.
An unstable stone is a stone that can be replaced by the opponent's stone in its next move.
:param board: the current state of the board
:return: stability value
"""
# Stable stones
computer_board = self.get_stable_stones(board, self.computer_num)
computer_stable = sum(sum(computer_board == 100))
opponent_board = self.get_stable_stones(board, self.opponent_num)
opponent_stable = sum(sum(opponent_board == 100))
# Unstable stones are the ones which can be flanked in the next move
computer_board = self.get_unstable_stones(board, self.opponent_color, self.computer_num,
self.opponent_num, computer_board)
computer_unstable = sum(sum(computer_board == 200))
opponent_board = self.get_unstable_stones(board, self.computer_color, self.opponent_num,
self.computer_num, opponent_board)
opponent_unstable = sum(sum(opponent_board == 200))
# the reset is semi stable with weight 0, so it is not important
computer_stability = computer_stable - computer_unstable
opponent_stability = opponent_stable - opponent_unstable
if computer_stable + opponent_stable != 0:
return 100 * | |
def closeEvent(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def forceShow(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def labelText(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def maximum(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def minimum(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def minimumDuration(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def open(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def reset(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def resizeEvent(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setAutoClose(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setAutoReset(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setBar(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setCancelButton(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setCancelButtonText(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setLabel(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setLabelText(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setMaximum(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setMinimum(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setMinimumDuration(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setRange(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def setValue(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def showEvent(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def sizeHint(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def value(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
def wasCanceled(*args, **kwargs):
    # Stub only: no-op placeholder; the compiled Qt binding provides the real method.
    pass
__new__ = None
canceled = None
staticMetaObject = None
class QStyleOptionSlider(QStyleOptionComplex):
    """Auto-generated API stub: all attributes are None placeholders; the
    compiled Qt binding supplies the real values and behavior."""
    def __init__(*args, **kwargs):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        pass
    # Placeholder attributes; populated by the real Qt binding at runtime.
    dialWrapping = None
    maximum = None
    minimum = None
    notchTarget = None
    orientation = None
    pageStep = None
    singleStep = None
    sliderPosition = None
    sliderValue = None
    tickInterval = None
    tickPosition = None
    upsideDown = None
    StyleOptionType = None
    StyleOptionVersion = None
    Type = None
    Version = None
    __new__ = None
class QWizard(QDialog):
    """Auto-generated API stub: every method is a no-op and every attribute a
    None placeholder; the compiled Qt binding supplies the real behavior."""
    def __init__(*args, **kwargs):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        pass
    def addPage(*args, **kwargs):
        pass
    def back(*args, **kwargs):
        pass
    def button(*args, **kwargs):
        pass
    def buttonText(*args, **kwargs):
        pass
    def cleanupPage(*args, **kwargs):
        pass
    def currentId(*args, **kwargs):
        pass
    def currentPage(*args, **kwargs):
        pass
    def done(*args, **kwargs):
        pass
    def event(*args, **kwargs):
        pass
    def field(*args, **kwargs):
        pass
    def hasVisitedPage(*args, **kwargs):
        pass
    def initializePage(*args, **kwargs):
        pass
    def next(*args, **kwargs):
        pass
    def nextId(*args, **kwargs):
        pass
    def options(*args, **kwargs):
        pass
    def page(*args, **kwargs):
        pass
    def pageIds(*args, **kwargs):
        pass
    def paintEvent(*args, **kwargs):
        pass
    def pixmap(*args, **kwargs):
        pass
    def removePage(*args, **kwargs):
        pass
    def resizeEvent(*args, **kwargs):
        pass
    def restart(*args, **kwargs):
        pass
    def setButton(*args, **kwargs):
        pass
    def setButtonLayout(*args, **kwargs):
        pass
    def setButtonText(*args, **kwargs):
        pass
    def setDefaultProperty(*args, **kwargs):
        pass
    def setField(*args, **kwargs):
        pass
    def setOption(*args, **kwargs):
        pass
    def setOptions(*args, **kwargs):
        pass
    def setPage(*args, **kwargs):
        pass
    def setPixmap(*args, **kwargs):
        pass
    def setSideWidget(*args, **kwargs):
        pass
    def setStartId(*args, **kwargs):
        pass
    def setSubTitleFormat(*args, **kwargs):
        pass
    def setTitleFormat(*args, **kwargs):
        pass
    def setVisible(*args, **kwargs):
        pass
    def setWizardStyle(*args, **kwargs):
        pass
    def sideWidget(*args, **kwargs):
        pass
    def sizeHint(*args, **kwargs):
        pass
    def startId(*args, **kwargs):
        pass
    def subTitleFormat(*args, **kwargs):
        pass
    def testOption(*args, **kwargs):
        pass
    def titleFormat(*args, **kwargs):
        pass
    def validateCurrentPage(*args, **kwargs):
        pass
    def visitedPages(*args, **kwargs):
        pass
    def wizardStyle(*args, **kwargs):
        pass
    # Placeholder enum values, button/pixmap/style constants and signals;
    # all populated by the real Qt binding at runtime.
    AeroStyle = None
    BackButton = None
    BackgroundPixmap = None
    BannerPixmap = None
    CancelButton = None
    CancelButtonOnLeft = None
    ClassicStyle = None
    CommitButton = None
    CustomButton1 = None
    CustomButton2 = None
    CustomButton3 = None
    DisabledBackButtonOnLastPage = None
    ExtendedWatermarkPixmap = None
    FinishButton = None
    HaveCustomButton1 = None
    HaveCustomButton2 = None
    HaveCustomButton3 = None
    HaveFinishButtonOnEarlyPages = None
    HaveHelpButton = None
    HaveNextButtonOnLastPage = None
    HelpButton = None
    HelpButtonOnRight = None
    IgnoreSubTitles = None
    IndependentPages = None
    LogoPixmap = None
    MacStyle = None
    ModernStyle = None
    NButtons = None
    NPixmaps = None
    NStandardButtons = None
    NStyles = None
    NextButton = None
    NoBackButtonOnLastPage = None
    NoBackButtonOnStartPage = None
    NoButton = None
    NoCancelButton = None
    NoCancelButtonOnLastPage = None
    NoDefaultButton = None
    Stretch = None
    WatermarkPixmap = None
    WizardButton = None
    WizardOption = None
    WizardOptions = None
    WizardPixmap = None
    WizardStyle = None
    __new__ = None
    currentIdChanged = None
    customButtonClicked = None
    helpRequested = None
    pageAdded = None
    pageRemoved = None
    staticMetaObject = None
class QGraphicsPathItem(QAbstractGraphicsShapeItem):
    """Auto-generated API stub: every method is a no-op; the compiled Qt
    binding supplies the real behavior."""
    def __init__(*args, **kwargs):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        pass
    def boundingRect(*args, **kwargs):
        pass
    def contains(*args, **kwargs):
        pass
    def extension(*args, **kwargs):
        pass
    def isObscuredBy(*args, **kwargs):
        pass
    def opaqueArea(*args, **kwargs):
        pass
    def paint(*args, **kwargs):
        pass
    def path(*args, **kwargs):
        pass
    def setPath(*args, **kwargs):
        pass
    def shape(*args, **kwargs):
        pass
    def type(*args, **kwargs):
        pass
    __new__ = None
class QStyleOptionGroupBox(QStyleOptionComplex):
    """Auto-generated API stub: all attributes are None placeholders; the
    compiled Qt binding supplies the real values and behavior."""
    def __init__(*args, **kwargs):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        pass
    # Placeholder attributes; populated by the real Qt binding at runtime.
    features = None
    lineWidth = None
    midLineWidth = None
    text = None
    textAlignment = None
    textColor = None
    StyleOptionType = None
    StyleOptionVersion = None
    Type = None
    Version = None
    __new__ = None
class QStyleOptionSpinBox(QStyleOptionComplex):
    """Auto-generated API stub: all attributes are None placeholders; the
    compiled Qt binding supplies the real values and behavior."""
    def __init__(*args, **kwargs):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        pass
    # Placeholder attributes; populated by the real Qt binding at runtime.
    buttonSymbols = None
    frame = None
    stepEnabled = None
    StyleOptionType = None
    StyleOptionVersion = None
    Type = None
    Version = None
    __new__ = None
class QGraphicsSimpleTextItem(QAbstractGraphicsShapeItem):
    """Auto-generated API stub: every method is a no-op; the compiled Qt
    binding supplies the real behavior."""
    def __init__(*args, **kwargs):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        pass
    def boundingRect(*args, **kwargs):
        pass
    def contains(*args, **kwargs):
        pass
    def extension(*args, **kwargs):
        pass
    def font(*args, **kwargs):
        pass
    def isObscuredBy(*args, **kwargs):
        pass
    def opaqueArea(*args, **kwargs):
        pass
    def paint(*args, **kwargs):
        pass
    def setFont(*args, **kwargs):
        pass
    def setText(*args, **kwargs):
        pass
    def shape(*args, **kwargs):
        pass
    def text(*args, **kwargs):
        pass
    def type(*args, **kwargs):
        pass
    __new__ = None
class QPushButton(QAbstractButton):
    """Auto-generated API stub: every method is a no-op; the compiled Qt
    binding supplies the real behavior."""
    def __init__(*args, **kwargs):
        """
        x.__init__(...) initializes x; see help(type(x)) for signature
        """
        pass
    def autoDefault(*args, **kwargs):
        pass
    def event(*args, **kwargs):
        pass
    def focusInEvent(*args, **kwargs):
        pass
    def focusOutEvent(*args, **kwargs):
        pass
    def initStyleOption(*args, **kwargs):
        pass
    def isDefault(*args, **kwargs):
        pass
    def isFlat(*args, **kwargs):
        pass
    def keyPressEvent(*args, **kwargs):
        pass
    def menu(*args, **kwargs):
        pass
    def minimumSizeHint(*args, **kwargs):
        pass
    def paintEvent(*args, **kwargs):
        pass
    def setAutoDefault(*args, **kwargs):
        pass
    def setDefault(*args, **kwargs):
        pass
    def setFlat(*args, **kwargs):
        pass
    def setMenu(*args, **kwargs):
        pass
    def showMenu(*args, **kwargs):
        pass
    def sizeHint(*args, **kwargs):
        pass
    # Placeholders populated by the real Qt binding at runtime.
    __new__ = None
    staticMetaObject = None
class QAbstractScrollArea(QFrame):
def __init__(*args, **kwargs):
"""
x.__init__(...) initializes x; see help(type(x)) for signature
"""
pass
def addScrollBarWidget(*args, **kwargs):
pass
def contextMenuEvent(*args, **kwargs):
pass
def cornerWidget(*args, **kwargs):
pass
def dragEnterEvent(*args, **kwargs):
pass
def dragLeaveEvent(*args, **kwargs):
pass
def dragMoveEvent(*args, **kwargs):
pass
def dropEvent(*args, **kwargs):
pass
def event(*args, **kwargs):
pass
| |
"""
_validation = {
'limit': {'readonly': True},
'unit': {'readonly': True},
'status': {'readonly': True},
}
_attribute_map = {
'limit': {'key': 'limit', 'type': 'float'},
'unit': {'key': 'unit', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'reason': {'key': 'reason', 'type': 'str'},
}
def __init__(
    self,
    **kwargs
):
    """Initialize the capability model.

    :keyword reason: The reason for the capability not being available.
    """
    super(ElasticPoolPerDatabaseMinPerformanceLevelCapability, self).__init__(**kwargs)
    # limit/unit/status are read-only: the server populates them and any
    # client-supplied values are ignored on requests.
    self.limit = None
    self.unit = None
    self.status = None
    self.reason = kwargs.get('reason', None)
class ElasticPoolPerDatabaseSettings(msrest.serialization.Model):
    """Per-database resource limits applied inside an elastic pool.

    :param min_capacity: The minimum capacity all databases are guaranteed.
    :type min_capacity: float
    :param max_capacity: The maximum capacity any one database can consume.
    :type max_capacity: float
    """

    _attribute_map = {
        'min_capacity': {'key': 'minCapacity', 'type': 'float'},
        'max_capacity': {'key': 'maxCapacity', 'type': 'float'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Both limits are optional; missing keys default to None.
        self.min_capacity = kwargs.get('min_capacity')
        self.max_capacity = kwargs.get('max_capacity')
class ElasticPoolPerformanceLevelCapability(msrest.serialization.Model):
    """The Elastic Pool performance level capability.

    Every field except ``reason`` is populated by the server and ignored
    when sending a request.

    :ivar performance_level: The performance level for the pool.
    :ivar sku: The sku.
    :ivar supported_license_types: List of supported license types.
    :ivar max_database_count: The maximum number of databases supported.
    :ivar included_max_size: The included (free) max size for this performance level.
    :ivar supported_max_sizes: The list of supported max sizes.
    :ivar supported_per_database_max_sizes: The list of supported per database max sizes.
    :ivar supported_per_database_max_performance_levels: The list of supported
     per database max performance levels.
    :ivar zone_redundant: Whether or not zone redundancy is supported for the
     performance level.
    :ivar supported_maintenance_configurations: List of supported maintenance
     configurations.
    :ivar status: The status of the capability. Possible values include:
     "Visible", "Available", "Default", "Disabled".
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        'performance_level': {'readonly': True},
        'sku': {'readonly': True},
        'supported_license_types': {'readonly': True},
        'max_database_count': {'readonly': True},
        'included_max_size': {'readonly': True},
        'supported_max_sizes': {'readonly': True},
        'supported_per_database_max_sizes': {'readonly': True},
        'supported_per_database_max_performance_levels': {'readonly': True},
        'zone_redundant': {'readonly': True},
        'supported_maintenance_configurations': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'performance_level': {'key': 'performanceLevel', 'type': 'PerformanceLevelCapability'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'supported_license_types': {'key': 'supportedLicenseTypes', 'type': '[LicenseTypeCapability]'},
        'max_database_count': {'key': 'maxDatabaseCount', 'type': 'int'},
        'included_max_size': {'key': 'includedMaxSize', 'type': 'MaxSizeCapability'},
        'supported_max_sizes': {'key': 'supportedMaxSizes', 'type': '[MaxSizeRangeCapability]'},
        'supported_per_database_max_sizes': {'key': 'supportedPerDatabaseMaxSizes', 'type': '[MaxSizeRangeCapability]'},
        'supported_per_database_max_performance_levels': {'key': 'supportedPerDatabaseMaxPerformanceLevels', 'type': '[ElasticPoolPerDatabaseMaxPerformanceLevelCapability]'},
        'zone_redundant': {'key': 'zoneRedundant', 'type': 'bool'},
        'supported_maintenance_configurations': {'key': 'supportedMaintenanceConfigurations', 'type': '[MaintenanceConfigurationCapability]'},
        'status': {'key': 'status', 'type': 'str'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # All server-populated (read-only) fields start out as None.
        for _readonly in ('performance_level', 'sku', 'supported_license_types',
                          'max_database_count', 'included_max_size',
                          'supported_max_sizes', 'supported_per_database_max_sizes',
                          'supported_per_database_max_performance_levels',
                          'zone_redundant', 'supported_maintenance_configurations',
                          'status'):
            setattr(self, _readonly, None)
        self.reason = kwargs.get('reason')
class ElasticPoolUpdate(msrest.serialization.Model):
    """An elastic pool update.

    :param sku: An ARM Resource SKU.
    :param tags: A set of tags. Resource tags.
    :param max_size_bytes: The storage limit for the database elastic pool in bytes.
    :param per_database_settings: The per database settings for the elastic pool.
    :param zone_redundant: Whether or not this elastic pool is zone redundant,
     which means the replicas of this elastic pool will be spread across
     multiple availability zones.
    :param license_type: The license type to apply for this elastic pool.
     Possible values include: "LicenseIncluded", "BasePrice".
    :param maintenance_configuration_id: Maintenance configuration id assigned
     to the elastic pool; this configuration defines the period when the
     maintenance updates will occur.
    """

    _attribute_map = {
        'sku': {'key': 'sku', 'type': 'Sku'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'max_size_bytes': {'key': 'properties.maxSizeBytes', 'type': 'long'},
        'per_database_settings': {'key': 'properties.perDatabaseSettings', 'type': 'ElasticPoolPerDatabaseSettings'},
        'zone_redundant': {'key': 'properties.zoneRedundant', 'type': 'bool'},
        'license_type': {'key': 'properties.licenseType', 'type': 'str'},
        'maintenance_configuration_id': {'key': 'properties.maintenanceConfigurationId', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Every property is optional on an update; missing keys become None.
        for _name in ('sku', 'tags', 'max_size_bytes', 'per_database_settings',
                      'zone_redundant', 'license_type', 'maintenance_configuration_id'):
            setattr(self, _name, kwargs.get(_name))
class EncryptionProtector(ProxyResource):
    """The server encryption protector.

    Read-only fields (``id``, ``name``, ``type``, ``kind``, ``location``,
    ``subregion``, ``uri``, ``thumbprint``) are populated by the server and
    ignored when sending a request.

    :ivar kind: Kind of encryption protector (Azure-portal metadata).
    :ivar location: Resource location.
    :ivar subregion: Subregion of the encryption protector.
    :param server_key_name: The name of the server key.
    :type server_key_name: str
    :param server_key_type: The encryption protector type like
     'ServiceManaged', 'AzureKeyVault'. Possible values include:
     "ServiceManaged", "AzureKeyVault".
    :ivar uri: The URI of the server key.
    :ivar thumbprint: Thumbprint of the server key.
    :param auto_rotation_enabled: Key auto rotation opt-in flag. Either true or false.
    :type auto_rotation_enabled: bool
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'location': {'readonly': True},
        'subregion': {'readonly': True},
        'uri': {'readonly': True},
        'thumbprint': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'subregion': {'key': 'properties.subregion', 'type': 'str'},
        'server_key_name': {'key': 'properties.serverKeyName', 'type': 'str'},
        'server_key_type': {'key': 'properties.serverKeyType', 'type': 'str'},
        'uri': {'key': 'properties.uri', 'type': 'str'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
        'auto_rotation_enabled': {'key': 'properties.autoRotationEnabled', 'type': 'bool'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Server-populated (read-only) fields start as None.
        for _readonly in ('kind', 'location', 'subregion', 'uri', 'thumbprint'):
            setattr(self, _readonly, None)
        self.server_key_name = kwargs.get('server_key_name')
        self.server_key_type = kwargs.get('server_key_type')
        self.auto_rotation_enabled = kwargs.get('auto_rotation_enabled')
class EncryptionProtectorListResult(msrest.serialization.Model):
    """A paged list of server encryption protectors.

    Both fields are populated by the server and ignored when sending a request.

    :ivar value: Array of results.
    :ivar next_link: Link to retrieve next page of results.
    """

    _validation = {
        'value': {'readonly': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[EncryptionProtector]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Server-populated; never taken from kwargs.
        self.value = None
        self.next_link = None
class ExportDatabaseDefinition(msrest.serialization.Model):
    """Contains the information necessary to perform an export database operation.

    All required parameters must be populated in order to send to Azure.

    :param storage_key_type: Required. Storage key type. Possible values
     include: "SharedAccessKey", "StorageAccessKey".
    :param storage_key: Required. Storage key.
    :param storage_uri: Required. Storage Uri.
    :param administrator_login: Required. Administrator login name.
    :param administrator_login_password: Required. Administrator login password.
    :param authentication_type: Authentication type.
    :param network_isolation: Optional resource information to enable network
     isolation for the request.
    """

    _validation = {
        'storage_key_type': {'required': True},
        'storage_key': {'required': True},
        'storage_uri': {'required': True},
        'administrator_login': {'required': True},
        'administrator_login_password': {'required': True},
    }

    _attribute_map = {
        'storage_key_type': {'key': 'storageKeyType', 'type': 'str'},
        'storage_key': {'key': 'storageKey', 'type': 'str'},
        'storage_uri': {'key': 'storageUri', 'type': 'str'},
        'administrator_login': {'key': 'administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'administratorLoginPassword', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'str'},
        'network_isolation': {'key': 'networkIsolation', 'type': 'NetworkIsolationSettings'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super().__init__(**kwargs)
        # Required parameters: indexing kwargs raises KeyError when missing.
        for _required in ('storage_key_type', 'storage_key', 'storage_uri',
                          'administrator_login', 'administrator_login_password'):
            setattr(self, _required, kwargs[_required])
        self.authentication_type = kwargs.get('authentication_type')
        self.network_isolation = kwargs.get('network_isolation')
class ExtendedDatabaseBlobAuditingPolicy(ProxyResource):
"""An extended database blob auditing policy.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param predicate_expression: Specifies condition of where clause when creating an audit.
:type predicate_expression: str
:param retention_days: Specifies the number of days to keep in the audit logs in the storage
account.
:type retention_days: int
:param audit_actions_and_groups: Specifies the Actions-Groups and Actions to audit.
The recommended set of action groups to use is the following combination - this will audit all
the queries and stored procedures executed against the database, | |
outfile.write('>')
outfile.write(self.convert_unicode(self.valueOf_))
self.exportChildren(outfile, level + 1, namespace_='', name_='productDescriptionType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='productDescriptionType'):
    """Write this element's 'id' XML attribute to *outfile* (at most once)."""
    if self.id is None or 'id' in already_processed:
        return
    already_processed.add('id')
    outfile.write(' id=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.id), input_name='id')), ))
def exportChildren(self, outfile, level, namespace_='', name_='productDescriptionType', fromsubclass_=False, pretty_print=True):
    # This simple type has no child elements; nothing to export.
    pass
def build(self, node):
    """Populate this instance from an ElementTree node and return self."""
    processed = set()
    self.buildAttributes(node, node.attrib, processed)
    self.valueOf_ = get_all_text_(node)
    for element in node:
        tag_name = Tag_pattern_.match(element.tag).groups()[-1]
        self.buildChildren(element, node, tag_name)
    return self
def buildAttributes(self, node, attrs, already_processed):
    """Read the 'id' attribute from *node* into self.id (at most once)."""
    value = find_attr_value_('id', node)
    if value is None or 'id' in already_processed:
        return
    already_processed.add('id')
    self.id = value
def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
    # This simple type has no child elements; nothing to build.
    pass
# end class productDescriptionType
class renderInstructionsType(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, code=None, renderInstructions=None, valueOf_=None):
self.original_tagname_ = None
self.code = _cast(None, code)
self.renderInstructions = _cast(None, renderInstructions)
self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, renderInstructionsType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if renderInstructionsType.subclass:
return renderInstructionsType.subclass(*args_, **kwargs_)
else:
return renderInstructionsType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_code(self): return self.code
def set_code(self, code): self.code = code
def get_renderInstructions(self): return self.renderInstructions
def set_renderInstructions(self, renderInstructions): self.renderInstructions = renderInstructions
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='renderInstructionsType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('renderInstructionsType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
self.exportAttributes(outfile, level, already_processed, namespace_, name_='renderInstructionsType')
if self.hasContent_():
outfile.write('>')
outfile.write(self.convert_unicode(self.valueOf_))
self.exportChildren(outfile, level + 1, namespace_='', name_='renderInstructionsType', pretty_print=pretty_print)
outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='renderInstructionsType'):
        """Write the code/renderInstructions XML attributes, each at most once."""
        if self.code is not None and 'code' not in already_processed:
            already_processed.add('code')
            outfile.write(' code=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.code), input_name='code')), ))
        if self.renderInstructions is not None and 'renderInstructions' not in already_processed:
            already_processed.add('renderInstructions')
            outfile.write(' renderInstructions=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.renderInstructions), input_name='renderInstructions')), ))
    def exportChildren(self, outfile, level, namespace_='', name_='renderInstructionsType', fromsubclass_=False, pretty_print=True):
        # Text-only element: no child elements to serialise.
        pass
    def build(self, node):
        """Populate this instance from a parsed XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        self.valueOf_ = get_all_text_(node)
        for child in node:
            # Strip any namespace prefix from the child's tag before dispatch.
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        """Read the code/renderInstructions attributes off the XML node."""
        value = find_attr_value_('code', node)
        if value is not None and 'code' not in already_processed:
            already_processed.add('code')
            self.code = value
        value = find_attr_value_('renderInstructions', node)
        if value is not None and 'renderInstructions' not in already_processed:
            already_processed.add('renderInstructions')
            self.renderInstructions = value
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Text-only element: child nodes are not expected.
        pass
# end class renderInstructionsType
class sortDepotType(GeneratedsSuper):
    """Generated binding for the sortDepot XML element: a depot code, a
    sort-cell indicator and a sort-location code."""
    subclass = None
    superclass = None
    def __init__(self, depotCode=None, sortCellIndicator=None, sortLocationCode=None):
        self.original_tagname_ = None
        self.depotCode = depotCode
        self.sortCellIndicator = sortCellIndicator
        self.sortLocationCode = sortLocationCode
    def factory(*args_, **kwargs_):
        """Instantiate sortDepotType, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, sortDepotType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if sortDepotType.subclass:
            return sortDepotType.subclass(*args_, **kwargs_)
        else:
            return sortDepotType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated-style accessors.
    def get_depotCode(self): return self.depotCode
    def set_depotCode(self, depotCode): self.depotCode = depotCode
    def get_sortCellIndicator(self): return self.sortCellIndicator
    def set_sortCellIndicator(self, sortCellIndicator): self.sortCellIndicator = sortCellIndicator
    def get_sortLocationCode(self): return self.sortLocationCode
    def set_sortLocationCode(self, sortLocationCode): self.sortLocationCode = sortLocationCode
    def hasContent_(self):
        """Return True when any child element is populated."""
        if (
            self.depotCode is not None or
            self.sortCellIndicator is not None or
            self.sortLocationCode is not None
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='sortDepotType', namespacedef_='', pretty_print=True):
        """Serialise this element and its children to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('sortDepotType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name this element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='sortDepotType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='sortDepotType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='sortDepotType'):
        # No XML attributes on this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='sortDepotType', fromsubclass_=False, pretty_print=True):
        """Write the depotCode, sortCellIndicator and sortLocationCode children."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        if self.depotCode is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<depotCode>%s</depotCode>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.depotCode), input_name='depotCode')), eol_))
        if self.sortCellIndicator is not None:
            # Complex child: delegate to its own export().
            self.sortCellIndicator.export(outfile, level, namespace_, name_='sortCellIndicator', pretty_print=pretty_print)
        if self.sortLocationCode is not None:
            showIndent(outfile, level, pretty_print)
            outfile.write('<sortLocationCode>%s</sortLocationCode>%s' % (self.gds_encode(self.gds_format_string(quote_xml(self.sortLocationCode), input_name='sortLocationCode')), eol_))
    def build(self, node):
        """Populate this instance from a parsed XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes on this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        if nodeName_ == 'depotCode':
            depotCode_ = child_.text
            depotCode_ = self.gds_validate_string(depotCode_, node, 'depotCode')
            self.depotCode = depotCode_
        elif nodeName_ == 'sortCellIndicator':
            # NOTE(review): sortCellIndicator is built as renderInstructionsType —
            # presumably the schema declares it with that type; verify.
            obj_ = renderInstructionsType.factory()
            obj_.build(child_)
            self.sortCellIndicator = obj_
            obj_.original_tagname_ = 'sortCellIndicator'
        elif nodeName_ == 'sortLocationCode':
            sortLocationCode_ = child_.text
            sortLocationCode_ = self.gds_validate_string(sortLocationCode_, node, 'sortLocationCode')
            self.sortLocationCode = sortLocationCode_
# end class sortDepotType
class transitDepotListType(GeneratedsSuper):
    """Generated binding for the transitDepotList XML element: repeated
    transitDepot, actionDepot and sortDepot children."""
    subclass = None
    superclass = None
    def __init__(self, transitDepot=None, actionDepot=None, sortDepot=None):
        self.original_tagname_ = None
        # Each child is a list; default to a fresh empty list per instance.
        if transitDepot is None:
            self.transitDepot = []
        else:
            self.transitDepot = transitDepot
        if actionDepot is None:
            self.actionDepot = []
        else:
            self.actionDepot = actionDepot
        if sortDepot is None:
            self.sortDepot = []
        else:
            self.sortDepot = sortDepot
    def factory(*args_, **kwargs_):
        """Instantiate transitDepotListType, honouring any registered subclass override."""
        if CurrentSubclassModule_ is not None:
            subclass = getSubclassFromModule_(
                CurrentSubclassModule_, transitDepotListType)
            if subclass is not None:
                return subclass(*args_, **kwargs_)
        if transitDepotListType.subclass:
            return transitDepotListType.subclass(*args_, **kwargs_)
        else:
            return transitDepotListType(*args_, **kwargs_)
    factory = staticmethod(factory)
    # Generated-style list accessors for each repeated child.
    def get_transitDepot(self): return self.transitDepot
    def set_transitDepot(self, transitDepot): self.transitDepot = transitDepot
    def add_transitDepot(self, value): self.transitDepot.append(value)
    def insert_transitDepot_at(self, index, value): self.transitDepot.insert(index, value)
    def replace_transitDepot_at(self, index, value): self.transitDepot[index] = value
    def get_actionDepot(self): return self.actionDepot
    def set_actionDepot(self, actionDepot): self.actionDepot = actionDepot
    def add_actionDepot(self, value): self.actionDepot.append(value)
    def insert_actionDepot_at(self, index, value): self.actionDepot.insert(index, value)
    def replace_actionDepot_at(self, index, value): self.actionDepot[index] = value
    def get_sortDepot(self): return self.sortDepot
    def set_sortDepot(self, sortDepot): self.sortDepot = sortDepot
    def add_sortDepot(self, value): self.sortDepot.append(value)
    def insert_sortDepot_at(self, index, value): self.sortDepot.insert(index, value)
    def replace_sortDepot_at(self, index, value): self.sortDepot[index] = value
    def hasContent_(self):
        """Return True when any of the child lists is non-empty."""
        if (
            self.transitDepot or
            self.actionDepot or
            self.sortDepot
        ):
            return True
        else:
            return False
    def export(self, outfile, level, namespace_='', name_='transitDepotListType', namespacedef_='', pretty_print=True):
        """Serialise this element and its children to outfile as XML."""
        imported_ns_def_ = GenerateDSNamespaceDefs_.get('transitDepotListType')
        if imported_ns_def_ is not None:
            namespacedef_ = imported_ns_def_
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        # Preserve the tag name this element was originally parsed under.
        if self.original_tagname_ is not None:
            name_ = self.original_tagname_
        showIndent(outfile, level, pretty_print)
        outfile.write('<%s%s%s' % (namespace_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
        already_processed = set()
        self.exportAttributes(outfile, level, already_processed, namespace_, name_='transitDepotListType')
        if self.hasContent_():
            outfile.write('>%s' % (eol_, ))
            self.exportChildren(outfile, level + 1, namespace_='', name_='transitDepotListType', pretty_print=pretty_print)
            showIndent(outfile, level, pretty_print)
            outfile.write('</%s%s>%s' % (namespace_, name_, eol_))
        else:
            # No content: emit a self-closing tag.
            outfile.write('/>%s' % (eol_, ))
    def exportAttributes(self, outfile, level, already_processed, namespace_='', name_='transitDepotListType'):
        # No XML attributes on this element.
        pass
    def exportChildren(self, outfile, level, namespace_='', name_='transitDepotListType', fromsubclass_=False, pretty_print=True):
        """Write every transitDepot, actionDepot and sortDepot child in order."""
        if pretty_print:
            eol_ = '\n'
        else:
            eol_ = ''
        for transitDepot_ in self.transitDepot:
            transitDepot_.export(outfile, level, namespace_, name_='transitDepot', pretty_print=pretty_print)
        for actionDepot_ in self.actionDepot:
            actionDepot_.export(outfile, level, namespace_, name_='actionDepot', pretty_print=pretty_print)
        for sortDepot_ in self.sortDepot:
            sortDepot_.export(outfile, level, namespace_, name_='sortDepot', pretty_print=pretty_print)
    def build(self, node):
        """Populate this instance from a parsed XML node; returns self."""
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
            self.buildChildren(child, node, nodeName_)
        return self
    def buildAttributes(self, node, attrs, already_processed):
        # No XML attributes on this element.
        pass
    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Each recognised child tag appends a freshly-built object to its list.
        if nodeName_ == 'transitDepot':
            obj_ = depotType.factory()
            obj_.build(child_)
            self.transitDepot.append(obj_)
            obj_.original_tagname_ = 'transitDepot'
        elif nodeName_ == 'actionDepot':
            obj_ = actionDepotType.factory()
            obj_.build(child_)
            self.actionDepot.append(obj_)
            obj_.original_tagname_ = 'actionDepot'
        elif nodeName_ == 'sortDepot':
            obj_ = sortDepotType.factory()
            obj_.build(child_)
            self.sortDepot.append(obj_)
            obj_.original_tagname_ = 'sortDepot'
# end class transitDepotListType
class twoDBarcodeType(GeneratedsSuper):
subclass = None
superclass = None
    def __init__(self, symbology=None, valueOf_=None):
        """Build a twoDBarcodeType node; valueOf_ holds the element's text content."""
        self.original_tagname_ = None
        # _cast(None, x) — presumably passes the attribute through uncoerced; confirm against the _cast helper.
        self.symbology = _cast(None, symbology)
        self.valueOf_ = valueOf_
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
CurrentSubclassModule_, twoDBarcodeType)
if subclass is not None:
return subclass(*args_, **kwargs_)
if twoDBarcodeType.subclass:
return twoDBarcodeType.subclass(*args_, **kwargs_)
else:
return twoDBarcodeType(*args_, **kwargs_)
factory = staticmethod(factory)
def get_symbology(self): return self.symbology
def set_symbology(self, symbology): self.symbology = symbology
def get_valueOf_(self): return self.valueOf_
def set_valueOf_(self, valueOf_): self.valueOf_ = valueOf_
def hasContent_(self):
if (
(1 if type(self.valueOf_) in [int,float] else self.valueOf_)
):
return True
else:
return False
def export(self, outfile, level, namespace_='', name_='twoDBarcodeType', namespacedef_='', pretty_print=True):
imported_ns_def_ = GenerateDSNamespaceDefs_.get('twoDBarcodeType')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
eol_ = '\n'
else:
eol_ = ''
if self.original_tagname_ is not None:
name_ = self.original_tagname_
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespace_, name_, | |
custom error message for smaller, printable tensors
if t.numel() < 10:
msg = ("Failed to produce expected results! Input tensor was"
" {0}, torch result is {1}, and reference result is"
" {2}.").format(t, actual, expected)
else:
msg = None
exact_dtype = True
if isinstance(actual, torch.Tensor):
_helper_reference_numerics(expected, actual, msg, exact_dtype, equal_nan)
else:
for x, y in zip(expected, actual):
# testing multi-outputs results
_helper_reference_numerics(x, y, msg, exact_dtype, equal_nan)
    # Tests that the function and its (array-accepting) reference produce the same
    # values on a range of tensors, including empty tensors, scalar tensors,
    # 1D tensors and a large 2D tensor with interesting and extremal values
    # and noncontiguities.
    @suppress_warnings
    @ops(reference_filtered_ops)
    def test_reference_numerics_normal(self, device, dtype, op):
        """Compare op against its reference on ordinary (non-extremal) inputs."""
        tensors = generate_numeric_tensors(device, dtype,
                                           domain=op.domain)
        self._test_reference_numerics(dtype, op, tensors)
    @suppress_warnings
    @ops(reference_filtered_ops, allowed_dtypes=floating_and_complex_types_and(
        torch.bfloat16, torch.half, torch.int8, torch.int16, torch.int32, torch.int64
    ))
    def test_reference_numerics_hard(self, device, dtype, op):
        """Compare op against its reference on large-magnitude inputs."""
        if not op.handles_large_floats:
            raise self.skipTest("This op does not handle large values")
        tensors = generate_numeric_tensors_hard(device, dtype,
                                                domain=op.domain)
        self._test_reference_numerics(dtype, op, tensors)
    @suppress_warnings
    @ops(reference_filtered_ops,
         allowed_dtypes=floating_and_complex_types_and(torch.bfloat16, torch.half))
    def test_reference_numerics_extremal(self, device, dtype, op):
        """Compare op against its reference on extremal values (inf/nan)."""
        # Complex dtypes have a separate extremal-handling flag on the OpInfo.
        handles_extremals = (op.handles_complex_extremals if
                             dtype in (torch.cfloat, torch.cdouble) else op.handles_extremals)
        if not handles_extremals:
            raise self.skipTest("This op does not handle extremal values")
        tensors = generate_numeric_tensors_extremal(device, dtype,
                                                    domain=op.domain)
        self._test_reference_numerics(dtype, op, tensors)
    # Tests for testing (non)contiguity consistency
    @ops(unary_ufuncs)
    def test_contig_vs_every_other(self, device, dtype, op):
        """Op must give the same result on a strided view as on contiguous data."""
        contig = make_tensor((1026,), device=device, dtype=dtype,
                             low=op.domain[0], high=op.domain[1])
        # Every-other-element view: same data, non-contiguous layout.
        non_contig = contig[::2]
        self.assertTrue(contig.is_contiguous())
        self.assertFalse(non_contig.is_contiguous())
        torch_kwargs, _ = op.sample_kwargs(device, dtype, non_contig)
        self.assertEqual(op(contig, **torch_kwargs)[::2], op(non_contig, **torch_kwargs))
    @ops(unary_ufuncs)
    def test_contig_vs_transposed(self, device, dtype, op):
        """Op must give the same result on a transposed view as on contiguous data."""
        contig = make_tensor((789, 357), device=device, dtype=dtype,
                             low=op.domain[0], high=op.domain[1])
        # Transposed view: same data, non-contiguous layout.
        non_contig = contig.T
        self.assertTrue(contig.is_contiguous())
        self.assertFalse(non_contig.is_contiguous())
        torch_kwargs, _ = op.sample_kwargs(device, dtype, contig)
        self.assertEqual(op(contig, **torch_kwargs).T, op(non_contig, **torch_kwargs))
@ops(unary_ufuncs)
def test_non_contig(self, device, dtype, op):
shapes = [(5, 7), (1024,)]
for shape in shapes:
contig = make_tensor(shape, device, dtype,
low=op.domain[0], high=op.domain[1])
non_contig = torch.empty(shape + (2,), device=device, dtype=dtype)[..., 0]
non_contig.copy_(contig)
self.assertTrue(contig.is_contiguous())
self.assertFalse(non_contig.is_contiguous())
torch_kwargs, _ = op.sample_kwargs(device, dtype, contig)
self.assertEqual(op(contig, **torch_kwargs), op(non_contig, **torch_kwargs))
@ops(unary_ufuncs)
def test_non_contig_index(self, device, dtype, op):
contig = make_tensor((2, 2, 1, 2), device, dtype,
low=op.domain[0], high=op.domain[1])
non_contig = contig[:, 1, ...]
contig = non_contig.contiguous()
self.assertTrue(contig.is_contiguous())
self.assertFalse(non_contig.is_contiguous())
torch_kwargs, _ = op.sample_kwargs(device, dtype, contig)
self.assertEqual(op(contig, **torch_kwargs), op(non_contig, **torch_kwargs))
@ops(unary_ufuncs)
def test_non_contig_expand(self, device, dtype, op):
shapes = [(1, 3), (1, 7), (5, 7)]
for shape in shapes:
contig = make_tensor(shape, device, dtype,
low=op.domain[0], high=op.domain[1])
non_contig = contig.clone().expand(3, -1, -1)
self.assertTrue(contig.is_contiguous())
self.assertFalse(non_contig.is_contiguous())
torch_kwargs, _ = op.sample_kwargs(device, dtype, contig)
contig = op(contig, **torch_kwargs)
non_contig = op(non_contig, **torch_kwargs)
for i in range(3):
self.assertEqual(contig, non_contig[i],
msg='non-contiguous expand[' + str(i) + ']')
@ops(unary_ufuncs)
def test_contig_size1(self, device, dtype, op):
contig = make_tensor((5, 100), device, dtype,
low=op.domain[0], high=op.domain[1])
contig = contig[:1, :50]
contig2 = torch.empty(contig.size(), device=device, dtype=dtype)
contig2.copy_(contig)
self.assertTrue(contig.is_contiguous())
self.assertTrue(contig2.is_contiguous())
torch_kwargs, _ = op.sample_kwargs(device, dtype, contig)
self.assertEqual(op(contig, **torch_kwargs), op(contig2, **torch_kwargs))
@ops(unary_ufuncs)
def test_contig_size1_large_dim(self, device, dtype, op):
contig = make_tensor((5, 2, 3, 1, 4, 5, 3, 2, 1, 2, 3, 4), device, dtype,
low=op.domain[0], high=op.domain[1])
contig = contig[:1, :, :, :, :, :, :, :, :, :, :, :]
contig2 = torch.empty(contig.size(), device=device, dtype=dtype)
contig2.copy_(contig)
self.assertTrue(contig.is_contiguous())
self.assertTrue(contig2.is_contiguous())
torch_kwargs, _ = op.sample_kwargs(device, dtype, contig)
self.assertEqual(op(contig, **torch_kwargs), op(contig2, **torch_kwargs))
# Tests that computation on a multiple batches is the same as
# per-batch computation.
@ops(unary_ufuncs)
def test_batch_vs_slicing(self, device, dtype, op):
input = make_tensor((1024, 512), dtype=dtype, device=device,
low=op.domain[0], high=op.domain[1])
torch_kwargs, _ = op.sample_kwargs(device, dtype, input)
actual = op(input, **torch_kwargs)
expected = torch.stack([op(slice, **torch_kwargs) for slice in input])
self.assertEqual(actual, expected)
    def _test_out_arg(self, op, input, output, expected, **kwargs):
        """Check op(input, out=output): either raises on a bad out dtype or fills output.

        Ops that safe-cast their outputs only fail when the result cannot be
        cast to the out tensor's dtype; otherwise the dtypes must match exactly.
        """
        if op.safe_casts_outputs:
            expect_fail = not torch.can_cast(expected.dtype, output.dtype)
        else:
            expect_fail = output.dtype != expected.dtype
        if expect_fail:
            with self.assertRaises(RuntimeError):
                op(input, out=output, **kwargs)
        else:
            res = op(input, out=output, **kwargs)
            # out= must return the very tensor that was passed in.
            self.assertTrue(res is output)
            self.assertEqual(output, expected.to(output.dtype))
    @ops(unary_ufuncs, dtypes=OpDTypes.supported)
    def test_out_arg_all_dtypes(self, device, dtype, op):
        """Exercise the out= kwarg against every candidate out dtype."""
        if not op.supports_out:
            self.skipTest("Skipped! Op doesn't support out= kwarg.")
        input = make_tensor((64, 64), dtype=dtype, device=device,
                            low=op.domain[0], high=op.domain[1])
        torch_kwargs, _ = op.sample_kwargs(device, dtype, input)
        expected = op(input, **torch_kwargs)
        for out_dtype in all_types_and_complex_and(torch.bool, torch.half):
            out = torch.empty_like(input, dtype=out_dtype)
            self._test_out_arg(op, input, out, expected, **torch_kwargs)
    @dtypes(*(get_all_int_dtypes() + [torch.bool] +
              get_all_fp_dtypes(include_bfloat16=False)))
    def test_nan_to_num(self, device, dtype):
        """nan_to_num: compare against numpy and check the out= variant,
        on both contiguous and transposed inputs."""
        for contiguous in [False, True]:
            x = make_tensor((64, 64), low=0., high=100., dtype=dtype, device=device)
            if dtype.is_floating_point:
                # Add extremal values.
                extremals = [float('nan'), float('inf'), -float('inf')]
                for idx, extremal in zip(torch.randint(0, 63, (3,)), extremals):
                    x[idx, :] = extremal
            if not contiguous:
                x = x.T
            # With args
            nan = random.random()
            posinf = random.random() * 5
            # NOTE(review): neginf is a *positive* random value here; torch and
            # numpy receive the same value so the comparison is still valid,
            # but a negative sentinel may have been intended — confirm.
            neginf = random.random() * 10
            self.compare_with_numpy(lambda x: x.nan_to_num(nan=nan, posinf=posinf),
                                    lambda x: np.nan_to_num(x, nan=nan, posinf=posinf),
                                    x)
            self.compare_with_numpy(lambda x: x.nan_to_num(posinf=posinf, neginf=neginf),
                                    lambda x: np.nan_to_num(x, posinf=posinf, neginf=neginf),
                                    x)
            # Out Variant
            out = torch.empty_like(x)
            result = torch.nan_to_num(x)
            torch.nan_to_num(x, out=out)
            self.assertEqual(result, out)
            result = torch.nan_to_num(x, nan=nan, posinf=posinf, neginf=neginf)
            torch.nan_to_num(x, out=out, nan=nan, posinf=posinf, neginf=neginf)
            self.assertEqual(result, out)
    @dtypes(torch.cdouble)
    def test_complex_edge_values(self, device, dtype):
        """Regression checks for complex sqrt/acos at large-magnitude inputs."""
        # sqrt Test Reference: https://github.com/pytorch/pytorch/pull/47424
        x = torch.tensor(0. - 1.0e+20j, dtype=dtype, device=device)
        self.compare_with_numpy(torch.sqrt, np.sqrt, x)
        # acos test reference: https://github.com/pytorch/pytorch/issues/42952
        # Skip on Windows, as CUDA acos returns conjugate value
        # see https://github.com/pytorch/pytorch/issues/52299
        if not (IS_WINDOWS and dtype == torch.cdouble and "cuda" in device):
            self.compare_with_numpy(torch.acos, np.arccos, x)
        x = torch.tensor((-1.0e+60 if dtype == torch.cdouble else -1.0e+20) - 4988429.2j, dtype=dtype, device=device)
        self.compare_with_numpy(torch.sqrt, np.sqrt, x)
    @unittest.skipIf(not TEST_SCIPY, "Requires SciPy")
    @dtypes(torch.float, torch.double)
    def test_digamma_special(self, device, dtype):
        """Check digamma at analytically-known special points against SciPy."""
        # Based on SciPy test for the following special values.
        # Reference:
        # https://github.com/scipy/scipy/blob/3a8a3a1d4657254a6611e77e9c28feafa26e6645/scipy/special/tests/test_digamma.py#L22
        euler = 0.57721566490153286
        dataset = [(0., -0.),
                   (1, -euler),
                   (0.5, -2 * math.log(2) - euler),
                   (1 / 3, -math.pi / (2 * math.sqrt(3)) - 3 * math.log(3) / 2 - euler),
                   (1 / 4, -math.pi / 2 - 3 * math.log(2) - euler),
                   (1 / 6, -math.pi * math.sqrt(3) / 2 - 2 * math.log(2) - 3 * math.log(3) / 2 - euler),
                   (1 / 8, -math.pi / 2 - 4 * math.log(2) -
                    (math.pi + math.log(2 + math.sqrt(2)) - math.log(2 - math.sqrt(2))) / math.sqrt(2) - euler)]
        x = torch.tensor(dataset, device=device, dtype=dtype)
        self.compare_with_numpy(torch.digamma, scipy.special.digamma, x)
    @unittest.skipIf(not TEST_SCIPY, "Requires SciPy")
    @dtypes(torch.float, torch.double)
    def test_digamma(self, device, dtype):
        """Check digamma near its poles (non-positive integers) against SciPy."""
        # Tests pole behavior
        tensor = torch.tensor([-0.999999994, -1.999999994, -2.0000000111,
                               -100.99999994, 0.000000111, -1931.99999994,
                               -0.000000111, 0, -0, -1, -2, -931], dtype=dtype, device=device)
        self.compare_with_numpy(torch.digamma, scipy.special.digamma, tensor)
@skipCUDAIfRocm
@dtypes(*get_all_fp_dtypes(include_half=True, include_bfloat16=False))
def test_frexp(self, device, dtype):
input = make_tensor((50, 50), device, dtype)
mantissa, exponent = torch.frexp(input)
np_mantissa, np_exponent = np.frexp(input.cpu().numpy())
self.assertEqual(mantissa, np_mantissa)
self.assertEqual(exponent, np_exponent)
# torch.frexp returns exponent in int32 to be compatible with np.frexp
self.assertTrue(exponent.dtype == torch.int32)
self.assertTrue(torch_to_numpy_dtype_dict[exponent.dtype] == np_exponent.dtype)
@skipCUDAIfRocm
@dtypes(*get_all_fp_dtypes(include_half=True, include_bfloat16=False))
def test_frexp_out(self, device, dtype):
input = make_tensor((50, 50), device, dtype)
outputs = (
(torch.empty_like(input), torch.empty_like(input, dtype=torch.int)),
(torch.empty_like(input).transpose(0, 1), make_tensor((50, 50), device, torch.int, noncontiguous=True)),
)
for mantissa, exponent in outputs:
torch.frexp(input, out=(mantissa, exponent))
np_mantissa, np_exponent = np.frexp(input.cpu().numpy())
self.assertEqual(mantissa, np_mantissa)
self.assertEqual(exponent, np_exponent)
# The warning is given when output tensors have wrong shape
with warnings.catch_warnings(record=True) as w:
mantissa = torch.empty((2, 2), device=device, dtype=dtype)
exponent = torch.empty((5, 5), device=device, dtype=torch.int)
torch.frexp(input, out=(mantissa, exponent))
self.assertEqual(len(w), 2)
self.assertTrue("An output with one or more elements was resized" in str(w[0].message))
self.assertTrue("An output with one or more elements was resized" in str(w[1].message))
@skipCUDAIfRocm
def test_frexp_assert_raises(self, device):
invalid_input_dtypes = get_all_int_dtypes() + \
get_all_complex_dtypes() + \
[torch.bool]
for dtype in invalid_input_dtypes:
input = make_tensor((50, 50), device, dtype)
with self.assertRaisesRegex(RuntimeError, r"torch\.frexp\(\) only supports floating-point dtypes"):
torch.frexp(input)
for dtype in get_all_fp_dtypes(include_half=True, include_bfloat16=False):
input = make_tensor((50, 50), device, dtype)
dtypes = list(all_types_and_complex_and(torch.bool, torch.half, torch.bfloat16))
dtypes.remove(dtype)
for mantissa_dtype in dtypes:
mantissa = torch.empty_like(input, dtype=mantissa_dtype)
exponent = torch.empty_like(input, dtype=torch.int)
with self.assertRaisesRegex(RuntimeError,
r"torch\.frexp\(\) expects mantissa to have dtype .+ but got .+"):
torch.frexp(input, out=(mantissa, exponent))
dtypes.append(dtype)
dtypes.remove(torch.int)
for exponent_dtype in dtypes:
mantissa = torch.empty_like(input)
exponent = torch.empty_like(input, dtype=exponent_dtype)
with self.assertRaisesRegex(RuntimeError,
r"torch\.frexp\(\) expects exponent to have int dtype but got .+"):
torch.frexp(input, out=(mantissa, exponent))
def test_mvlgamma_argcheck(self, device):
def run_test(d):
input = torch.linspace((d - 2) / 2, 10, 10, device=device)
torch.mvlgamma(input, d)
with self.assertRaisesRegex(RuntimeError, r"All elements must be greater than \(p-1\)/2"):
run_test(3)
    def test_polygamma_neg(self, device):
        """polygamma must reject a negative order n with a clear error."""
        with self.assertRaisesRegex(RuntimeError, r'polygamma\(n, x\) does not support negative n\.'):
            torch.polygamma(-1, torch.tensor([1.0, 2.0], device=device))
# TODO resolve with opinfos
@onlyCPU
def test_op_invert(self, device):
res = 0xffff - torch.arange(127, dtype=torch.int8)
for dtype in (torch.uint8, torch.int8, torch.int16, torch.int32, torch.int64):
a = torch.arange(127, dtype=dtype)
self.assertEqual(res.to(dtype), ~a)
self.assertEqual(torch.tensor([True, False]), ~torch.tensor([False, True]))
# test exceptions
for dtype in (torch.half, torch.float, torch.double):
| |
'DOWN-AND-IN-BARRIER':
V_Nt = np.maximum(s - self.strike, 0) * np.where(s >= self.barrier, 1, 0)
else:
V_Nt = np.maximum(s - self.strike, 0) * np.where(s <= self.barrier, 1, 0)
payoff = np.maximum(s - self.strike, 0)
elif self.position == 'PUT':
if self.option_type == 'DOWN-AND-OUT-BARRIER' or self.option_type == 'DOWN-AND-IN-BARRIER':
V_Nt = np.maximum(- s + self.strike, 0) * np.where(s >= self.barrier, 1, 0)
else:
V_Nt = np.maximum(- s + self.strike, 0) * np.where(s <= self.barrier, 1, 0)
payoff = np.maximum(- s + self.strike, 0)
# initialize the Dirichlet boundary condition
if self.position == "CALL":
f_0 = np.linspace(0, 0, Nt + 1)
if self.option_type == 'DOWN-AND-OUT-BARRIER' or self.option_type == 'DOWN-AND-IN-BARRIER':
f_Ns = Smax * np.exp(-self.r * np.linspace(0, self.maturity, Nt + 1)
) - self.strike * np.exp(-self.r * (np.linspace(0, self.maturity, Nt + 1)))
else:
f_Ns = np.linspace(0, 0, Nt + 1)
elif self.position == "PUT":
if self.option_type == 'DOWN-AND-OUT-BARRIER' or self.option_type == 'DOWN-AND-IN-BARRIER':
f_0 = np.linspace(0, 0, Nt + 1)
else:
f_0 = self.strike * np.exp(-self.r * np.linspace(0, self.maturity, Nt + 1))
f_Ns = np.linspace(0, 0, Nt + 1)
# initialize the tridiagonal matrix by scalar-form
delta_s_i = 0.5 * (s[2:] - s[0:Ns - 1])
delta_s_plus = s[2:] - s[1:Ns]
delta_s_minus = s[1:Ns] - s[0:Ns - 1]
# from a_2 to a_I-1 are in the calculation matrix
a = - (1.0 - theta) * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_minus) + (
1 - theta) * mu * s[1:Ns] / (2 * delta_s_i)
# from b_1 to b_I-1 are in the calculation matrix
b = 1.0 / dt + (1 - theta) * self.r + (1.0 - theta) * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_minus)
b = b + (1.0 - theta) * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_plus)
# from c_1 to c_I-2 are in the calculation matrix
c = - (1.0 - theta) * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_plus) - (
1 - theta) * mu * s[1:Ns] / (2 * delta_s_i)
# from alpha_2 to alpha_I-1 are in the calculation matrix
alpha = theta * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_minus
) - theta * mu * s[1:Ns] / (2 * delta_s_i)
# from beta_1 to beta_I-1 are in the calculation matrix
beta = 1.0 / dt - theta * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_minus) - self.r * theta
beta = beta - theta * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_plus)
# from gamma_1 to gamma_I-2 are in the calculation matrix
gamma = theta * self.sig **2 * s[1:Ns] **2 / (2.0 * delta_s_i * delta_s_plus) + theta * mu * s[1:Ns] / (
2 * delta_s_i)
# From Nt to 1, calculate V_Nt-1, V_Nt-2, ..., V_0 (vectors)
V_Nplus = V_Nt[1:Ns]
for k in range(Nt, 0, -1):
#for k in range(1,0,-1):
#V_Nplus : b of Ax=b
V_Nplus = self.my_dot_product(alpha, beta, gamma, V_Nplus)
V_Nplus[0] = V_Nplus[0] - a[0] * f_0[k-1] + alpha[0] * f_0[k]
V_Nplus[Ns-2] = V_Nplus[Ns-2] - c[Ns-2] * f_Ns[k-1] + gamma[Ns-2] * f_Ns[k]
#V_N : Intial Guess for american case / x of Ax=b for european case
ab = self.tri_bound_to_ab(a,b,c)
V_N = linalg.solve_banded((1, 1), ab, V_Nplus)
#American process
if self.exercise_type == 'AMERICAN':
V_N = self.Projected_SOR(a[1:],b,c[:-1], V_Nplus, V_N, payoff[1:-1], k, step, s[1:Ns])
V_Nplus = V_N
# linear interpolation
index = sum(s < self.spot_price)
w = (self.spot_price - s[index-1]) / (s[index] - s[index-1])
v_0 = V_Nplus[index-1] * (1 - w) + w * V_Nplus[index]
'''
Above process is only for knock out option
'''
if self.option_type == 'UP-AND-OUT-BARRIER' or self.option_type == 'DOWN-AND-OUT-BARRIER':
return v_0
else:
if self.position == 'CALL':
v_0 = self.Black_Scholes_Call() - v_0
return v_0
else:
if self.exercise_type == 'EUROPEAN':
v_0 = self.Black_Scholes_Put() - v_0
return v_0
else:
v_0 = self.BTM_Vanilla(1200) - v_0
return v_0
#========
def FDM_Vanilla_Implicit(self, Ns, Nt, m):
'''
Abstract:
--------
Finite difference method for vanilla option.
Trivial implicit method.
Parameters:
----------
Ns: Number of points in price axis
Nt: Number of points in time axis
m : monitoring times
'''
# discretize Nt-1 points between every two monitoring time, total Nt*m + 1 gird in time axis
step = Nt
Nt = Nt * m
# set up parameters
mu = self.r - self.q
_range = 3 * self.sig * np.sqrt(self.maturity)
Smax = self.spot_price * np.exp((mu - self.sig ** 2 / 2.0) * self.maturity + _range)
Smin = 0
# totally Nt + 1 in row grid
dt = self.maturity/ float(Nt)
ds = (Smax - Smin) / float(Ns) # totally Ns + 1 in column grid
# initialize the payoff
sGrid = np.linspace(Smin, Smax, Ns + 1)
if self.position.upper() == "CALL":
V_Nt = np.maximum(sGrid - self.strike, 0)
elif self.position.upper() == "PUT":
V_Nt = np.maximum(self.strike - sGrid, 0)
s = np.linspace(Smin, Smax, Ns + 1)
payoff = np.maximum(s - self.strike, 0)
# initialize the Dirichlet boundary condition
if self.position.upper() == "CALL":
f_0 = np.linspace(0, 0, Nt + 1)
f_Ns = Smax * np.exp(-self.q * np.linspace(0, self.maturity, Nt + 1)
) - self.strike * np.exp(-self.r * (np.linspace(0, self.maturity, Nt + 1)))
elif self.position.upper() == "PUT":
f_0 = self.strike * np.exp(-self.r * np.linspace(0, self.maturity, Nt + 1))
f_Ns = np.linspace(0, 0, Nt + 1)
else:
raise ValueError('Invalid option_type!!')
# initialize the tridiagonal matrix by scalar-form
i = np.linspace(1, Ns - 1, Ns - 1)
# from a_2 to a_I-1 are in the calculation matrix
a = -(self.sig ** 2 * i ** 2 - (self.r - self.q) * i) * dt / 2.0
# from b_1 to b_I-1 are in the calculation matrix
b = 1 + self.sig ** 2 * i ** 2 * dt + self.r * dt
# from c_1 to c_I-2 are in the calculation matrix
c = -(self.sig ** 2 * i ** 2 + (self.r - self.q) * i) * dt / 2.0
# From Nt to 1, calculate V_Nt-1, V_Nt-2, ..., V_0 (vectors)
V_Nplus = V_Nt[1:Ns]
for k in range(Nt, 0, -1):
V_Nplus[0] = V_Nplus[0] - a[0] * f_0[k]
V_Nplus[Ns-2] = V_Nplus[Ns-2] - c[Ns-2] * f_Ns[k]
ab = self.tri_bound_to_ab(a,b,c)
V_N = linalg.solve_banded((1, 1), ab, V_Nplus)
#American process
if self.exercise_type == 'AMERICAN':
V_N = self.Projected_SOR(a[1:],b,c[:-1], V_Nplus, V_N, payoff[1:-1], k, step, s[1:Ns])
V_Nplus = V_N
# linear interpolation
w = (self.spot_price - sGrid[int(self.spot_price/ds)]) / (sGrid[int(self.spot_price/ds) + 1] - sGrid[int(self.spot_price/ds)])
v_0 = V_N[int(self.spot_price/ds)] * (1 - w) + w * V_N[int(self.spot_price/ds) + 1]
return v_0
#========
def Monte_Carlo_Vanilla(self, path_num):
'''
Abstract:
--------
Monte Carlo method for European vanilla option.
Parameter:
---------
path_num : number of simulation times
'''
mu = self.r - self.q
simulated_underlier_price_list = [self.spot_price * np.exp((mu - 0.5 * self.sig ** 2
) * self.maturity + self.sig * np.sqrt(self.maturity) * np.random.normal(0, 1, 1)) for i in range(path_num)]
simulated_underlier_price_list = [item[0] for item in simulated_underlier_price_list]
if self.position == 'CALL':
simulated_option_price_list = [max(temp_price - self.strike, 0) for temp_price in simulated_underlier_price_list]
else:
simulated_option_price_list = [max( - temp_price + self.strike, 0) for temp_price in simulated_underlier_price_list]
expectation = sum(simulated_option_price_list) / len(simulated_option_price_list) * np.exp(-self.r * self.maturity)
return expectation
#=====================================================================================
def main(option_position):
'''
Show pricing result of various method
'''
r = 0.05
q = 0
spot_price = 100
sig = 0.2
T = 1
T_switch = 2
strike = 90
option_type = 'down-and-out-barrier'
exercise_type = 'european'
position = option_position
analytical_price_list = []
trivial_pde_price_list = []
improved_pde_price_list = []
mc_price_list = []
strike_list = [80 + i for i in range(41)]
print('Test for %s options' % option_position)
print(' ')
for strike in strike_list:
test_option = option(r, q, spot_price, strike, sig, T, option_type, exercise_type, position,T_switch, barrier = 0.000001)
'''
analytical result
'''
if test_option.position | |
<reponame>barry-scott/PythonWinAppPackager<filename>win_app_packager/win_app_package_builder.py<gh_stars>1-10
#!/usr/bin/python3
#
# win_app_package_builder.py
#
import sys
import os
import pathlib
import uuid
import importlib
import colour_text
import modulefinder
from . import win_app_package_win_pe_info
from . import win_app_package_exe_config
from . import win_known_paths
class AppPackageError(Exception):
    """Raised for any fatal error while building an app package."""
class AppPackage:
    """Builds a Windows application package for a python program (see buildCommand)."""
    # kind of program being packaged: console or GUI
    APP_TYPE_CLI = 1
    APP_TYPE_GUI = 2
    # all packaged resources live in this folder next to the .EXE
    resource_folder = pathlib.Path( 'PyWinAppRes' )
    # python library files are copied below here
    library_folder = resource_folder / 'lib'
    # modules that modulefinder may report as missing without
    # failing the build (platform-specific or python2-only imports)
    all_modules_allowed_to_be_missing = set( [
        'sets',
        '_dummy_threading',
        '_frozen_importlib',
        '_frozen_importlib_external',
        '_posixsubprocess',
        '_scproxy',
        '_winreg',
        'ce',
        'grp',
        'java.lang',
        'org.python.core',
        'os.path',
        'posix',
        'pwd',
        'readline',
        'termios',
        'vms_lib',
        # condition import for python2 compat which can be ignored
        '_perf',
        'cStringIO',
        'ordereddict',
        'sha',
        'UserDict',
        # from python2 support for pkg_resources via pytz
        '_sysconfigdata',
        'multiprocessing.get_context',
        'multiprocessing.BufferTooShort',
        'dummy.Process',
        'multiprocessing.AuthenticationError',
        'urlparse',
        'packaging.specifiers',
        'multiprocessing.set_start_method',
        'multiprocessing.TimeoutError',
        'packaging.version',
        'multiprocessing.get_start_method',
        # git - module can be ignored
        'git.index.IndexFile',
        'git.objects.Object',
        'git.objects.RootModule',
        'git.refs.RemoteReference',
        'git.refs.SymbolicReference',
        'git.refs.TagReference',
        'git.repo.Repo',
        'refs.RemoteReference',
        'refs.SymbolicReference',
        'refs.TagReference',
        'smmap.SlidingWindowMapBuffer',
        'smmap.SlidingWindowMapManager',
        'smmap.StaticWindowMapManager',
        # new in python3.8 to ignore
        '_posixshmem',
        'resource',
        'win32evtlog',
        'win32evtlogutil',
        'asyncio.DefaultEventLoopPolicy',
        # new in python 3.9
        'pep517',
        '_testinternalcapi',
        ] )
    # modules imported by the packager itself that must not be
    # copied into the package being built
    all_imported_modules_to_exclude = set( [
        'ctypes',
        'ctypes._endian',
        'ctypes.util',
        'ctypes.wintypes',
        'importlib',
        'importlib._bootstrap',
        'importlib._bootstrap_external',
        'importlib.abc',
        'importlib.machinery',
        'importlib.util',
        'modulefinder',
        'uuid',
        'win_app_packager',
        'win_app_packager.win_app_package_builder',
        'win_app_packager.win_app_package_exe_config',
        'win_app_packager.win_app_package_win_pe_info',
        ] )
def __init__( self ):
    """Set up default build options; parseArgs() overrides them from the command line."""
    # coloured terminal output helper used by info()/error()/warning()
    self.ct = colour_text.ColourText()
    self.ct.initTerminal()
    # options (filled in by parseArgs)
    self.enable_debug = False
    self.enable_verbose = False
    self.enable_merge = False
    self.enable_bootstrap_debug = False
    self.app_type = self.APP_TYPE_CLI
    self.app_name = None
    self.app_icon = None
    self.app_version = (0, 0, 0, 0)
    self.app_install_key = ''
    self.app_install_value = ''
    self.modules_allowed_to_be_missing_filename = None
    self.main_program = None
    self.package_folder = None
    # need to check for both paths
    # the cases covered are:
    #   win32 app on windows 32 bit system
    #   win32 app on windows 64 bit system
    #   win64 app on windows 64 bit system
    self.__windows_system_folders = (pathlib.Path( win_known_paths.get_path( win_known_paths.FOLDERID.System ) )
                                    ,pathlib.Path( win_known_paths.get_path( win_known_paths.FOLDERID.SystemX86 ) ))
    # package contents accumulated while scanning dependencies
    self.__all_library_files = set()
    self.__all_dlls = set()
    self.__all_found_dlls = set()
    # how to find app packager resources
    self.win_app_packager_folder = pathlib.Path( sys.argv[0] ).parent
def debug(self, msg):
    """Print *msg* as a debug line; silent unless --debug was given."""
    if not self.enable_debug:
        return
    print('Debug: %s' % (msg,))
def info(self, msg):
    """Print an informational message using the colour markup template."""
    template = self.ct('<>info Info:<> %s')
    print(template % (msg,))
def verbose(self, msg):
    """Print *msg* as an info line; silent unless --verbose was given."""
    if not self.enable_verbose:
        return
    print(self.ct('<>info Info:<> %s') % (msg,))
def error(self, msg):
    """Print an error message using the colour markup template."""
    template = self.ct('<>error Error: %s<>')
    print(template % (msg,))
def warning(self, msg):
    """Print a warning message using the colour markup template."""
    template = self.ct('<>em Warn:<> %s')
    print(template % (msg,))
def usage( self ):
    """Print the 'build' command usage text and return exit status 1."""
    ################################################################################
    print(
'''python3 -m win_app_packager build <main-script> <package-folder> [<options>...]
main-script
- python main module
package-folder
- folder to create package into
Where <options> are:
--console
--cli
build a windows console progam (the default).
--gui
build a windows gui program.
--name
name the program (defaults to the <main-script> name).
--version <version>
Set the version of the .EXE to be <version>.
e.g 1.0.2.5
--install-key <key>
--install-value <value>
The install path of the package can be read
from the windows registry from key HKLM:<key> value <value>
otherwise the install path is assumed to be the same folder
that the .EXE files is in.
--modules-allowed-to-be-missing-file <file-name>
Add all the modules listed in the file <file-name> to the allowed
to be missing list. Blank lines and lines starting with a '#' as ignored.
--merge
Do not clean out the <package-folder> before building the package.
Useful for putting multiple programs into one package.
--verbose
Output extra information about the build process.
--debug
Developer option. Output lots of details about the build process.
--bootstrap-debug
Developer option. Copy PDF files and setup a Microsoft Visual
Studio solution (.sln) file suitable for running the bootstrap
under the debugger.
'''
    )
    return 1
def parseArgs(self, argv):
    """Parse the command line for the ``build`` command.

    ``argv`` is the full argument vector (``argv[0]`` is skipped).
    Recognised ``--option`` values are stored on ``self``; the
    positional arguments must be ``build <main-script> <package-folder>``.

    Raises
    ------
    AppPackageError
        For an unknown option, a missing or malformed option value, or
        bad positional arguments.
    """
    all_positional_args = []
    args = iter(argv)
    next(args)  # skip the program name
    for arg in args:
        if not arg.startswith('--'):
            all_positional_args.append(arg)
            continue
        if arg == '--debug':
            self.enable_debug = True
        elif arg == '--bootstrap-debug':
            self.enable_bootstrap_debug = True
        elif arg == '--verbose':
            self.enable_verbose = True
        elif arg in ('--console', '--cli'):
            self.app_type = self.APP_TYPE_CLI
        elif arg == '--gui':
            self.app_type = self.APP_TYPE_GUI
        elif arg == '--name':
            self.app_name = next(args)
        elif arg == '--install-key':
            self.app_install_key = next(args)
        elif arg == '--install-value':
            self.app_install_value = next(args)
        elif arg == '--icon':
            self.app_icon = next(args)
        elif arg == '--version':
            # expecting up to 4 ints separated by "."
            try:
                version = next(args)
                if version.strip() == '':
                    raise AppPackageError('--version - value must not be empty')
                int_version = list(int(n) for n in version.split('.'))
                # pad with 0 to make exactly 4 parts to the version
                while len(int_version) < 4:
                    int_version.append(0)
                if len(int_version) > 4:
                    # bug fix: the %r placeholder was never filled in
                    raise AppPackageError('--version %r - only 4 parts allowed' % (version,))
            except StopIteration:
                raise AppPackageError('--version - value required')
            except ValueError:
                raise AppPackageError('--version - invalid value %r' % (version,))
            self.app_version = tuple(int_version)
        elif arg == '--merge':
            self.enable_merge = True
        elif arg == '--modules-allowed-to-be-missing-file':
            self.modules_allowed_to_be_missing_filename = next(args)
        else:
            raise AppPackageError('Unknown option %r' % (arg,))
    if (len(all_positional_args) < 1
            or all_positional_args[0] != 'build'):
        raise AppPackageError('Expecting command name "build"')
    if len(all_positional_args) < 3:
        raise AppPackageError('build expects two args')
    self.main_program = all_positional_args[1]
    self.package_folder = pathlib.Path(all_positional_args[2]).resolve()
    if self.app_name is None:
        # bug fix: only strip a '.py' suffix when it is actually present
        # (matches the endswith() check done in buildCommand)
        if self.main_program.endswith('.py'):
            self.app_name = self.main_program[:-len('.py')]
        else:
            self.app_name = self.main_program
    if self.app_install_key != '' and self.app_install_value == '':
        raise AppPackageError('require --install-value with --install-key')
def buildCommand( self, argv ):
    """Top level 'build' command: parse args, find all dependencies and create the package.

    Returns a process exit status: 0 on success, 1 on any error.
    """
    try:
        self.info( 'App Package Builder' )
        self.parseArgs( argv )
        self.processModulesAllowedToBeMissingFile()
        if self.app_type == self.APP_TYPE_CLI:
            self.info( 'Building CLI App %s into package folder %s' % (self.app_name, self.package_folder) )
        elif self.app_type == self.APP_TYPE_GUI:
            self.info( 'Building GUI App %s into package folder %s' % (self.app_name, self.package_folder) )
        else:
            raise AppPackageError( 'Unknown app_type %r' % (self.app_type,) )
        #
        # Look for modules using two methods
        # 1. Import the main program and see what ends up in sys.modules
        # 2. Force "encodings" to be included as python will not start without encodings
        # 3. Use modulefinder to locate imports done at runtime
        #
        # 1. import main program
        main_module = self.main_program
        if main_module.endswith( '.py' ):
            main_module = main_module[:-len('.py')]
        self.info( 'Importing %s' % (main_module,) )
        importlib.import_module( main_module )
        self.info( 'Import complete for %s' % (main_module,) )
        # save the list of modules imported (before forcing extras in)
        all_imported_module_names = list( sys.modules.keys() )
        # 2. force in encodings
        importlib.import_module( 'encodings' )
        # 3. what can modulefinder locate
        mf = modulefinder.ModuleFinder()
        mf.run_script( self.main_program )
        for name, mod in sorted( mf.modules.items() ):
            self.verbose( 'Module %s: %r' % (name, mod) )
        missing, maybe = mf.any_missing_maybe()
        all_missing = set( missing )
        # only modules outside the allowed-to-be-missing list are fatal
        all_missing_but_needed = all_missing - self.all_modules_allowed_to_be_missing
        for module_name in all_missing_but_needed:
            self.error( 'Module %s is missing but is required' % (module_name,) )
        for module_name in maybe:
            self.warning( 'Module %s maybe missing' % (module_name,) )
        if len(all_missing_but_needed) > 0:
            return 1
        # find the python DLL and its dependencies
        self.addWinPeFileDependenciesToPackage( pathlib.Path( sys.executable ) )
        # add everything modulefinder found, then everything actually imported
        for name, mod in sorted( mf.modules.items() ):
            self.processModule( name, mod )
        for name in sorted( all_imported_module_names ):
            if name in self.all_imported_modules_to_exclude:
                continue
            self.processModule( name, sys.modules[ name ] )
        if not self.enable_merge:
            self.cleanAppPackage()
        self.createAppPackage()
        self.info( 'Completed sucessfully' )
        return 0
    except AppPackageError as e:
        self.error( str(e) )
        return 1
def processModulesAllowedToBeMissingFile(self):
    """Merge module names from the optional --modules-allowed-to-be-missing-file.

    Each non-blank line that does not start with '#' names one module
    that modulefinder may report missing without failing the build.
    Does nothing when no file was given on the command line.

    Raises
    ------
    AppPackageError
        If the named file does not exist.
    """
    if self.modules_allowed_to_be_missing_filename is None:
        return
    try:
        # bug fix: the encoding was misspelt 'utf=8' (it only worked via
        # python's codec-name normalization)
        with open(self.modules_allowed_to_be_missing_filename, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if line == '' or line.startswith('#'):
                    continue
                self.verbose('Adding module "%s" to the allowed to be missing modules list' % (line,))
                self.all_modules_allowed_to_be_missing.add(line)
    except FileNotFoundError as e:
        raise AppPackageError(str(e))
def processModule( self, name, module ):
if not hasattr( module, '__file__' ) or module.__file__ is None:
self.verbose( 'Module %s is builtin - ignoring' % (name,) )
return
filename = pathlib.Path( module.__file__ ).resolve()
self.verbose( 'Module %s type %s filename %s' % (name, filename.suffix, filename) )
# is this file part of the python installation?
for root in [sys.base_prefix] + sys.path:
try:
root = pathlib.Path( root )
root = root.resolve()
# find the suffix relative to a python home
library_filename_suffix = filename.relative_to( root )
except FileNotFoundError:
# root does not exist
continue
except ValueError:
# of filename is not relative to root
continue
if filename.match( '*.py' ):
self.verbose( 'Module %s suffix %s' % (name, library_filename_suffix) )
self.addPyFileToPackage( filename, library_filename_suffix )
if filename.name == '__init__.py':
# assume that all files in | |
<reponame>acorg/dark-matter
import bz2
import gzip
from six.moves import builtins
from unittest import TestCase
from unittest.mock import mock_open
from six import assertRaisesRegex
from collections import Counter
try:
from unittest.mock import patch
except ImportError:
from mock import patch
from io import BytesIO
from dark.utils import (
numericallySortFilenames, median, asHandle, parseRangeString,
parseRangeExpression, pct, StringIO, baseCountsToStr, nucleotidesToStr,
countPrint, take)
class TestNumericallySortFilenames(TestCase):
    """
    Test the numericallySortFilenames function.
    """
    def testNoNames(self):
        """
        An empty list must be returned when an empty list is given.
        """
        self.assertEqual([], numericallySortFilenames([]))

    def testOneNonNumericName(self):
        """
        A list with a single non-numeric name should result in that same
        name being returned.
        """
        self.assertEqual(['hey'], numericallySortFilenames(['hey']))

    def testOneNumericName(self):
        """
        A list with a single numeric name should result in that same
        name being returned.
        """
        self.assertEqual(['3.json'], numericallySortFilenames(['3.json']))

    def testSeveralNames(self):
        """
        A list with several numeric names should result in a correctly
        sorted list of names being returned.
        """
        self.assertEqual(
            ['1.json', '2.json', '3.json'],
            numericallySortFilenames(['3.json', '1.json', '2.json']))

    def testSeveralNamesWithUnequalPrefixLengths(self):
        """
        A list with several numeric names whose numeric prefixes differ
        in length should result in a correctly sorted list of names being
        returned (numeric, not lexicographic, order).
        """
        self.assertEqual(
            ['2.json', '3.json', '21.json', '35.json', '250.json'],
            numericallySortFilenames(
                ['3.json', '21.json', '35.json', '250.json', '2.json']))

    def testBasename(self):
        """
        Sorting must be according to file basename.
        """
        self.assertEqual(
            ['../output/2.json', '../output/3.json', '../output/21.json',
             '../output/35.json', '../output/250.json'],
            numericallySortFilenames(
                ['../output/3.json', '../output/21.json', '../output/35.json',
                 '../output/250.json', '../output/2.json']))
class TestMedian(TestCase):
    """
    Tests for the median function.
    """
    def testEmptyArgRaises(self):
        """
        An empty list must cause median to raise ValueError.
        """
        error = '^arg is an empty sequence$'
        assertRaisesRegex(self, ValueError, error, median, [])

    def testMedianOfOne(self):
        """
        The median function must work on a list of length one.
        """
        self.assertEqual(3, median([3]))

    def testMedianOfTwo(self):
        """
        The median function must work on a list of length two
        (mean of the two values).
        """
        self.assertEqual(4.5, median([3.1, 5.9]))

    def testMedianOfThree(self):
        """
        The median function must work on a list of length three.
        """
        self.assertEqual(5.9, median([3.1, 7.6, 5.9]))

    def testMedianOfFour(self):
        """
        The median function must work on a list of length four.
        """
        self.assertEqual(4.5, median([3.1, 1.3, 7.6, 5.9]))

    def testMedianOfFive(self):
        """
        The median function must work on a list of length five.
        """
        self.assertEqual(5.9, median([3.1, 1.3, 7.6, 9.9, 5.9]))
class TestAsHandle(TestCase):
    """
    Test the asHandle function
    """
    def testOpenFile(self):
        """
        When an open file pointer is passed to asHandle, that same file
        pointer must be returned.
        """
        with patch.object(builtins, 'open', mock_open()):
            fp = open('file')
            with asHandle(fp) as newfp:
                self.assertIs(fp, newfp)

    def testStr(self):
        """
        When a string filename is passed to asHandle, it must be possible to
        read the correct data from the fp that is returned.
        """
        mockOpener = mock_open(read_data='xxx')
        with patch.object(builtins, 'open', mockOpener):
            with asHandle('file') as fp:
                self.assertEqual('xxx', fp.read())

    def testBZ2(self):
        """
        When a string '*.bz2' filename is passed to asHandle, it must be
        possible to read the correct data from the fp that is returned
        (asHandle must route '.bz2' names through bz2.BZ2File).
        """
        result = BytesIO(b'xxx')
        with patch.object(bz2, 'BZ2File') as mockMethod:
            mockMethod.return_value = result
            with asHandle('file.bz2') as fp:
                self.assertEqual('xxx', fp.read())

    def testGzip(self):
        """
        When a string '*.gz' filename is passed to asHandle, it must be
        possible to read the correct data from the fp that is returned
        (asHandle must route '.gz' names through gzip.GzipFile).
        """
        result = BytesIO(b'xxx')
        with patch.object(gzip, 'GzipFile') as mockMethod:
            mockMethod.return_value = result
            with asHandle('file.gz') as fp:
                self.assertEqual('xxx', fp.read())
class TestParseRangeString(TestCase):
    """
    Check that the parseRangeString function works as expected.
    """
    def testEmptyString(self):
        """
        An empty string is an illegal range and must raise a ValueError.
        """
        error = ("^Illegal range ''. Ranges must single numbers or "
                 "number-number\\.$")
        assertRaisesRegex(self, ValueError, error, parseRangeString, '')

    def testSingleNumber(self):
        """
        A single number must result in the expected set.
        """
        self.assertEqual({6}, parseRangeString('6'))

    def testSingleNumberSpaceBefore(self):
        """
        A single number preceded by whitespace must result in the expected
        set.
        """
        self.assertEqual({6}, parseRangeString(' 6'))

    def testSingleNumberSpaceAfter(self):
        """
        A single number followed by whitespace must result in the expected
        set.
        """
        self.assertEqual({6}, parseRangeString('6 '))

    def testSingleNumberSpaceBeforeAndAfter(self):
        """
        A single number preceded and followed by whitespace must result in
        the expected set.
        """
        self.assertEqual({6}, parseRangeString(' 6 '))

    def testSingleRange(self):
        """
        A single range must result in the expected (inclusive) set.
        """
        self.assertEqual({6, 7, 8, 9, 10}, parseRangeString('6-10'))

    def testSingleRangeWithSpaceBeforeHyphen(self):
        """
        A single range with a space before the hyphen must result in the
        expected set.
        """
        self.assertEqual({6, 7, 8, 9, 10}, parseRangeString('6 -10'))

    def testSingleRangeWithSpaceAfterHyphen(self):
        """
        A single range with a space after the hyphen must result in the
        expected set.
        """
        self.assertEqual({6, 7, 8, 9, 10}, parseRangeString('6- 10'))

    def testSingleRangeWithSpaceBeforeAfterHyphen(self):
        """
        A single range with spaces before and after the hyphen must result in
        the expected set.
        """
        self.assertEqual({6, 7, 8, 9, 10}, parseRangeString('6 - 10'))

    def testTwoRanges(self):
        """
        Two ranges must result in the expected set.
        """
        self.assertEqual({6, 7, 8, 9, 10}, parseRangeString('6-8,9-10'))

    def testTwoOverlappingRanges(self):
        """
        Two overlapping ranges must result in the expected (union) set.
        """
        self.assertEqual({6, 7, 8, 9, 10}, parseRangeString('6-9,7-10'))

    def testTwoRangesAndANumber(self):
        """
        Two ranges and a number must result in the expected set.
        """
        self.assertEqual({6, 7, 8, 10}, parseRangeString('6-8,10'))

    def testTwoRangesAndTwoNumbers(self):
        """
        Two ranges and two numbers must result in the expected set.
        """
        self.assertEqual({4, 6, 7, 8, 9, 10, 11, 12},
                         parseRangeString('6-8,9,10-12,4'))

    def testZeroConversion(self):
        """
        If we ask for zero conversion, each index in the result must be one
        less than in the input.
        """
        self.assertEqual({3, 5, 6, 7, 8, 9, 10, 11},
                         parseRangeString('6-8,9,10-12,4',
                                          convertToZeroBased=True))
class TestParseRangeExpression(TestCase):
    """
    Check that the parseRangeExpression function works as expected.
    """
    def testInvalidExpression(self):
        """
        An invalid string must raise a ValueError.
        """
        error = r'^\($'
        assertRaisesRegex(self, ValueError, error, parseRangeExpression, '(')
        error = r'^hey$'
        assertRaisesRegex(self, ValueError, error, parseRangeExpression, 'hey')

    def testEmptyString(self):
        """
        An empty string must produce an empty set.
        """
        self.assertEqual(set(), parseRangeExpression(''))

    def testOneRange(self):
        """
        A simple 3-4 string must produce the expected set.
        """
        self.assertEqual({3, 4}, parseRangeExpression('3-4'))

    def testOneRangeZeroBased(self):
        """
        A simple 3-4 string must produce the expected set when
        convertToZeroBased is True.
        """
        self.assertEqual({2, 3}, parseRangeExpression('3-4', True))

    def testCommas(self):
        """
        A simple 3,4,5 string must produce the expected set.
        """
        self.assertEqual({3, 4, 5}, parseRangeExpression('3,4,5'))

    def testCommasAndRange(self):
        """
        A simple 3,4,5-7 string must produce the expected set.
        """
        self.assertEqual({3, 4, 5, 6, 7}, parseRangeExpression('3,4,5-7'))

    def testTwoRanges(self):
        """
        A simple 3-4,6-8 string must produce the expected set.
        """
        self.assertEqual({3, 4, 6, 7, 8}, parseRangeExpression('3-4,6-8'))

    def testTwoRangesWithSpace(self):
        """
        A simple 3-4, 6-8 string must produce the expected set.
        """
        self.assertEqual({3, 4, 6, 7, 8}, parseRangeExpression('3-4, 6-8'))

    def testUnion(self):
        """
        A union such as 3-4 | 6-8 must produce the expected set.
        """
        self.assertEqual({3, 4, 6, 7, 8}, parseRangeExpression('3-4 | 6-8'))

    def testIntersection(self):
        """
        An intersection such as 3-4 & 4-8 must produce the expected set.
        """
        self.assertEqual({4}, parseRangeExpression('3-4 & 4-8'))

    def testDifferenceNoSpaces(self):
        """
        A difference such as 6-10-7-8 must produce the expected set
        (6-10 minus 7-8).
        """
        self.assertEqual({6, 9, 10}, parseRangeExpression('6-10-7-8'))

    def testDifferenceWithSpaces(self):
        """
        A difference such as 6-10 - 7-8 must produce the expected set.
        """
        self.assertEqual({6, 9, 10}, parseRangeExpression('6-10 - 7-8'))

    def testParens(self):
        """
        A difference with parentheses such as '(3-5 | 7-9) & 5-7' must produce
        the expected set.
        """
        self.assertEqual({5, 7}, parseRangeExpression('(3-5 | 7-9) & 5-7'))

    def testDoubleParens(self):
        """
        A difference with two parentheses such as '(3-5 | 7-9) & (5-7 | 9-11)'
        must produce the expected set.
        """
        self.assertEqual({5, 7, 9},
                         parseRangeExpression('(3-5 | 7-9) & (5-7 | 9-11)'))
class TestStringIO(TestCase):
    """
    Tests for our StringIO class.
    """
    def testInitiallyEmpty(self):
        """
        A StringIO instance must initially be empty.
        """
        self.assertEqual('', StringIO().getvalue())

    def testWriteRead(self):
        """
        It must be possible to write and read to/from a StringIO instance as
        normal.
        """
        s = StringIO()
        s.write('hey')
        self.assertEqual('hey', s.getvalue())

    def testInitializedRead(self):
        """
        It must be possible to read from a StringIO instance that is
        initialized on creation.
        """
        s = StringIO('hey')
        self.assertEqual('hey', s.getvalue())

    def testContextManager(self):
        """
        It must be possible to use a StringIO instance as a context manager.
        """
        with StringIO() as s:
            s.write('hey')
            self.assertEqual('hey', s.getvalue())
class TestBaseCountsToStr(TestCase):
    """
    Test the baseCountsToStr function.
    """
    def testSimple(self):
        """
        A simple example must work as expected: counts are rendered as
        space separated 'base:count' fields.
        """
        counts = Counter()
        counts['A'] += 1
        counts['G'] += 2
        self.assertEqual('A:1 G:2',
                         baseCountsToStr(counts))
class TestNucleotidesToStr(TestCase):
"""
Test the nucleotidesToStr function.
"""
def testSimple(self):
"""
A simple example must work as expected.
"""
counts1 = Counter()
counts1['A'] += 1
counts1['G'] += | |
<filename>sdk/turing/generated/api/ensembling_job_api.py
"""
Turing Minimal Openapi Spec for SDK
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: 0.0.1
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from turing.generated.api_client import ApiClient, Endpoint as _Endpoint
from turing.generated.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from turing.generated.model.ensembler_job_status import EnsemblerJobStatus
from turing.generated.model.ensembling_job import EnsemblingJob
from turing.generated.model.ensembling_job_paginated_results import EnsemblingJobPaginatedResults
from turing.generated.model.id_object import IdObject
class EnsemblingJobApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __create_ensembling_job(
self,
project_id,
ensembling_job,
**kwargs
):
"""Submit an Ensembling job. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_ensembling_job(project_id, ensembling_job, async_req=True)
>>> result = thread.get()
Args:
project_id (int):
ensembling_job (EnsemblingJob): A JSON object that contains the configuration of the ensembling job
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
EnsemblingJob
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['ensembling_job'] = \
ensembling_job
return self.call_with_http_info(**kwargs)
self.create_ensembling_job = _Endpoint(
settings={
'response_type': (EnsemblingJob,),
'auth': [],
'endpoint_path': '/projects/{project_id}/jobs',
'operation_id': 'create_ensembling_job',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'ensembling_job',
],
'required': [
'project_id',
'ensembling_job',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(int,),
'ensembling_job':
(EnsemblingJob,),
},
'attribute_map': {
'project_id': 'project_id',
},
'location_map': {
'project_id': 'path',
'ensembling_job': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__create_ensembling_job
)
def __get_ensembling_job(
self,
project_id,
job_id,
**kwargs
):
"""Get an existing Ensembling job. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_ensembling_job(project_id, job_id, async_req=True)
>>> result = thread.get()
Args:
project_id (int):
job_id (int):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
EnsemblingJob
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['job_id'] = \
job_id
return self.call_with_http_info(**kwargs)
self.get_ensembling_job = _Endpoint(
settings={
'response_type': (EnsemblingJob,),
'auth': [],
'endpoint_path': '/projects/{project_id}/jobs/{job_id}',
'operation_id': 'get_ensembling_job',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'job_id',
],
'required': [
'project_id',
'job_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(int,),
'job_id':
(int,),
},
'attribute_map': {
'project_id': 'project_id',
'job_id': 'job_id',
},
'location_map': {
'project_id': 'path',
'job_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_ensembling_job
)
def __list_ensembling_jobs(
self,
project_id,
**kwargs
):
"""Returns a list of ensembling jobs that belong to the project # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_ensembling_jobs(project_id, async_req=True)
>>> result = thread.get()
Args:
project_id (int):
Keyword Args:
page (int): [optional] if omitted the server will use the default value of 1
page_size (int): [optional] if omitted the server will use the default value of 10
status ([EnsemblerJobStatus]): [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
EnsemblingJobPaginatedResults
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
return self.call_with_http_info(**kwargs)
self.list_ensembling_jobs = _Endpoint(
settings={
'response_type': (EnsemblingJobPaginatedResults,),
'auth': [],
'endpoint_path': '/projects/{project_id}/jobs',
'operation_id': 'list_ensembling_jobs',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'page',
'page_size',
'status',
],
'required': [
'project_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(int,),
'page':
(int,),
'page_size':
(int,),
'status':
([EnsemblerJobStatus],),
},
'attribute_map': {
'project_id': 'project_id',
'page': 'page',
'page_size': 'page_size',
'status': 'status',
},
'location_map': {
'project_id': 'path',
'page': 'query',
'page_size': 'query',
'status': 'query',
},
'collection_format_map': {
'status': 'multi',
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__list_ensembling_jobs
)
def __terminate_ensembling_job(
self,
project_id,
job_id,
**kwargs
):
"""Terminate an ongoing Ensembling Job. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.terminate_ensembling_job(project_id, job_id, async_req=True)
>>> result = thread.get()
Args:
project_id (int):
job_id (int):
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
IdObject
If the method is | |
# -*- coding: utf-8 -*-
"""
@author: sebi
PSF_XYZ_Dialog_BF.py
Version: 1.5
Date: 2015-11-02
This program can be used to detect beads and measure the FWHM-XYZ of the PSF.
The crucial steps are:
* Read the z-stack via BioFormats using python-bioformats.
* Define detection parameters for the selected channel.
* Find the brightest voxel and extract the corresponding XY-plane from Z-Stack.
* Detect all peaks within the extracted XY-plane using scikit-image toolbox.
* Extract the Z-profile at every detected peak position.
* Determine the brightest XY-plane for every peak separately.
* Cutout stack at every detected peak position.
* Do 2D-Gauss fit for every peak to determine FWHM-XY.
* Do 1D-Gauss fit for every Z-Profile to determine FWHM-Z.
* Displays PSF OrthoView and PSF volume for the average PSF = sum of all detected PSFs.
* Displays overview image with all detected peaks and 2D fit from randomly selected peak.
* Optional - Write results to excel sheet (currently only XLS).
* Optional - Save output graphics as PNGs.
"""
from pylab import *
import numpy as np
import gaussfit as gf
import psfview as psf
import matplotlib.pyplot as plt
from xlwt import Workbook
import os
from remove_hotpix import adjust_max
import bftools as bf
import skimage.feature as sf
from PyQt5 import QtCore, QtGui
from PyQt5.Qt import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
import sys
import os
from PyQt5.Qt import *
from PyQt5.QtGui import *
from PyQt5.QtCore import *
#from PyQt5.QtCore import *
from PyQt5.QtCore import QObject, pyqtSignal
from PyQt5.QtWidgets import (QWidget, QPushButton, QLineEdit, QInputDialog,
QFileDialog,
QApplication, QDialog, QVBoxLayout, QSizePolicy)
from PyQt5.QtCore import QTimer
# import the MainWindow widget from the converted .ui files
import ui_PSF_XYZ_Dialog_BF
# current version number
# NOTE(review): the module docstring above still says "Version: 1.5" —
# presumably stale; confirm which number is authoritative.
version = 2.0
class PSF_XYZ_Dialog_BF(QDialog, ui_PSF_XYZ_Dialog_BF.Ui_PSF_XYZ_Dialog_BF):
def __init__(self, parent=None):
    """Build the dialog UI, wire button/spinbox signals to their handlers,
    and initialize the metadata/result containers.

    :param parent: optional parent QWidget passed through to QDialog
    """
    super(PSF_XYZ_Dialog_BF, self).__init__(parent)
    self.setupUi(self)
    # set window title to current version
    self.setWindowTitle('PSF-XYZ Automatic Detection BF ' + str(version))
    # connect the signals with the slots
    self.OpenFile.clicked.connect(self.onopen_file)
    self.pushButton_StartCalc.clicked.connect(self.onstart_detection)
    self.SpinBox_channel.valueChanged.connect(self.onchannel_changed)
    self.check_hotpixel.stateChanged.connect(self.onremove_hotpix_changed)
    # initialize dictionaries
    # MetaInfo: image metadata read via BioFormats in onopen_file
    self.MetaInfo = {}
    # BeadData: per-run results (file name/dir etc.) filled elsewhere
    self.BeadData = {}
@pyqtSlot()
def onopen_file(self):
    """
    Open an image file dialog (CZI/TIF/TIFF), read the stack's metadata
    via python-bioformats, and populate the GUI fields from it.
    Enables the "start calculation" button once metadata is available.
    """
    # default_folder = os.getcwd()
    # NOTE(review): hard-coded, machine-specific default folder — consider
    # falling back to os.getcwd() on other machines.
    default_folder = r'c:\Users\m1srh\Documents\GitHub\PSF_XYZ_Bead_Fit'
    psfstack_filepath = QFileDialog.getOpenFileName(self, 'Open file',
                                                    default_folder,
                                                    'CZI Files (*.czi);; TIF Files (*.tif);; TIFF Files (*.tiff)')
    # update filename inside the GUI
    self.text_filename.setText(psfstack_filepath[0])
    # get image data file location (getOpenFileName returns (path, filter))
    self.imagefilepath = str(psfstack_filepath[0])
    # specify bioformats_package.jar to use if required
    bfpackage = r'bioformats_package.jar'
    bf.set_bfpath(bfpackage)
    # OME schema namespace, used for BioFormats > 5.2.0
    urlnamespace = 'http://www.openmicroscopy.org/Schemas/OME/2016-06'
    # get image meta-information
    self.MetaInfo = bf.get_relevant_metainfo_wrapper(self.imagefilepath,
                                                     namespace=urlnamespace,
                                                     bfpath=bfpackage,
                                                     showinfo=False)
    bf.showtypicalmetadata(self.MetaInfo)
    # push metadata into the GUI widgets
    self.objname_text.setText(self.MetaInfo['ObjModel'])
    # 'n.a.' marks a missing numerical aperture in the metadata
    if self.MetaInfo['NA'] != 'n.a.':
        self.SpinBox_NA.setValue(self.MetaInfo['NA'])
    self.onchannel_changed()
    # XScale is in microns; spinbox expects nanometers — TODO confirm units
    self.SpinBox_pixsize.setValue(self.MetaInfo['XScale']*1000)
    self.SpinBox_zspacing.setValue(self.MetaInfo['ZScale'])
    # enable button to actually start the PSF detection process
    self.pushButton_StartCalc.setEnabled(True)
    # update estimated FWHM-Z from NA, emission wavelength and immersion medium
    estimate, refindex_n = estimate_fwhmz(self.MetaInfo['NA'], self.MetaInfo['WLEm'][0], self.MetaInfo['Immersion'])
    self.SpinBox_guess_fwhmz.setValue(estimate)
    self.immersion_text.setText(self.MetaInfo['Immersion'])
    self.ri_text.setText(str(refindex_n))
    # limit possible values for channel based on MetaInfo
    self.SpinBox_channel.setMaximum(self.MetaInfo['SizeC'])
    # disable channel selection if there is only one channel
    if self.MetaInfo['SizeC'] < 2:
        self.SpinBox_channel.setEnabled(False)
def find_peaks(self, planexy):
    """
    Find the peaks inside the XY-plane containing the brightest pixel,
    using skimage.feature.peak_local_max.

    :param planexy: 2D image (numpy array)
    :return: (xpos, ypos, peaknum) — int peak coordinates and peak count
    """
    # minimal peak distance --> use subimage size / 2 (+1).
    # Bug fix: peak_local_max requires an int min_distance, but np.round
    # returns a float — cast explicitly.
    mindist = int(np.round(self.SpinBox_subimage_size.value(), 0)) + 1
    th = self.SpinBox_threshold.value()
    # peak detection with scikit-image; returns an (N, 2) array of
    # (row, col) coordinates.
    # NOTE(review): the 'indices' keyword was removed in skimage >= 0.20;
    # kept here for compatibility with the version this script targets.
    peaks = sf.peak_local_max(planexy,
                              min_distance=mindist,
                              threshold_rel=th,
                              exclude_border=True,
                              indices=True,
                              num_peaks=np.inf)
    peaknum = len(peaks)
    # skimage returns (row, col) = (y, x), so the columns are swapped
    # relative to this program's (x, y) convention. Vectorized extraction
    # replaces the former element-wise copy loop.
    peaks = np.asarray(peaks).reshape(-1, 2)
    xpos = peaks[:, 1].astype(int)
    ypos = peaks[:, 0].astype(int)
    # print('Detected Peak Positions : ', xpos, ypos)
    print('Number of Peaks : ', peaknum)
    return xpos, ypos, peaknum
def fit_psf(self, peaknum, xdim, ydim, zdim, stack, xpos, ypos):
    """
    Fit FWHM-XY (2D Gauss per peak) and FWHM-Z (1D Gauss per Z-profile)
    for every detected peak.

    :param peaknum: number of detected peaks
    :param xdim: stack width (currently unused, kept for interface stability)
    :param ydim: stack height (currently unused, kept for interface stability)
    :param zdim: number of z-planes in the stack
    :param stack: 3D numpy array ordered (z, y, x)
    :param xpos: x coordinates of detected peaks
    :param ypos: y coordinates of detected peaks
    :return: (heightXY, bgrdXY, fwhmx, fwhmy, fwhmz, zplanes_pos_all,
              zplanes_max_all, fwhmxy_all, fwhmxy_all_ok, igl, fit)
    """
    # create matrix for Z-profiles (one column per peak) and their maxima
    zprofiles = np.zeros([zdim, peaknum])
    zprofiles_max = np.zeros(peaknum)
    for i in range(peaknum):
        # stack is indexed (z, y, x) --> swap coordinates !!!
        zprofiles[:, i] = stack[:, int(ypos[i]), int(xpos[i])]
        zprofiles_max[i] = zprofiles[:, i].max()
    # position of the brightest pixel within every Z-profile
    zprofiles_max_pos = np.zeros(peaknum)
    for i in range(peaknum):
        maxposition = (zprofiles[:, i] == zprofiles_max[i]).nonzero()
        # in case there are more maxima along the z-axis just take the mean.
        # Bug fix: np.int was deprecated in NumPy 1.20 and removed in 1.24 —
        # use the builtin int instead.
        zprofiles_max_pos[i] = int(np.mean(maxposition[0]))
    print('Z-Profiles MAX Values:', zprofiles_max)
    print('Z-Profiles MAX Positions:', zprofiles_max_pos)
    # loop through all peak positions and return imagelist (igl)
    igl = cut_subimages(peaknum, xpos, ypos, zprofiles_max_pos, self.SpinBox_subimage_size.value(), stack)
    # result columns: cx, cy, height, bgrd, fwhm_x, fwhm_y, fwhm_z, z-plane, max
    results = np.zeros((peaknum, 9))
    xfit_z = np.zeros([zdim, peaknum])
    yfit_z = np.zeros([zdim, peaknum])
    # loop-invariant z-axis vectors hoisted out of the fit loop
    zrange = np.arange(0, stack.shape[0], 1) * self.SpinBox_zspacing.value()
    z_peak_positions = zprofiles_max_pos * self.SpinBox_zspacing.value()
    # NOTE(review): if peaknum == 0 the loop never runs and 'fit' is
    # undefined at return time — callers should guard against zero peaks.
    for i in range(peaknum):
        # fit PSF-XY in the brightest xy-plane using a 2D Gauss
        params = gf.fitgaussian2D(igl[i])
        fit = gf.gaussian2D(*params)
        (height, bgrd, x, y, width_x, width_y) = params
        # sigma -> FWHM conversion factor: 2*sqrt(2*ln 2) ~= 2.3548
        fwhm_x = width_x * self.SpinBox_pixsize.value() * 2.3548
        fwhm_y = width_y * self.SpinBox_pixsize.value() * 2.3548
        results[i, 0] = round(x, 3)       # center x
        results[i, 1] = round(y, 3)       # center y
        results[i, 2] = round(height, 0)  # height of peak
        results[i, 3] = round(bgrd, 0)    # background
        results[i, 4] = round(fwhm_x, 0)  # FWHM-X
        results[i, 5] = round(fwhm_y, 0)  # FWHM-Y
        # fit PSF-Z using the Z-profile with a 1D Gauss
        [bgrd, heightZ, center, fwhm_z, cov, xfit_z[:, i], yfit_z[:, i]] = gf.fitgaussian1D(
            zrange, zprofiles[:, i],
            self.SpinBox_guess_fwhmz.value(), z_peak_positions[i])
        results[i, 6] = round(fwhm_z*1000, 0)  # FWHM-Z
        results[i, 7] = zprofiles_max_pos[i]   # Z-Planes
        results[i, 8] = zprofiles_max[i]       # Brightest Pixel
    heightXY = results[:, 2]
    bgrdXY = results[:, 3]
    fwhmx = np.abs(results[:, 4])
    fwhmy = np.abs(results[:, 5])
    fwhmz = results[:, 6]
    zplanes_pos_all = results[:, 7]
    zplanes_max_all = results[:, 8]
    fwhmxy_all = np.concatenate((fwhmx, fwhmy))
    # indices of plausible XY fits (> 100 nm)
    fwhmxy_all_ok = (fwhmxy_all > 100).nonzero()
    print('FWHM-X [nm] : ', fwhmx)
    print('FWHM-Y [nm] : ', fwhmy)
    print('FWHM-Z [nm] : ', fwhmz)
    return heightXY, bgrdXY, fwhmx, fwhmy, fwhmz, zplanes_pos_all, zplanes_max_all, fwhmxy_all, fwhmxy_all_ok, igl, fit
def display_results(self, xdim, ydim, zdim, stack, imagefilepath, planexy, xpos, ypos, zpos,
fwhmx, fwhmy, fwhmz, heightXY, bgrdXY, fwhm_all, fwhm_all_ok, igl, fit):
# Gauss 2D fit for randomly selected peak
goodimages = (fwhmx > 0).nonzero()
tmp = goodimages[0]
# rn = int(round(random(1)*(len(tmp)-1), 0))
rn = np.int(np.round(np.random.rand(1)*(len(tmp)-1), 0))
img2show = int(tmp[rn])
# display image and detected peaks
fig = plt.figure(figsize=(12, 8))
fig.canvas.set_window_title(imagefilepath)
ax1 = fig.add_subplot(2, 2, 1) # all detected peaks
ax2 = fig.add_subplot(2, 2, 2) # random selected peak with fit
ax3 = fig.add_subplot(2, 2, 3) # FWHM-XY distribution
ax4 = fig.add_subplot(2, 2, 4) # FWHM-Z example profile
fig.subplots_adjust(left=0.07, bottom=0.1, right=0.97, top=0.92, wspace=0.20, hspace=0.25)
# ax1
ax1.imshow(planexy, interpolation='nearest', origin='None', cmap=cm.jet)
ax1.plot(xpos, ypos, 'rs', markersize=12, markeredgewidth=1, alpha=0.3)
ax1.plot(xpos[img2show], ypos[img2show], 'ys', markersize=20, markeredgewidth=1, alpha=0.4)
ax1.axis([0, xdim, 0, ydim])
ax1.set_xlabel('pixel x')
ax1.set_ylabel('pixel y')
ax1.set_title('Peak Detection and FWHM-XY Fit', fontsize=12)
# ax2
cax2 = ax2.imshow(igl[img2show], interpolation='nearest', origin=None, cmap=cm.jet)
ax2.set_xlabel('pixel x')
ax2.set_ylabel('pixel y')
ax2.set_title('Random Peak Image shown : ' + str(img2show), fontsize=12)
ax2.contour(fit(*indices(igl[img2show].shape)), cmap=cm.copper)
ax2.text(0.90, 0.90, """
Channel : %.0f""" % (self.SpinBox_channel.value()),
fontsize=12, fontweight='bold', horizontalalignment='right', color='red',
verticalalignment='bottom', transform=ax2.transAxes)
ax2.text(0.95, 0.05, """
Height : %.0f
Bgrd : %.0f
FWHM-X : %.0f
FWHM-Y : %.0f
FWHM-Z : %.0f""" % (np.round(heightXY[img2show], 0), round(bgrdXY[img2show], 0),
fwhmx[img2show], fwhmy[img2show], fwhmz[img2show]),
fontsize=12, horizontalalignment='right', color='red',
verticalalignment='bottom', transform=ax2.transAxes)
# ax3
# the histogram of the data
n, bins, patches = ax3.hist(fwhm_all[fwhm_all_ok], 10, label='FWHM-XY',
normed=0, facecolor='green', alpha=0.75)
ax3.set_xlabel('FWHM-XY [nm]')
ax3.set_ylabel('Occurrence')
ax3.set_title('Measured FWHM-XY', fontsize=12)
ax3.set_xlim(fwhm_all[fwhm_all_ok].min()*0.95, fwhm_all[fwhm_all_ok].max() * 1.05)
ax3.legend()
ax3.grid(True)
# ax4
fwhmz_ok = fwhmz.ravel().nonzero()
try:
n, bins, patches = ax4.hist(fwhmz[fwhmz_ok], 10, label='FWHM-Z', normed=0, facecolor='green', alpha=0.75)
except:
print('Only one data point --> no histogram plotted.')
ax4.plot([fwhmz[fwhmz_ok], fwhmz[fwhmz_ok]], [0, 1], 'g-', lw=5, label='FWHM-Z')
ax4.set_xlabel('FWHM-Z [nm]')
ax4.set_ylabel('Occurrence')
ax4.set_title('Measured FWHM-Z', fontsize=12)
ax4.set_xlim(fwhmz[fwhmz_ok].min()*0.95, fwhmz[fwhmz_ok].max() * 1.05)
ax4.legend()
ax4.grid(True)
# only save plot when option is checked
if self.checkBox_SavePeaks.isChecked() == True:
print('Saving PSF peaks.')
savename = self.BeadData['FileDir']+'/'+self.BeadData['FileName'][:-4] + '_PSF_FWHM.png'
fig.savefig(savename)
# display PSF-OrthoView for selected peak
# estimate a "good" number of pixels around center of PSF to be displayed
psfwidth = np.round(np.mean(fwhm_all) / self.MetaInfo['XScale']/1000, 0) * 3
# estimate a "good" number of planes below and above PSF | |
# pgbedrock/spec_inspector.py
from collections import defaultdict
import copy
import os
import cerberus
import jinja2
import yaml
from pgbedrock import common
from pgbedrock import context
from pgbedrock.jinja import add_filters
# Error shown when a dependent object (whose ownership is derived from the
# object it depends on) is explicitly listed in an ownership section.
# Bug fix: message read "the object is depends on" — corrected to "it".
DEPENDENT_OBJECTS_MSG = ('Spec error: Ownership listed for dependent {objkind}: {dep_objs}\n'
                         'Ownership for a dependent object derives from the object it depends '
                         'on. Please remove these objects from the ownership sections within '
                         'your spec file')
DUPLICATE_ROLE_DEFINITIONS_ERR_MSG = 'Spec error: Role(s) defined more than once: {}'
FILE_OPEN_ERROR_MSG = "Unable to open file '{}':\n{}"
MISSING_ENVVAR_MSG = "Spec error: Required environment variable not found:\n{}"
MULTIPLE_SCHEMA_OWNER_ERR_MSG = 'Spec error: Schema "{}" owned by multiple roles: {}'
MULTIPLE_OBJKIND_OWNER_ERR_MSG = 'Spec error: {} "{}" owned by multiple roles: {}'
OBJECT_REF_READ_WRITE_ERR = (
'Spec error: objects have been unnecessarily given both read and write privileges.'
'pgbedrock automatically grants read access when write access is requested.{}'
)
UNKNOWN_OBJECTS_MSG = ('Spec error: Unknown {objkind} found: {unknown_objects}\n'
'Please manually add these {objkind} to the database or '
'remove them from the spec file')
UNOWNED_OBJECTS_MSG = ('Spec error: Unowned {objkind} found: {unowned_objects}\n'
'Please add these {objkind} to the spec file or manually remove '
'them from the Postgres cluster')
UNDOCUMENTED_ROLES_MSG = ('Spec error: Undocumented roles found: {}.\n'
'Please add these roles to the spec file or manually remove '
'them from the Postgres cluster')
UNOWNED_SCHEMAS_MSG = ('Spec error: Schemas found in database with no owner in spec: {}\n'
'Please add these schemas to the spec file or manually remove '
'them from the Postgres cluster')
VALIDATION_ERR_MSG = 'Spec error: Role "{}", field "{}": {}'
SPEC_SCHEMA_YAML = """
ignore:
type: boolean
can_login:
type: boolean
has_personal_schema:
type: boolean
is_superuser:
type: boolean
attributes:
type: list
schema:
type: string
forbidden:
- LOGIN
- NOLOGIN
- SUPERUSER
- NOSUPERUSER
member_of:
type: list
schema:
type: string
owns:
type: dict
allowed:
- schemas
- tables
- sequences
valueschema:
type: list
schema:
type: string
privileges:
type: dict
allowed:
- schemas
- sequences
- tables
valueschema:
type: dict
allowed:
- read
- write
valueschema:
type: list
schema:
type: string
"""
def convert_spec_to_objectnames(spec):
    """ Convert object names in a loaded spec from strings to ObjectName instances

    Applies to the following sublists when they are present and non-empty:
        * <role_name> -> owns -> <key in context.PRIVILEGE_MAP>
        * <role_name> -> privileges -> <key in context.PRIVILEGE_MAP> -> read
        * <role_name> -> privileges -> <key in context.PRIVILEGE_MAP> -> write
    """
    result = copy.deepcopy(spec)
    for role_config in result.values():
        if not role_config:
            continue
        # ownership lists
        owns = role_config.get('owns', {})
        for kind, items in owns.items():
            if items:
                owns[kind] = [common.ObjectName.from_str(item) for item in items]
        # read/write privilege lists
        for by_priv in role_config.get('privileges', {}).values():
            for priv_kind, items in by_priv.items():
                if items:
                    by_priv[priv_kind] = [common.ObjectName.from_str(item) for item in items]
    return result
def ensure_no_object_owned_twice(spec, dbcontext, objkind):
    """ Check spec for objects of objkind with multiple owners. """
    db_objects_by_schema = dbcontext.get_all_object_attributes().get(objkind, dict())

    def nondependent_names(schema_key):
        # Names of the schema's objects whose ownership is not derived
        contents = db_objects_by_schema.get(schema_key, dict())
        return [name for name, attr in contents.items() if not attr['is_dependent']]

    ownerships = defaultdict(list)
    for rolename, config in spec.items():
        if not config:
            continue
        if config.get('has_personal_schema'):
            # Personal schema: the role implicitly owns everything in it
            for obj in nondependent_names(rolename):
                ownerships[obj].append(rolename)
        owned = (config.get('owns') or {}).get(objkind)
        if not owned:
            continue
        for objname in owned:
            if objname.unqualified_name == '*':
                # Wildcard: expand to all nondependent objects in the schema
                for obj in nondependent_names(objname.schema):
                    ownerships[obj].append(rolename)
            else:
                ownerships[objname].append(rolename)

    error_messages = []
    for objname, owners in ownerships.items():
        if len(owners) > 1:
            error_messages.append(
                MULTIPLE_OBJKIND_OWNER_ERR_MSG.format(objkind[:-1].capitalize(),
                                                      objname.qualified_name,
                                                      ", ".join(sorted(owners))))
    return error_messages
def ensure_no_schema_owned_twice(spec):
    """ Check spec for schemas with multiple owners. """
    owners_by_schema = defaultdict(list)
    for rolename, config in spec.items():
        if not config:
            continue
        if config.get('has_personal_schema'):
            # A personal schema carries the role's own name
            owners_by_schema[common.ObjectName(rolename)].append(rolename)
        owned_schemas = (config.get('owns') or {}).get('schemas')
        if owned_schemas:
            for schema in owned_schemas:
                owners_by_schema[schema].append(rolename)

    error_messages = []
    for schema, owners in owners_by_schema.items():
        if len(owners) > 1:
            error_messages.append(
                MULTIPLE_SCHEMA_OWNER_ERR_MSG.format(schema.qualified_name,
                                                     ", ".join(sorted(owners))))
    return error_messages
def ensure_no_redundant_privileges(spec):
    """
    Verify objects aren't defined in both read and write privilege sections for a given role.
    """
    duplicates_by_role = defaultdict(dict)
    for rolename, config in spec.items():
        privileges = (config or {}).get('privileges')
        if not privileges:
            continue
        for objkind, priv_sections in privileges.items():
            try:
                # Missing 'read' or 'write' raises KeyError -> nothing to compare
                overlap = set(priv_sections['read']) & set(priv_sections['write'])
            except KeyError:
                continue
            if overlap:
                duplicates_by_role[rolename][objkind] = list(overlap)

    if not duplicates_by_role:
        return []

    # Convert ObjectNames back to strings to print out in the error message
    for rolename, by_kind in duplicates_by_role.items():
        for objkind, dups in by_kind.items():
            by_kind[objkind] = [dup.qualified_name for dup in dups]
    formatted = "\n\t".join("%s: %s" % (k, v) for k, v in duplicates_by_role.items())
    return [OBJECT_REF_READ_WRITE_ERR.format(formatted)]
def ensure_no_duplicate_roles(rendered_spec_template):
    """
    Ensure that no roles are declared multiple times.

    In a spec template, if a role is declared more than once there exists a risk that the
    re-declaration will override the desired configuration. pgbedrock considers a config containing
    this risk to be invalid and will throw an error.

    To accomplish this, the yaml.loader.Loader object is used to convert spec template into a
    document tree. Then, the root object's child nodes (which are the roles) are checked for
    duplicates.

    Outputs a list of strings. The decision to return a list of strings was deliberate, despite the
    fact that the length of the list can at most be one. The reason for this is that the other spec
    verification functions also return a list of strings. This return signature consistency makes
    the code in the verify_spec function cleaner.
    """
    loader = yaml.loader.Loader(rendered_spec_template)
    document_tree = loader.get_single_node()
    if document_tree is None:
        # Empty document: nothing to check (callers treat None as "no roles")
        return None

    # Count how many times each top-level key (role) is declared
    role_definitions = defaultdict(int)
    for node in document_tree.value:
        role_definitions[node[0].value] += 1
    multi_defined_roles = [k for k, v in role_definitions.items() if v > 1]
    if multi_defined_roles:
        # Bug fix: separator was " ," (rendering "a ,b"); use ", " instead
        multi_roles_fmtd = ", ".join(multi_defined_roles)
        return [DUPLICATE_ROLE_DEFINITIONS_ERR_MSG.format(multi_roles_fmtd)]
    return []
def ensure_no_undocumented_roles(spec, dbcontext):
    """
    Ensure every role present in the database also appears in the spec.

    pgbedrock deliberately refuses to assume undocumented roles should be
    deleted: such roles may own schemas, tables, functions, etc., and a
    simple omission in the spec could otherwise cause serious damage. It is
    safer to error out and ask the user to resolve the mismatch manually.
    """
    db_roles = set(dbcontext.get_all_role_attributes())
    documented_roles = set(spec)
    undocumented_roles = db_roles - documented_roles
    if not undocumented_roles:
        return []
    undocumented_roles_fmtd = '"' + '", "'.join(sorted(undocumented_roles)) + '"'
    return [UNDOCUMENTED_ROLES_MSG.format(undocumented_roles_fmtd)]
def ensure_no_missing_objects(spec, dbcontext, objkind):
    """
    Ensure the set of objkind objects in the database and in the spec match,
    in both directions:

    Object in database but not in spec
        pgbedrock could drop it, but that is hard to reverse; a mere
        documentation omission could destroy a table. Error out instead and
        ask the user to resolve the mismatch manually.

    Object in spec but not in database
        pgbedrock cannot create it since it does not know the object's DDL.
        The only sensible action is to report the mismatch.
    """
    # Nondependent objects of this kind currently in the database
    db_objects = {obj.objname for obj in dbcontext.get_all_raw_object_attributes()
                  if obj.kind == objkind and not obj.is_dependent}
    db_objects_by_schema = dbcontext.get_all_object_attributes().get(objkind, dict())

    def nondependent_names(schema_key):
        contents = db_objects_by_schema.get(schema_key, dict())
        return [name for name, attr in contents.items() if not attr['is_dependent']]

    # Objects the spec claims ownership of (expanding personal schemas and '*')
    spec_objects = set()
    for rolename, config in spec.items():
        if not config:
            continue
        if config.get('has_personal_schema'):
            spec_objects.update(nondependent_names(rolename))
        owned = (config.get('owns') or {}).get(objkind)
        if not owned:
            continue
        for objname in owned:
            if objname.unqualified_name == '*':
                spec_objects.update(nondependent_names(objname.schema))
            else:
                spec_objects.add(objname)

    error_messages = []
    only_in_spec = spec_objects - db_objects
    if only_in_spec:
        unknown_objects = ', '.join(sorted(o.qualified_name for o in only_in_spec))
        error_messages.append(
            UNKNOWN_OBJECTS_MSG.format(objkind=objkind, unknown_objects=unknown_objects))
    only_in_db = db_objects - spec_objects
    if only_in_db:
        unowned_objects = ', '.join(sorted(o.qualified_name for o in only_in_db))
        error_messages.append(
            UNOWNED_OBJECTS_MSG.format(objkind=objkind, unowned_objects=unowned_objects))
    return error_messages
def ensure_no_unowned_schemas(spec, dbcontext):
"""
Ensure that all schemas in the | |
post(self, request, *args, **kwargs):
emails = request.POST.get('emails', []).split()
balance = Decimal(request.POST.get('customPaymentAmount', 0))
wire_invoice_factory = DomainWireInvoiceFactory(request.domain, contact_emails=emails)
try:
wire_invoice_factory.create_wire_invoice(balance)
except Exception, e:
return json_response({'error': {'message', e}})
return json_response({'success': True})
class BillingStatementPdfView(View):
    """Serve a billing statement (invoice) PDF inline for a domain admin."""
    urlname = 'domain_billing_statement_download'

    @method_decorator(login_and_domain_required)
    @method_decorator(domain_admin_required)
    def dispatch(self, request, *args, **kwargs):
        return super(BillingStatementPdfView, self).dispatch(request, *args, **kwargs)

    def get(self, request, *args, **kwargs):
        """Look up the InvoicePdf by statement id and stream it, or 404."""
        domain = args[0]
        statement_id = kwargs.get('statement_id')
        if statement_id is None or domain is None:
            raise Http404()
        try:
            invoice_pdf = InvoicePdf.get(statement_id)
        except ResourceNotFound:
            raise Http404()
        # Wire invoices and subscription invoices live in different models;
        # scope the lookup to this domain so users can't fetch others' PDFs.
        try:
            if invoice_pdf.is_wire:
                invoice = WireInvoice.objects.get(
                    pk=invoice_pdf.invoice_id,
                    domain=domain
                )
            else:
                invoice = Invoice.objects.get(
                    pk=invoice_pdf.invoice_id,
                    subscription__subscriber__domain=domain
                )
        except (Invoice.DoesNotExist, WireInvoice.DoesNotExist):
            raise Http404()
        if invoice.is_wire:
            edition = 'Bulk'
        else:
            edition = DESC_BY_EDITION[invoice.subscription.plan_version.plan.edition]['name']
        filename = "%(pdf_id)s_%(domain)s_%(edition)s_%(filename)s" % {
            'pdf_id': invoice_pdf._id,
            'domain': domain,
            'edition': edition,
            'filename': invoice_pdf.get_filename(invoice),
        }
        try:
            data = invoice_pdf.get_data(invoice)
            response = HttpResponse(data, content_type='application/pdf')
            # Bug fix: the filename value was missing its closing double
            # quote, producing a malformed Content-Disposition header.
            response['Content-Disposition'] = 'inline;filename="%s"' % filename
        except Exception as e:
            logging.error('[Billing] Fetching invoice PDF failed: %s' % e)
            return HttpResponse(_("Could not obtain billing statement. "
                                  "An issue has been submitted."))
        return response
class InternalSubscriptionManagementView(BaseAdminProjectSettingsView):
    """Superuser-only page for Dimagi staff to manage a domain's subscription.

    Renders one form per entry in ``form_classes``; the POSTed 'slug' field
    identifies which management form was submitted.
    """
    template_name = 'domain/internal_subscription_management.html'
    urlname = 'internal_subscription_mgmt'
    page_title = ugettext_lazy("Dimagi Internal Subscription Management")
    # Candidate subscription-management form classes, keyed by their .slug
    form_classes = INTERNAL_SUBSCRIPTION_MANAGEMENT_FORMS

    @method_decorator(require_superuser)
    def get(self, request, *args, **kwargs):
        return super(InternalSubscriptionManagementView, self).get(request, *args, **kwargs)

    @method_decorator(require_superuser)
    def post(self, request, *args, **kwargs):
        # Pick the form matching the POSTed slug and apply it if valid
        form = self.get_post_form
        if form.is_valid():
            try:
                form.process_subscription_management()
                return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain]))
            except NewSubscriptionError as e:
                # NOTE(review): e.message is a Python-2 idiom; under Python 3
                # this would need str(e).
                messages.error(self.request, e.message)
        # invalid form or subscription error: re-render the page with errors
        return self.get(request, *args, **kwargs)

    @property
    def page_context(self):
        return {
            'plan_name': Subscription.get_subscribed_plan_by_domain(self.domain)[0],
            'select_subscription_type_form': self.select_subscription_type_form,
            'subscription_management_forms': self.slug_to_form.values(),
            'today': datetime.date.today(),
        }

    @property
    def get_post_form(self):
        # Form instance corresponding to the submitted 'slug' value
        return self.slug_to_form[self.request.POST.get('slug')]

    @property
    @memoized
    def slug_to_form(self):
        # Instantiate every management form; only the one whose slug matches
        # the POST is bound to the request data.
        def create_form(form_class):
            if self.request.method == 'POST' and form_class.slug == self.request.POST.get('slug'):
                return form_class(self.domain, self.request.couch_user.username, self.request.POST)
            return form_class(self.domain, self.request.couch_user.username)
        return {form_class.slug: create_form(form_class) for form_class in self.form_classes}

    @property
    @memoized
    def select_subscription_type_form(self):
        # On POST, echo back the submitted management type
        if self.request.method == 'POST':
            for form_slug in self.slug_to_form:
                if form_slug in self.request.POST:
                    return SelectSubscriptionTypeForm({
                        'subscription_type': form_slug,
                    })
        # Otherwise derive the type from the domain's current subscription
        subscription_type = None
        subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object)[1]
        if subscription is None:
            subscription_type = None
        else:
            plan = subscription.plan_version.plan
            if subscription.service_type == SubscriptionType.CONTRACTED:
                subscription_type = "contracted_partner"
            elif plan.edition == SoftwarePlanEdition.ENTERPRISE:
                subscription_type = "dimagi_only_enterprise"
            elif (plan.edition == SoftwarePlanEdition.ADVANCED
                  and plan.visibility == SoftwarePlanVisibility.TRIAL_INTERNAL):
                subscription_type = "advanced_extended_trial"
        return SelectSubscriptionTypeForm({'subscription_type': subscription_type})
class SelectPlanView(DomainAccountingSettings):
    """Step 1 of changing a domain's software plan: pick an edition."""
    template_name = 'domain/select_plan.html'
    urlname = 'domain_select_plan'
    page_title = ugettext_lazy("Change Plan")
    step_title = ugettext_lazy("Select Plan")
    edition = None
    lead_text = ugettext_lazy("Please select a plan below that fits your organization's needs.")

    @property
    def edition_name(self):
        """Human-readable name of the preselected edition, if any."""
        if not self.edition:
            return None
        return DESC_BY_EDITION[self.edition]['name']

    @property
    def is_non_ops_superuser(self):
        # Superusers who do not hold the accounting-admin privilege
        if not self.request.couch_user.is_superuser:
            return False
        return not has_privilege(self.request, privileges.ACCOUNTING_ADMIN)

    @property
    def parent_pages(self):
        subscription_page = {
            'title': DomainSubscriptionView.page_title,
            'url': reverse(DomainSubscriptionView.urlname, args=[self.domain]),
        }
        return [subscription_page]

    @property
    def steps(self):
        suffix = u" (%s)" % self.edition_name if self.edition_name else ""
        first_step = {
            'title': _(u"1. Select a Plan%(edition_name)s") % {
                "edition_name": suffix
            },
            'url': reverse(SelectPlanView.urlname, args=[self.domain]),
        }
        return [first_step]

    @property
    def main_context(self):
        context = super(SelectPlanView, self).main_context
        context['steps'] = self.steps
        context['step_title'] = self.step_title
        context['lead_text'] = self.lead_text
        return context

    @property
    def page_context(self):
        subscription = self.current_subscription
        if subscription is not None and not subscription.is_trial:
            current_edition = subscription.plan_version.plan.edition.lower()
        else:
            current_edition = ""
        return {
            'pricing_table': PricingTable.get_table_by_product(self.product, domain=self.domain),
            'current_edition': current_edition,
            'is_non_ops_superuser': self.is_non_ops_superuser,
        }
class EditPrivacySecurityView(BaseAdminProjectSettingsView):
    """Admin page for editing a project's privacy and security settings."""
    template_name = "domain/admin/project_privacy.html"
    urlname = "privacy_info"
    page_title = ugettext_lazy("Privacy and Security")

    @property
    @memoized
    def privacy_form(self):
        # Bound form on POST; otherwise a form prefilled from the domain
        current_values = {
            "secure_submissions": self.domain_object.secure_submissions,
            "restrict_superusers": self.domain_object.restrict_superusers,
            "allow_domain_requests": self.domain_object.allow_domain_requests,
        }
        if self.request.method == 'POST':
            return PrivacySecurityForm(self.request.POST, initial=current_values)
        return PrivacySecurityForm(initial=current_values)

    @property
    def page_context(self):
        return {'privacy_form': self.privacy_form}

    def post(self, request, *args, **kwargs):
        form = self.privacy_form
        if form.is_valid():
            form.save(self.domain_object)
            messages.success(request, _("Your project settings have been saved!"))
        return self.get(request, *args, **kwargs)
class SelectedEnterprisePlanView(SelectPlanView):
    """Step 2 of plan selection for Enterprise: a "contact Dimagi" form
    instead of a self-service subscription change."""
    template_name = 'domain/selected_enterprise_plan.html'
    urlname = 'enterprise_request_quote'
    step_title = ugettext_lazy("Contact Dimagi")
    edition = SoftwarePlanEdition.ENTERPRISE

    @property
    def steps(self):
        last_steps = super(SelectedEnterprisePlanView, self).steps
        last_steps.append({
            'title': _("2. Contact Dimagi"),
            'url': reverse(SelectedEnterprisePlanView.urlname, args=[self.domain]),
        })
        return last_steps

    @property
    @memoized
    def is_not_redirect(self):
        # True when this POST is an actual form submission rather than the
        # redirect from the plan-selection page (which carries plan_edition).
        # Idiom fix: 'x not in y' instead of 'not x in y'.
        return 'plan_edition' not in self.request.POST

    @property
    @memoized
    def enterprise_contact_form(self):
        # Bind to POST data only for real submissions
        if self.request.method == 'POST' and self.is_not_redirect:
            return EnterprisePlanContactForm(self.domain, self.request.couch_user, data=self.request.POST)
        return EnterprisePlanContactForm(self.domain, self.request.couch_user)

    @property
    def page_context(self):
        return {
            'enterprise_contact_form': self.enterprise_contact_form,
        }

    def post(self, request, *args, **kwargs):
        if self.is_not_redirect and self.enterprise_contact_form.is_valid():
            self.enterprise_contact_form.send_message()
            messages.success(request, _("Your request was sent to Dimagi. "
                                        "We will try our best to follow up in a timely manner."))
            return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain]))
        return self.get(request, *args, **kwargs)
class ConfirmSelectedPlanView(SelectPlanView):
    """Confirmation step shown after the user picks a plan edition."""
    template_name = 'domain/confirm_plan.html'
    urlname = 'confirm_selected_plan'
    step_title = ugettext_lazy("Confirm Plan")

    @property
    def steps(self):
        """Breadcrumb steps: the parent's steps plus this confirmation step."""
        last_steps = super(ConfirmSelectedPlanView, self).steps
        last_steps.append({
            'title': _("2. Confirm Plan"),
            'url': reverse(SelectPlanView.urlname, args=[self.domain]),
        })
        return last_steps

    @property
    @memoized
    def edition(self):
        """Edition posted from the plan-selection page.

        Raises Http404 for a missing or unknown edition. Previously a
        missing 'plan_edition' key made ``.title()`` raise AttributeError
        on None (a 500) instead of a 404.
        """
        edition = self.request.POST.get('plan_edition', '').title()
        if edition not in [e[0] for e in SoftwarePlanEdition.CHOICES]:
            raise Http404()
        return edition

    @property
    @memoized
    def selected_plan_version(self):
        """Current version of the default plan for the chosen edition."""
        return DefaultProductPlan.get_default_plan_by_domain(self.domain, self.edition).plan.get_version()

    @property
    def downgrade_messages(self):
        """Warnings about features lost by moving to the selected plan."""
        current_plan_version, subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object)
        if subscription is None:
            current_plan_version = None
        downgrades = get_change_status(current_plan_version, self.selected_plan_version)[1]
        downgrade_handler = DomainDowngradeStatusHandler(
            self.domain_object, self.selected_plan_version, downgrades,
            web_user=self.request.user.username
        )
        return downgrade_handler.get_response()

    @property
    def page_context(self):
        return {
            'downgrade_messages': self.downgrade_messages,
            'current_plan': (self.current_subscription.plan_version.user_facing_description
                             if self.current_subscription is not None else None),
            'show_community_notice': (self.edition == SoftwarePlanEdition.COMMUNITY
                                      and self.current_subscription is None),
        }

    @property
    def main_context(self):
        context = super(ConfirmSelectedPlanView, self).main_context
        context.update({
            'plan': self.selected_plan_version.user_facing_description,
        })
        return context

    def get(self, request, *args, **kwargs):
        # This page only makes sense as the target of a POST from the
        # selection step; plain GETs go back to plan selection.
        return HttpResponseRedirect(reverse(SelectPlanView.urlname, args=[self.domain]))

    def post(self, request, *args, **kwargs):
        """Render the confirmation page (enterprise goes to the quote flow)."""
        if self.edition == SoftwarePlanEdition.ENTERPRISE and not self.request.couch_user.is_superuser:
            return HttpResponseRedirect(reverse(SelectedEnterprisePlanView.urlname, args=[self.domain]))
        return super(ConfirmSelectedPlanView, self).get(request, *args, **kwargs)
class ConfirmBillingAccountInfoView(ConfirmSelectedPlanView, AsyncHandlerMixin):
    """Final step of plan selection: collect/confirm billing information
    and create the new subscription.
    """
    template_name = 'domain/confirm_billing_info.html'
    urlname = 'confirm_billing_account_info'
    step_title = ugettext_lazy("Confirm Billing Information")
    # Set to True by account() when a new BillingAccount had to be created.
    is_new = False
    async_handlers = [
        Select2BillingInfoHandler,
    ]

    @property
    def steps(self):
        """Breadcrumb steps: the parent's steps plus this billing step."""
        last_steps = super(ConfirmBillingAccountInfoView, self).steps
        last_steps.append({
            'title': _("3. Confirm Billing Account"),
            'url': reverse(ConfirmBillingAccountInfoView.urlname, args=[self.domain]),
        })
        return last_steps

    @property
    @memoized
    def account(self):
        """Billing account for this domain; created on first use."""
        if self.current_subscription:
            return self.current_subscription.account
        account, self.is_new = BillingAccount.get_or_create_account_by_domain(
            self.domain,
            created_by=self.request.couch_user.username,
            account_type=BillingAccountType.USER_CREATED,
            entry_point=EntryPoint.SELF_STARTED,
        )
        return account

    @property
    def payment_method(self):
        """Stripe payment method for the requesting web user (lazily created)."""
        user = self.request.user.username
        payment_method, __ = StripePaymentMethod.objects.get_or_create(
            web_user=user,
            method_type=PaymentMethodType.STRIPE,
        )
        return payment_method

    @property
    @memoized
    def is_form_post(self):
        # Distinguishes a billing-form submission from the redirect POST
        # arriving from the plan-confirmation step.
        return 'company_name' in self.request.POST

    @property
    @memoized
    def billing_account_info_form(self):
        """Subscription confirmation form, bound to POST on real submissions."""
        initial = None
        if self.edition == SoftwarePlanEdition.ENTERPRISE and self.request.couch_user.is_superuser:
            # Pre-fill Dimagi's own address for superuser-created
            # enterprise subscriptions.
            initial = {
                'company_name': "Dimagi",
                'first_line': "585 Massachusetts Ave",
                'second_line': "Suite 4",
                'city': "Cambridge",
                'state_province_region': "MA",
                'postal_code': "02139",
                'country': "US",
            }
        if self.request.method == 'POST' and self.is_form_post:
            return ConfirmNewSubscriptionForm(
                self.account, self.domain, self.request.couch_user.username,
                self.selected_plan_version, self.current_subscription, data=self.request.POST, initial=initial
            )
        return ConfirmNewSubscriptionForm(self.account, self.domain, self.request.couch_user.username,
                                          self.selected_plan_version, self.current_subscription, initial=initial)

    @property
    def page_context(self):
        return {
            'billing_account_info_form': self.billing_account_info_form,
            'stripe_public_key': settings.STRIPE_PUBLIC_KEY,
            'cards': self.payment_method.all_cards_serialized(self.account)
        }

    def post(self, request, *args, **kwargs):
        """Create the subscription from the submitted billing info."""
        if self.async_response is not None:
            return self.async_response
        if self.edition == SoftwarePlanEdition.ENTERPRISE and not self.request.couch_user.is_superuser:
            return HttpResponseRedirect(reverse(SelectedEnterprisePlanView.urlname, args=[self.domain]))
        if self.is_form_post and self.billing_account_info_form.is_valid():
            is_saved = self.billing_account_info_form.save()
            software_plan_name = DESC_BY_EDITION[self.selected_plan_version.plan.edition]['name'].encode('utf-8')
            if not is_saved:
                # NOTE(review): the error message interpolates outside _()
                # while the success message interpolates inside it -- the
                # success msgid therefore cannot match a catalog entry.
                messages.error(
                    request, _("It appears there was an issue subscribing your project to the %s Software Plan. You "
                               "may try resubmitting, but if that doesn't work, rest assured someone will be "
                               "contacting you shortly.") % software_plan_name)
            else:
                messages.success(
                    request, _("Your project has been successfully subscribed to the %s Software Plan."
                               % software_plan_name)
                )
            return HttpResponseRedirect(reverse(DomainSubscriptionView.urlname, args=[self.domain]))
        return super(ConfirmBillingAccountInfoView, self).post(request, *args, **kwargs)
class SubscriptionMixin(object):
    """Provides the domain's current, not-yet-renewed subscription."""

    @property
    @memoized
    def subscription(self):
        # 404 when the domain has no subscription, or when it was
        # already renewed (nothing left to act on).
        subscription = Subscription.get_subscribed_plan_by_domain(self.domain_object)[1]
        if subscription is None or subscription.is_renewed:
            raise Http404
        return subscription
class SubscriptionRenewalView(SelectPlanView, SubscriptionMixin):
    """Plan-selection step specialized for renewing an existing subscription."""
    urlname = "domain_subscription_renewal"
    page_title = ugettext_lazy("Renew Plan")
    step_title = ugettext_lazy("Renew or Change Plan")

    @property
    def lead_text(self):
        """Recommendation blurb shown above the plan options.

        Translate first, format afterwards: the previous code formatted the
        string before passing it to ugettext_lazy(), so the interpolated
        string could never match an entry in the translation catalog.
        """
        return _("Based on your current usage we recommend you use the "
                 "<strong>{plan}</strong> plan").format(
                     plan=self.current_subscription.plan_version.plan.edition)

    @property
    def main_context(self):
        context = super(SubscriptionRenewalView, self).main_context
        context.update({'is_renewal': True})
        return context

    @property
    def page_context(self):
        context = super(SubscriptionRenewalView, self).page_context
        # Lowest edition that still covers every privilege currently in use.
        current_privs = get_privileges(self.subscription.plan_version)
        plan = DefaultProductPlan.get_lowest_edition_by_domain(
            self.domain, current_privs, return_plan=False,
        ).lower()
        context['current_edition'] = (plan
                                      if self.current_subscription is not None
                                      and not self.current_subscription.is_trial
                                      else "")
        return context
class ConfirmSubscriptionRenewalView(DomainAccountingSettings, AsyncHandlerMixin, SubscriptionMixin):
template_name = 'domain/confirm_subscription_renewal.html'
urlname = 'domain_subscription_renewal_confirmation'
page_title = ugettext_lazy("Renew Plan")
async_handlers = [
Select2BillingInfoHandler,
]
@property
@memoized
def next_plan_version(self):
    """Plan version to renew into, from the posted 'plan_edition'.

    Raises Http404 when no renewable plan matches. Previously a missing
    'plan_edition' key made ``.title()`` raise AttributeError on None
    (a 500) instead of flowing into the 404 path.
    """
    new_edition = self.request.POST.get('plan_edition', '').title()
    plan_version = DefaultProductPlan.get_default_plan_by_domain(self.domain, new_edition)
    if plan_version is None:
        # Lazy %-style args: formatting only happens if the record is emitted.
        logging.error("[BILLING] Could not find a matching renewable plan "
                      "for %(domain)s, subscription number %(sub_pk)s.",
                      {'domain': self.domain, 'sub_pk': self.subscription.pk})
        raise Http404
    return plan_version
@property
@memoized
def confirm_form(self):
    """Renewal confirmation form; bound to POST data unless the POST is
    just the redirect coming from the plan-selection page."""
    form_args = (
        self.account, self.domain, self.request.couch_user.username,
        self.subscription, self.next_plan_version,
    )
    if self.request.method == 'POST' and "from_plan_page" not in self.request.POST:
        return ConfirmSubscriptionRenewalForm(*form_args, data=self.request.POST)
    return ConfirmSubscriptionRenewalForm(*form_args)
@property
def page_context(self):
    """Template context for the renewal confirmation page."""
    context = {
        'subscription': self.subscription,
        'plan': self.subscription.plan_version.user_facing_description,
        'confirm_form': self.confirm_form,
        'next_plan': self.next_plan_version.user_facing_description,
    }
    return context
def post(self, request, *args, **kwargs):
if self.async_response is not None:
return self.async_response
if self.confirm_form.is_valid():
is_saved = self.confirm_form.save()
if not is_saved:
messages.error(
request, _(
"There was an issue renewing your subscription. We "
"have been notified of the issue. Please try "
"submitting again, and if the problem persists, "
"please try in a few hours."
)
)
else:
messages.success(
request, _("Your subscription was successfully renewed!")
| |
host_source,
"options": ["rbind", "nosuid",
"noexec", "nodev",
mode, ], }
self._container_specjson["mounts"].append(mount)
def _del_mount_spec(self, host_source, cont_dest):
    """Remove every mount point matching the given host source and
    container destination.

    The previous implementation deleted entries from the list while
    iterating it with enumerate(); the index shift made it skip the
    element following each deletion (consecutive matches survived).
    Rebuilding the list removes all matches safely.
    """
    mounts = self._container_specjson["mounts"]
    self._container_specjson["mounts"] = [
        mount for mount in mounts
        if not (mount["destination"] == cont_dest and
                mount["source"] == host_source)
    ]
def _add_volume_bindings(self):
    """Get the volume bindings string for runc"""
    # Mount the filebind staging area (read-write); it holds copies of
    # the single host files that get bound into the container.
    (host_dir, cont_dir) = self._filebind.start(Config.sysdirs_list)
    self._add_mount_spec(host_dir, cont_dir, rwmode=True)
    for vol in self.opt["vol"]:
        (host_dir, cont_dir) = self._vol_split(vol)
        if os.path.isdir(host_dir):
            if host_dir == "/dev":
                # Bind-mounting the whole /dev is not supported here;
                # device nodes are added individually elsewhere.
                Msg().err("Warning: this engine does not support -v",
                          host_dir, l=Msg.WAR)
                continue
            self._add_mount_spec(host_dir, cont_dir, rwmode=True)
        elif os.path.isfile(host_dir):
            # Single files can only be bound when their destination is
            # one of the whitelisted system files in Config.sysdirs_list.
            if cont_dir not in Config.sysdirs_list:
                Msg().err("Error: engine does not support file mounting:",
                          host_dir)
            else:
                self._filebind.add(host_dir, cont_dir)
def _check_env(self):
    """Sanitize environment variables
    Overriding parent ExecutionEngineCommon() class.

    Drops empty entries, expands bare variable names ("NAME") from the
    host environment, and rejects keys that are empty, contain spaces,
    or start with a digit. Returns True when the list is valid.
    """
    # Iterate over a copy: the list is mutated inside the loop.
    for pair in list(self.opt["env"]):
        if not pair:
            self.opt["env"].remove(pair)
            continue
        if "=" not in pair:
            # Bare name: replace with NAME=value from the host, or drop it.
            self.opt["env"].remove(pair)
            val = os.getenv(pair, "")
            if val:
                self.opt["env"].append('%s=%s' % (pair, val))
            continue
        (key, val) = pair.split("=", 1)
        # 'not key' guards "=value" entries, which previously raised
        # IndexError on key[0] instead of being reported as an error.
        if not key or " " in key or key[0] in string.digits:
            Msg().err("Error: in environment:", pair)
            return False
    return True
def _run_invalid_options(self):
    """check -p --publish -P --publish-all --net-coop"""
    unsupported = (
        ("portsmap", "-p --publish"),
        ("netcoop", "-P --netcoop --publish-all"),
    )
    for opt_key, opt_names in unsupported:
        if self.opt[opt_key]:
            Msg().err("Warning: this execution mode does not support "
                      + opt_names, l=Msg.WAR)
def run(self, container_id):
    """Execute a Docker container using runc. This is the main method
    invoked to run a container with runc.
    * argument: container_id or name
    * options: many via self.opt see the help
    Returns 2/4/5 on setup failures, otherwise the runc exit status.
    """
    # Only these host files may be individually bound into the container.
    Config.sysdirs_list = (
        "/etc/resolv.conf", "/etc/host.conf",
        "/etc/passwd", "/etc/group",
    )
    # setup execution
    if not self._run_init(container_id):
        return 2
    self._run_invalid_options()
    self._container_specfile = self.container_dir + "/config.json"
    self._filebind = FileBind(self.localrepo, self.container_id)
    self._select_runc()
    # create new OCI spec file
    if not self._load_spec(new=True):
        return 4
    self._uid_check()
    # if not --hostenv clean the environment
    self._run_env_cleanup_list()
    # set environment variables
    self._run_env_set()
    if not self._check_env():
        return 5
    self._set_spec()
    # Drop the mqueue mount when configured to, or on kernels older than
    # 4.8 (presumably unsupported there -- TODO confirm).
    if (Config.runc_nomqueue or (Config.runc_nomqueue is None and not
                                 Config().oskernel_isgreater("4.8.0"))):
        self._del_mount_spec("mqueue", "/dev/mqueue")
    self._add_volume_bindings()
    self._add_devices()
    self._save_spec()
    if Msg.level >= Msg.DBG:
        runc_debug = ["--debug", ]
    else:
        runc_debug = []
    # build the actual command
    self.execution_id = Unique().uuid(self.container_id)
    cmd_l = self._set_cpu_affinity()
    cmd_l.append(self.runc_exec)
    cmd_l.extend(runc_debug)
    cmd_l.extend(["--root", self.container_dir, "run"])
    cmd_l.extend(["--bundle", self.container_dir, self.execution_id])
    Msg().err("CMD =", cmd_l, l=Msg.VER)
    self._run_banner(self.opt["cmd"][0], '%')
    # Attach via a pty only when stdout is an interactive terminal.
    if sys.stdout.isatty():
        return self.run_pty(cmd_l)
    return self.run_nopty(cmd_l)
def run_pty(self, cmd_l):
    """runc from a terminal: run attached and return the exit status."""
    exit_status = subprocess.call(cmd_l, shell=False, close_fds=True)
    self._filebind.finish()
    return exit_status
def run_nopty(self, cmd_l):
    """runc without a terminal

    Runs the command attached to a pseudo-terminal pair and relays its
    output to stdout. Returns the integer exit status (matching
    run_pty); the previous version returned the Popen object itself,
    leaked the pty master fd, and wrote bytes to the text stdout on
    Python 3.
    """
    (pmaster, pslave) = os.openpty()
    proc = subprocess.Popen(cmd_l, shell=False, close_fds=True,
                            stdout=pslave, stderr=pslave)
    os.close(pslave)
    while True:
        proc.poll()
        if proc.returncode is not None:
            break
        readable, dummy, exception = \
            select.select([pmaster, ], [], [pmaster, ], 5)
        if exception:
            break
        if readable:
            try:
                data = os.read(pmaster, 1024)
            except OSError:
                # EIO when the child side of the pty is closed
                break
            # os.read() returns bytes on Python 3: write to the binary
            # buffer when one exists (Python 2 str goes straight through)
            getattr(sys.stdout, "buffer", sys.stdout).write(data)
    if proc.returncode is None:
        try:
            proc.terminate()
        except OSError:
            pass
        proc.wait()
    os.close(pmaster)
    self._filebind.finish()
    return proc.returncode
class SingularityEngine(ExecutionEngineCommon):
    """Docker container execution engine using singularity
    Provides a namespaces based user space container.
    Inherits from ContainerEngine class
    """

    def __init__(self, localrepo):
        super(SingularityEngine, self).__init__(localrepo)
        self.singularity_exec = None   # path to the singularity executable
        self._filebind = None          # FileBind helper for single-file mounts
        self.execution_id = None       # unique id for this container run

    def _select_singularity(self):
        """Set singularity executable and related variables"""
        conf = Config()
        arch = conf.arch()
        if arch == "amd64":
            image_list = ["singularity-x86_64", "singularity"]
        elif arch == "i386":
            image_list = ["singularity-x86", "singularity"]
        elif arch == "arm64":
            image_list = ["singularity-arm64", "singularity"]
        elif arch == "arm":
            image_list = ["singularity-arm", "singularity"]
        else:
            # Fallback for unknown architectures; previously image_list
            # was left undefined here and the next line raised NameError.
            image_list = ["singularity"]
        f_util = FileUtil(self.localrepo.bindir)
        self.singularity_exec = f_util.find_file_in_dir(image_list)
        if not self.singularity_exec:
            self.singularity_exec = FileUtil("singularity").find_exec()
        if not self.singularity_exec:
            Msg().err("Error: singularity executable not found")
            sys.exit(1)

    def _get_volume_bindings(self):
        """Get the volume bindings string for singularity exec"""
        vol_list = []
        # Staging area for single host files bound into the container.
        (tmphost_path, tmpcont_path) = self._filebind.start(Config.sysdirs_list)
        vol_list.extend(["-B", "%s:%s" % (tmphost_path, tmpcont_path), ])
        home_dir = NixAuthentication().get_home()
        # Track whether the user already binds these; otherwise bind
        # container-local defaults below so singularity does not expose
        # the host directories.
        home_is_binded = False
        tmp_is_binded = False
        vartmp_is_binded = False
        for vol in self.opt["vol"]:
            (host_path, cont_path) = self._vol_split(vol)
            if os.path.isdir(host_path):
                if host_path == home_dir and cont_path in ("", host_path):
                    home_is_binded = True
                elif host_path == "/tmp" and cont_path in ("", "/tmp"):
                    tmp_is_binded = True
                elif host_path == "/var/tmp" and cont_path in ("", "/var/tmp"):
                    vartmp_is_binded = True
                else:
                    vol_list.extend(["-B", "%s:%s" % (host_path, cont_path), ])
            elif os.path.isfile(host_path):
                # Single files can only be bound when whitelisted.
                if cont_path not in Config.sysdirs_list:
                    Msg().err("Error: engine does not support file mounting:",
                              host_path)
                else:
                    self._filebind.add(host_path, cont_path)
        if not home_is_binded:
            vol_list.extend(["--home", "%s/root:%s" % (self.container_root, "/root"), ])
        if not tmp_is_binded:
            vol_list.extend(["-B", "%s/tmp:/tmp" % (self.container_root), ])
        if not vartmp_is_binded:
            vol_list.extend(["-B", "%s/var/tmp:/var/tmp" % (self.container_root), ])
        return vol_list

    def _singularity_env_get(self):
        """Build environment string with user specified environment in
        the form SINGULARITYENV_var=value
        """
        singularityenv = dict()
        for pair in list(self.opt["env"]):
            if "=" not in pair:
                # Malformed entry: previously raised ValueError on split.
                continue
            (key, val) = pair.split("=", 1)
            singularityenv['SINGULARITYENV_%s' % key] = val
        return singularityenv

    def _setup_container_user(self, user):
        """Override of _setup_container_user()"""
        return self._setup_container_user_noroot(user)

    def _make_container_directories(self):
        """Create directories expected by Singularity"""
        FileUtil(self.container_root + "/var/tmp").mkdir()
        FileUtil(self.container_root + "/tmp").mkdir()
        FileUtil(self.container_root + "/proc").mkdir()
        FileUtil(self.container_root + "/dev").mkdir()
        FileUtil(self.container_root + "/sys").mkdir()
        FileUtil(self.container_root + "/root").mkdir()

    def _run_invalid_options(self):
        """check -p --publish -P --publish-all --net-coop"""
        if self.opt["portsmap"]:
            Msg().err("Warning: this execution mode does not support "
                      "-p --publish", l=Msg.WAR)
        if self.opt["netcoop"]:
            Msg().err("Warning: this execution mode does not support "
                      "-P --netcoop --publish-all", l=Msg.WAR)

    def _has_option(self, option):
        """Check if singularity has a given cli option"""
        if option in Uprocess().get_output("singularity --help"):
            return True
        return False

    def run(self, container_id):
        """Execute a Docker container using singularity.
        This is the main method invoked to run a container with singularity.
        * argument: container_id or name
        * options: many via self.opt see the help
        Returns 2/5 on setup failures, otherwise singularity's exit status.
        """
        Config.sysdirs_list = (
            # "/dev", "/proc", "/sys",
            "/etc/passwd", "/etc/group",
            "/lib/modules",
        )
        # setup execution
        if not self._run_init(container_id):
            return 2
        self._run_invalid_options()
        self._make_container_directories()
        self._filebind = FileBind(self.localrepo, self.container_id)
        self._select_singularity()
        self._uid_check_noroot()
        # set environment variables
        self._run_env_set()
        if not self._check_env():
            return 5
        if Msg.level >= Msg.DBG:
            singularity_debug = ["--debug", "-x", "-v", ]
        elif self._has_option("--silent"):
            singularity_debug = ["--silent", ]
        elif self._has_option("--quiet"):
            singularity_debug = ["--quiet", ]
        else:
            singularity_debug = []
        if self.singularity_exec.startswith(self.localrepo.bindir):
            # bundled singularity: request user namespace mode
            Config.singularity_options.extend(["-u", ])
        #if FileUtil("nvidia-smi").find_exec():
        #    Config.singularity_options.extend(["--nv", ])
        singularity_vol_list = self._get_volume_bindings()
        # build the actual command
        self.execution_id = Unique().uuid(self.container_id)
        cmd_l = self._set_cpu_affinity()
        cmd_l.append(self.singularity_exec)
        cmd_l.extend(singularity_debug)
        cmd_l.append("exec")
        cmd_l.extend(Config.singularity_options)
        if self.opt["cwd"]:
            cmd_l.extend(["--pwd", self.opt["cwd"], ])
        cmd_l.extend(singularity_vol_list)
        cmd_l.append(self.container_root)
        cmd_l.extend(self.opt["cmd"])
        Msg().err("CMD =", cmd_l, l=Msg.VER)
        # if not --hostenv clean the environment
        self._run_env_cleanup_dict()
        # execute
        self._run_banner(self.opt["cmd"][0], '/')
        # Build the child environment explicitly. The previous code passed
        # env=os.environ.update(...), which is always None (update()
        # returns None) and only worked through the side effect of
        # mutating the parent's os.environ.
        singularity_env = dict(os.environ)
        singularity_env.update(self._singularity_env_get())
        status = subprocess.call(cmd_l, shell=False, close_fds=True,
                                 env=singularity_env)
        self._filebind.finish()
        return status
class FakechrootEngine(ExecutionEngineCommon):
"""Docker container execution engine using Fakechroot
Provides a chroot like environment to run containers.
Uses Fakechroot as chroot alternative.
Inherits from ContainerEngine class
"""
def __init__(self, localrepo):
    """Initialize Fakechroot engine state on top of the common engine.

    _fakechroot_so: path of the selected libfakechroot shared object.
    _elfpatcher: ELF patching helper, assigned later during setup.
    """
    super(FakechrootEngine, self).__init__(localrepo)
    self._fakechroot_so = ""
    self._elfpatcher = None
def _select_fakechroot_so(self):
    """Select fakechroot sharable object library

    Preference order:
    1. Config.fakechroot_so when set (an existing absolute path is
       returned directly);
    2. a libfakechroot.so shipped inside the container directory;
    3. a distro/arch specific library found in the local repo libdir.
    Exits the program when nothing suitable is found.
    """
    conf = Config()
    if conf.fakechroot_so:
        if isinstance(conf.fakechroot_so, list):
            image_list = conf.fakechroot_so
        elif isinstance(conf.fakechroot_so, str):
            image_list = [conf.fakechroot_so, ]
        # NOTE(review): if fakechroot_so is neither list nor str,
        # image_list is left undefined here and the find_file_in_dir()
        # call below raises NameError -- confirm config only holds
        # list or str values.
        if "/" in conf.fakechroot_so:
            if os.path.exists(conf.fakechroot_so):
                return os.path.realpath(conf.fakechroot_so)
    elif os.path.exists(self.container_dir + "/libfakechroot.so"):
        return self.container_dir + "/libfakechroot.so"
    else:
        lib = "libfakechroot"
        deflib = "libfakechroot.so"
        image_list = [deflib, ]
        guest = GuestInfo(self.container_root)
        arch = guest.arch()
        (distro, version) = guest.osdistribution()
        version = version.split(".")[0]  # keep only the major version
        # Most-specific library name first, generic fallback last.
        if arch == "amd64":
            image_list = ["%s-%s-%s-x86_64.so" % (lib, distro, version),
                          "%s-%s-x86_64.so" % (lib, distro),
                          "%s-x86_64.so" % (lib), deflib]
        elif arch == "i386":
            image_list = ["%s-%s-%s-x86.so" % (lib, distro, version),
                          "%s-%s-x86.so" % (lib, distro),
                          "%s-x86.so" % (lib), deflib]
        elif arch == "arm64":
            image_list = ["%s-%s-%s-arm64.so" % (lib, distro, version),
                          "%s-%s-arm64.so" % (lib, distro),
                          "%s-arm64.so" % (lib), deflib]
        elif arch == "arm":
            image_list = ["%s-%s-%s-arm.so" % (lib, distro, version),
                          "%s-%s-arm.so" % (lib, distro),
                          "%s-arm.so" % (lib), deflib]
    f_util = FileUtil(self.localrepo.libdir)
    fakechroot_so = f_util.find_file_in_dir(image_list)
    if not fakechroot_so:
        Msg().err("Error: no libfakechroot found", image_list)
        sys.exit(1)
    Msg().err("fakechroot_so:", fakechroot_so, l=Msg.DBG)
    return fakechroot_so
def _setup_container_user(self, user):
    """Override of _setup_container_user()

    Fakechroot runs strictly unprivileged, so always delegate to the
    non-root user setup.
    """
    return self._setup_container_user_noroot(user)
def _get_volume_bindings(self):
    """Build the fakechroot volume binding strings.

    Returns a tuple (exclude_paths, dir_map): identical host/container
    paths go into a ':'-joined exclude list, differing ones into a
    ':'-joined "host!container" map sorted by container path descending.
    """
    same_paths = []
    mapped = {}
    for vol in self.opt["vol"]:
        (host_path, cont_path) = self._vol_split(vol)
        if host_path == cont_path:
            same_paths.append(host_path)
        else:
            mapped[cont_path] = host_path + "!" + cont_path
    mapped_entries = [mapped[c_path] for c_path in sorted(mapped, reverse=True)]
    return (":".join(same_paths), ":".join(mapped_entries))
def _get_access_filesok(self):
    """
    Circunvent mpi init issues when calling access()
    A list of certain existing files is provided
    """
    def _exists_on_host(c_path):
        h_file = self._cont2host(c_path)
        return bool(h_file) and os.path.exists(h_file)

    return ":".join([c_path for c_path in Config.access_files
                     if _exists_on_host(c_path)])
def _fakechroot_env_set(self):
"""fakechroot environment variables to set"""
(host_volumes, map_volumes) = self._get_volume_bindings()
self._fakechroot_so = self._select_fakechroot_so()
access_filesok = self._get_access_filesok()
#
self.opt["env"].append("PWD=" + self.opt["cwd"])
self.opt["env"].append("FAKECHROOT_BASE=" +
os.path.realpath(self.container_root))
self.opt["env"].append("LD_PRELOAD=" + self._fakechroot_so)
if not self._is_volume("/tmp"):
self.opt["env"].append("FAKECHROOT_AF_UNIX_PATH=" + Config.tmpdir)
#
if host_volumes:
self.opt["env"].append("FAKECHROOT_EXCLUDE_PATH=" + host_volumes)
if map_volumes:
self.opt["env"].append("FAKECHROOT_DIR_MAP=" + map_volumes)
if Msg.level >= Msg.DBG:
self.opt["env"].append("FAKECHROOT_DEBUG=true")
self.opt["env"].append("LD_DEBUG=libs:files")
if access_filesok:
self.opt["env"].append("FAKECHROOT_ACCESS_FILESOK=" +
access_filesok)
# execution mode
ld_library_real = self._elfpatcher.get_ld_library_path()
xmode = self.exec_mode.get_mode()
| |
B: (-1)**(B.size() - len(B))
coeff = lambda B: sign(B) * prod([factorial(sum( 1 for part in B if part.issubset(big) )) for big in A],
self.base_ring().one())
return e.sum_of_terms([(B, coeff(B)) for B in A.refinements()], distinct=True)
@cached_method
def _h_to_p_on_basis(self, A):
    r"""
    Return `\mathbf{h}_A` in terms of the powersum basis.

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - An element of the `\mathbf{p}` basis

    TESTS::

        sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
        sage: h = NCSym.h()
        sage: all(h(h._h_to_p_on_basis(A)) == h[A] for i in range(5) for A in SetPartitions(i))
        True
    """
    p = self.realization_of().p()
    one = self.base_ring().one()

    def coeff(B):
        # |prod over parts of size i of (-1)^(i-1) * (i-1)!|
        return abs(prod([(-1)**(i-1) * factorial(i-1) for i in B.shape()],
                        one))

    return p.sum_of_terms([(B, coeff(B)) for B in A.refinements()],
                          distinct=True)
class Element(CombinatorialFreeModule.Element):
    """
    An element in the homogeneous basis of `NCSym`.
    """
    def omega(self):
        r"""
        Return the involution `\omega` applied to ``self``.

        The involution `\omega` on `NCSym` is defined by
        `\omega(\mathbf{h}_A) = \mathbf{e}_A`.

        OUTPUT:

        - an element in the basis ``self``

        EXAMPLES::

            sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
            sage: h = NCSym.h()
            sage: e = NCSym.e()
            sage: elt = h[[1,3],[2]].omega(); elt
            2*h{{1}, {2}, {3}} - h{{1, 3}, {2}}
            sage: elt.omega()
            h{{1, 3}, {2}}
            sage: e(elt)
            e{{1, 3}, {2}}
        """
        P = self.parent()
        e = self.parent().realization_of().e()
        # Reinterpret the h-expansion coefficients in the e basis, then
        # convert the result back into the h basis.
        return P(e.sum_of_terms(self))

    def to_symmetric_function(self):
        r"""
        The projection of ``self`` to the symmetric functions.

        Take a symmetric function in non-commuting variables
        expressed in the `\mathbf{h}` basis, and return the projection of
        expressed in the complete basis of symmetric functions.

        The map `\chi \colon NCSym \to Sym` is given by

        .. MATH::

            \mathbf{h}_A \mapsto
            h_{\lambda(A)} \prod_i \lambda(A)_i!

        where `\lambda(A)` is the partition associated with `A` by
        taking the sizes of the parts.

        OUTPUT:

        - An element of the symmetric functions in the complete basis

        EXAMPLES::

            sage: h = SymmetricFunctionsNonCommutingVariables(QQ).h()
            sage: h[[1,3],[2]].to_symmetric_function()
            2*h[2, 1]
            sage: h[[1],[3],[2]].to_symmetric_function()
            h[1, 1, 1]
        """
        h = SymmetricFunctions(self.parent().base_ring()).h()
        # multiply by prod_i lambda(A)_i! as in the formula above
        c = lambda la: prod(map(factorial, la))
        return h.sum_of_terms([(i.shape(), coeff*c(i.shape())) for (i, coeff) in self])
h = homogeneous  # short alias: NCSym.h() and NCSym.homogeneous() coincide
class powersum(NCSymBasis_abstract):
r"""
The Hopf algebra of symmetric functions in non-commuting variables
in the powersum basis.
EXAMPLES::
sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
sage: p = NCSym.p()
"""
def __init__(self, NCSym):
    """
    EXAMPLES::

        sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
        sage: TestSuite(NCSym.p()).run()
    """
    CombinatorialFreeModule.__init__(self, NCSym.base_ring(), SetPartitions(),
                                     prefix='p', bracket=False,
                                     category=MultiplicativeNCSymBases(NCSym))
    # Register coercions
    # p <-> m via the cached on-basis maps
    m = NCSym.m()
    self.module_morphism(self._p_to_m_on_basis, codomain=m).register_as_coercion()
    m.module_morphism(m._m_to_p_on_basis, codomain=self).register_as_coercion()
    # p <-> x via the cached on-basis maps
    x = NCSym.x()
    self.module_morphism(self._p_to_x_on_basis, codomain=x).register_as_coercion()
    x.module_morphism(x._x_to_p_on_basis, codomain=self).register_as_coercion()
@cached_method
def _p_to_m_on_basis(self, A):
    r"""
    Return `\mathbf{p}_A` in terms of the monomial basis.

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - An element of the `\mathbf{m}` basis

    TESTS::

        sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
        sage: p = NCSym.p()
        sage: all(p(p._p_to_m_on_basis(A)) == p[A] for i in range(5) for A in SetPartitions(i))
        True
    """
    m = self.realization_of().m()
    # p_A is the multiplicity-free sum of m_B over all coarsenings B of A
    return m.sum_of_terms([(B, 1) for B in A.coarsenings()], distinct=True)
@cached_method
def _p_to_e_on_basis(self, A):
    r"""
    Return `\mathbf{p}_A` in terms of the elementary basis.

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - An element of the `\mathbf{e}` basis

    TESTS::

        sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
        sage: p = NCSym.p()
        sage: all(p(p._p_to_e_on_basis(A)) == p[A] for i in range(5) for A in SetPartitions(i))
        True
    """
    e = self.realization_of().e()
    # Invert the triangular e -> p change of basis on the refinement
    # poset of A via its Mobius function.
    P_refine = Poset((A.refinements(), A.parent().lt))
    c = prod([(-1)**(i-1) * factorial(i-1) for i in A.shape()], self.base_ring().one())
    return e.sum_of_terms([(B, P_refine.mobius_function(B, A) / c) for B in P_refine], distinct=True)
@cached_method
def _p_to_h_on_basis(self, A):
    r"""
    Return `\mathbf{p}_A` in terms of the homogeneous basis.

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - An element of the `\mathbf{h}` basis

    TESTS::

        sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
        sage: p = NCSym.p()
        sage: all(p(p._p_to_h_on_basis(A)) == p[A] for i in range(5) for A in SetPartitions(i))
        True
    """
    h = self.realization_of().h()
    # Same Mobius inversion as _p_to_e_on_basis, but with the absolute
    # value of the normalizing constant.
    P_refine = Poset((A.refinements(), A.parent().lt))
    c = abs(prod([(-1)**(i-1) * factorial(i-1) for i in A.shape()], self.base_ring().one()))
    return h.sum_of_terms([(B, P_refine.mobius_function(B, A) / c) for B in P_refine], distinct=True)
@cached_method
def _p_to_x_on_basis(self, A):
    r"""
    Return `\mathbf{p}_A` in terms of the `\mathbf{x}` basis.

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - An element of the `\mathbf{x}` basis

    TESTS::

        sage: NCSym = SymmetricFunctionsNonCommutingVariables(QQ)
        sage: p = NCSym.p()
        sage: all(p(p._p_to_x_on_basis(A)) == p[A] for i in range(5) for A in SetPartitions(i))
        True
    """
    x = self.realization_of().x()
    # p_A is the multiplicity-free sum of x_B over all refinements B of A
    return x.sum_of_terms([(B, 1) for B in A.refinements()], distinct=True)
# Note that this is the same as the monomial coproduct_on_basis
def coproduct_on_basis(self, A):
    r"""
    Return the coproduct of a powersum basis element.

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - The coproduct applied to the powersum symmetric function in
      non-commuting variables indexed by ``A`` expressed in the
      powersum basis.

    EXAMPLES::

        sage: p = SymmetricFunctionsNonCommutingVariables(QQ).powersum()
        sage: p[[1, 3], [2]].coproduct()
        p{} # p{{1, 3}, {2}} + p{{1}} # p{{1, 2}} + p{{1, 2}} # p{{1}} + p{{1, 3}, {2}} # p{}
        sage: p.coproduct_on_basis(SetPartition([[1]]))
        p{} # p{{1}} + p{{1}} # p{}
        sage: p.coproduct_on_basis(SetPartition([]))
        p{} # p{}
    """
    P = SetPartitions()
    # Handle corner cases
    if len(A) == 0:
        return self.tensor_square().monomial(( P([]), P([]) ))
    if len(A) == 1:
        return self.tensor_square().sum_of_monomials([(P([]), A), (A, P([]))])
    # Split the index set {1, ..., len(A)} of the parts of A into two
    # complementary (possibly empty) subsets.
    ell_set = range(1, len(A) + 1) # +1 for indexing
    L = [[[], ell_set]] + list(SetPartitions(ell_set, 2))

    def to_basis(S):
        # Standardize the parts of A selected by S: relabel the union of
        # the chosen parts with 1, 2, ... preserving relative order.
        if len(S) == 0:
            return P([])
        sub_parts = [list(A[i-1]) for i in S] # -1 for indexing
        mins = [min(p) for p in sub_parts]
        over_max = max([max(p) for p in sub_parts]) + 1
        ret = [[] for i in range(len(S))]
        cur = 1
        while min(mins) != over_max:
            # repeatedly move the globally smallest remaining element
            m = min(mins)
            i = mins.index(m)
            ret[i].append(cur)
            cur += 1
            sub_parts[i].pop(sub_parts[i].index(m))
            if len(sub_parts[i]) != 0:
                mins[i] = min(sub_parts[i])
            else:
                mins[i] = over_max
        return P(ret)

    L1 = [(to_basis(S), to_basis(C)) for S,C in L]
    # each split contributes both tensor orders
    L2 = [(M, N) for N,M in L1]
    return self.tensor_square().sum_of_monomials(L1 + L2)
def internal_coproduct_on_basis(self, A):
    r"""
    Return the internal coproduct of a powersum basis element.

    The internal coproduct is defined by

    .. MATH::

        \Delta^{\odot}(\mathbf{p}_A) = \mathbf{p}_A \otimes
        \mathbf{p}_A

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - an element of the tensor square of ``self``

    EXAMPLES::

        sage: p = SymmetricFunctionsNonCommutingVariables(QQ).powersum()
        sage: p.internal_coproduct_on_basis(SetPartition([[1,3],[2]]))
        p{{1, 3}, {2}} # p{{1, 3}, {2}}
    """
    return self.tensor_square().monomial((A, A))
def antipode_on_basis(self, A):
    r"""
    Return the result of the antipode applied to a powersum basis element.

    Let `A` be a set partition. The antipode given in [LM2011]_ is

    .. MATH::

        S(\mathbf{p}_A) = \sum_{\gamma} (-1)^{\ell(\gamma)}
        \mathbf{p}_{\gamma[A]}

    where we sum over all ordered set partitions (i.e. set
    compositions) of `[\ell(A)]` and

    .. MATH::

        \gamma[A] = A_{\gamma_1}^{\downarrow} | \cdots |
        A_{\gamma_{\ell(A)}}^{\downarrow}

    is the action of `\gamma` on `A` defined in
    :meth:`SetPartition.ordered_set_partition_action()`.

    INPUT:

    - ``A`` -- a set partition

    OUTPUT:

    - an element in the basis ``self``

    EXAMPLES::

        sage: p = SymmetricFunctionsNonCommutingVariables(QQ).powersum()
        sage: p.antipode_on_basis(SetPartition([[1], [2,3]]))
        p{{1, 2}, {3}}
        sage: p.antipode_on_basis(SetPartition([]))
        p{}
        sage: F = p[[1,3],[5],[2,4]].coproduct()
        sage: F.apply_multilinear_morphism(lambda x,y: x.antipode()*y)
        0
    """
    # The action gamma[A] is provided by SetPartition itself; a local
    # reimplementation of it (an inner 'action' function) used to sit
    # here unused and has been removed.
    return self.sum_of_terms( [(A.ordered_set_partition_action(gamma), (-1)**len(gamma))
                               for gamma in OrderedSetPartitions(len(A))] )
def primitive(self, A, i=1):
r"""
Return the primitive associated to ``A`` in ``self``.
Fix some `i \in S`. Let `A` be an atomic set partition of `S`,
then the primitive `p(A)` given in [LM2011]_ is
.. MATH::
p(A) = \sum_{\gamma} (-1)^{\ell(\gamma)-1}
\mathbf{p}_{\gamma[A]}
where we sum over all ordered set partitions of `[\ell(A)]` such
that `i \in \gamma_1` and `\gamma[A]` is the action of `\gamma`
on `A` defined in
:meth:`SetPartition.ordered_set_partition_action()`.
If `A` is not atomic, then `p(A) = 0`.
.. SEEALSO:: :meth:`SetPartition.is_atomic`
INPUT:
- ``A`` -- a set partition
- ``i`` -- (default: 1) index in the base set for ``A`` specifying
which set of primitives this belongs to
OUTPUT:
- an element in the basis ``self``
EXAMPLES::
sage: p = SymmetricFunctionsNonCommutingVariables(QQ).powersum()
sage: elt = p.primitive(SetPartition([[1,3], [2]])); elt
-p{{1, 2}, {3}} + p{{1, 3}, {2}}
sage: elt.coproduct()
-p{} # p{{1, 2}, {3}} | |
<filename>ipt/ipt_hough_circles_detector.py
import os
import pickle
import logging
logger = logging.getLogger(__name__)
import cv2
import numpy as np
from skimage.transform import hough_circle, hough_circle_peaks
import ipso_phen.ipapi.base.ip_common as ipc
from ipso_phen.ipapi.base.ipt_abstract import IptBase
from ipso_phen.ipapi.ipt.ipt_edge_detector import IptEdgeDetector
from ipso_phen.ipapi.tools.regions import (
RectangleRegion,
CircleRegion,
AnnulusRegion,
Point,
)
from ipso_phen.ipapi.tools.folders import ipso_folders
class IptHoughCircles(IptBase):
    def build_params(self):
        """Declare every UI parameter of the tool.

        Registration order defines the order in which widgets appear in
        the UI, and each ``name=`` is the key later read through
        ``self.get_value_of``.  Do not reorder or rename without checking
        ``process_wrapper``/``generate_roi``.
        """
        # --- caching & input selection -------------------------------
        self.add_checkbox(
            name="enable_cache",
            desc="Allow retrieving data from cache",
            default_value=1,
            hint="Data will be retrieved only if params are identical.",
        )
        self.add_combobox(
            name="source_selector",
            desc="Select source",
            default_value="current_image",
            values={"current_image": "Current image", "mask": "Mask"},
            hint="Select which image will be used as source",
        )
        # --- ROI output & optional cropping ROI ----------------------
        self.add_roi_settings(
            default_name="unnamed_roi", default_type="keep", default_shape="rectangle"
        )
        self.add_separator(name="s1")
        self.add_text_input(
            name="crop_roi_name",
            desc="Name of ROI to be used",
            default_value="",
            hint="Circles will only be detected inside ROI",
        )
        # --- pre-processing of the channel fed to the edge detector --
        self.add_channel_selector(default_value="l")
        self.add_checkbox(
            name="normalize",
            desc="Normalize channel",
            default_value=0,
            hint="Normalize channel before edge detection",
        )
        self.add_slider(
            name="median_filter_size",
            desc="Median filter size (odd values only)",
            default_value=0,
            minimum=0,
            maximum=51,
        )
        # --- Hough transform search space ----------------------------
        self.add_spin_box(
            name="min_radius",
            desc="Minimal radius to consider",
            default_value=400,
            minimum=0,
            maximum=2000,
            hint="All circles smaller than this will be ignored",
        )
        self.add_spin_box(
            name="max_radius",
            desc="Maximal radius to consider",
            default_value=1000,
            minimum=0,
            maximum=2000,
            hint="All circles bigger than this will be ignored",
        )
        self.add_spin_box(
            name="annulus_size",
            desc="Annulus secondary radius delta",
            default_value=0,
            minimum=0,
            maximum=2000,
            hint="Annulus size, 0 means full disc",
        )
        self.add_spin_box(
            name="step_radius",
            desc="Radius granularity",
            default_value=10,
            minimum=0,
            maximum=100,
            hint="Steps for scanning radius",
        )
        self.add_spin_box(
            name="max_peaks",
            desc="Maximum number of detected circles",
            default_value=2,
            minimum=-1,
            maximum=200,
            hint="Keeps only n best circles",
        )
        self.add_spin_box(
            name="min_distance",
            desc="Minimum distance between two circles",
            default_value=20,
            minimum=1,
            maximum=2000,
            hint="Remove circles that are too close",
        )
        # --- rendering & candidate filtering -------------------------
        self.add_spin_box(
            name="line_width",
            desc="Draw line width",
            default_value=4,
            minimum=1,
            maximum=20,
        )
        self.add_checkbox(
            name="keep_only_one",
            desc="Keep only closest, if not, ROI is larger circle",
            default_value=0,
        )
        self.add_combobox(
            name="target_position",
            desc="Keep the closest circle closest to",
            default_value="BOTTOM_CENTER",
            values=dict(
                TOP_LEFT="TOP_LEFT",
                TOP_CENTER="TOP_CENTER",
                TOP_RIGHT="TOP_RIGHT",
                MIDDLE_LEFT="MIDDLE_LEFT",
                MIDDLE_CENTER="MIDDLE_CENTER",
                MIDDLE_RIGHT="MIDDLE_RIGHT",
                BOTTOM_LEFT="BOTTOM_LEFT",
                BOTTOM_CENTER="BOTTOM_CENTER",
                BOTTOM_RIGHT="BOTTOM_RIGHT",
            ),
        )
        self.add_slider(
            name="max_dist_to_root",
            desc="Maximum distance to root position",
            default_value=1000,
            minimum=0,
            maximum=4000,
        )
        self.add_checkbox(
            name="draw_boundaries", desc="Draw max and min circles", default_value=0
        )
        self.add_checkbox(
            name="draw_candidates", desc="Draw discarded candidates", default_value=0
        )
        self.add_spin_box(
            name="expand_circle",
            desc="Contract/expand circle",
            default_value=0,
            minimum=-1000,
            maximum=1000,
        )
        # --- edge-detection sub-tool & overlay -----------------------
        self.add_checkbox(name="edge_only", desc="Edge detection only", default_value=0)
        self.add_edge_detector()
        self.add_text_overlay()
    def process_wrapper(self, **kwargs):
        """
        Hough circles detector:
        Hough circles detector: Perform a circular Hough transform.
        Can generate ROIs
        Real time: False
        Keyword Arguments (in parentheses, argument name):
            * Allow retrieving data from cache (enable_cache): Data will be retrieved only if params are identical.
            * ROI name (roi_name):
            * Select action linked to ROI (roi_type): no clue
            * Select ROI shape (roi_shape): no clue
            * Target IPT (tool_target): no clue
            * Name of ROI to be used (crop_roi_name): Circles will only be detected inside ROI
            * Channel (channel):
            * Normalize channel (normalize): Normalize channel before edge detection
            * Median filter size (odd values only) (median_filter_size):
            * Minimal radius to consider (min_radius): All circles smaller than this will be ignored
            * Maximal radius to consider (max_radius): All circles bigger than this will be ignored
            * Annulus secondary radius delta (annulus_size): Annulus size, 0 means full disc
            * Radius granularity (step_radius): Steps for scanning radius
            * Maximum number of detected circles (max_peaks): Keeps only n best circles
            * Minimum distance between two circles (min_distance): Remove circles that are too close
            * Draw line width (line_width):
            * Keep only closest, if not, ROI is larger circle (keep_only_one):
            * Keep the closest circle closest to (target_position):
            * Maximum distance to root position (max_dist_to_root):
            * Draw max and min circles (draw_boundaries):
            * Draw discarded candidates (draw_candidates):
            * Contract/expand circle (expand_circle):
            * Edge detection only (edge_only):
            * Select edge detection operator (operator):
            * Canny's sigma for scikit, aperture for OpenCV (canny_sigma): Sigma.
            * Canny's first Threshold (canny_first): First threshold for the hysteresis procedure.
            * Canny's second Threshold (canny_second): Second threshold for the hysteresis procedure.
            * Kernel size (kernel_size):
            * Threshold (threshold): Threshold for kernel based operators
            * Apply threshold (apply_threshold):
            * Overlay text on top of images (text_overlay): Draw description text on top of images
        --------------
        """
        wrapper = self.init_wrapper(**kwargs)
        if wrapper is None:
            return False
        res = False
        # NOTE(review): because of the `return res` in the `finally` block
        # below, EVERY return statement inside this `try` is superseded by
        # whatever `res` holds at that moment (a `return` in `finally`
        # always wins in Python).  The early `return`/`return True`
        # statements therefore only short-circuit control flow; the actual
        # return value is always `res`.
        try:
            edge_only = self.get_value_of("edge_only") == 1
            # Cache file is keyed by a hash of the detection parameters;
            # purely cosmetic / ROI-naming params are excluded so they do
            # not invalidate the cache.
            pkl_file = os.path.join(
                ipso_folders.get_path("stored_data"),
                self.get_short_hash(
                    exclude_list=("annulus_size", "roi_name", "tool_target", "roi_shape")
                )
                + ".pkl",
            )
            if (
                (self.get_value_of("enable_cache") == 1)
                and edge_only is False
                and os.path.isfile(pkl_file)
            ):
                # Cache hit: reuse the previously detected circles and skip
                # edge detection + Hough transform entirely.
                with open(pkl_file, "rb") as f:
                    self.result = pickle.load(f)
                img = self.wrapper.current_image
                line_width = self.get_value_of(
                    "line_width", scale_factor=wrapper.scale_factor
                )
            else:
                # Get the edge
                with IptEdgeDetector(wrapper=wrapper, **self.params_to_dict()) as (
                    res,
                    ed,
                ):
                    if not res:
                        # Bare return: falls through to `finally`, which
                        # returns the falsy `res` from the edge detector.
                        return
                    edges = ed.result
                    if edge_only is True:
                        # Debug/preview mode: expose the raw edge image.
                        self.result = ed.result
                        self.demo_image = self.result
                        return True
                # Read params
                min_radius = self.get_value_of(
                    "min_radius", scale_factor=wrapper.scale_factor
                )
                max_radius = self.get_value_of(
                    "max_radius", scale_factor=wrapper.scale_factor
                )
                step_radius = self.get_value_of(
                    "step_radius", scale_factor=wrapper.scale_factor
                )
                max_peaks = self.get_value_of("max_peaks")
                # max_peaks <= 0 is the "no limit" sentinel from the UI.
                max_peaks = max_peaks if max_peaks > 0 else np.inf
                min_distance = self.get_value_of(
                    "min_distance", scale_factor=wrapper.scale_factor
                )
                line_width = self.get_value_of(
                    "line_width", scale_factor=wrapper.scale_factor
                )
                draw_candidates = self.get_value_of("draw_candidates") == 1
                # Optional named ROI restricting where circles are searched.
                roi = self.get_ipt_roi(
                    wrapper=wrapper,
                    roi_names=[self.get_value_of("crop_roi_name")],
                    selection_mode="all_named",
                )
                roi = roi[0] if roi else None
                if roi is not None:
                    edges = wrapper.crop_to_roi(
                        img=edges,
                        roi=roi,
                        erase_outside_if_circle=True,
                        dbg_str="cropped_edges",
                    )
                input_kind = self.get_value_of("source_selector")
                if input_kind == "mask":
                    img = self.get_mask()
                elif input_kind == "current_image":
                    img = wrapper.current_image
                else:
                    img = None
                    logger.error(f"Unknown source: {input_kind}")
                    self.result = None
                    # Falls through to `finally`, returning res == False.
                    return
                # Detect circles
                hough_radii = np.arange(min_radius, max_radius, step_radius)
                hough_res = hough_circle(edges, hough_radii)
                # Draw the result
                if len(img.shape) == 2:
                    # Promote grayscale to 3 channels so colored circles
                    # can be drawn on it.
                    img = np.dstack((img, img, img))
                # Select the most prominent n circles
                accu, cx, cy, radii = hough_circle_peaks(
                    hough_res,
                    hough_radii,
                    min_xdistance=min_distance,
                    min_ydistance=min_distance,
                    total_num_peaks=max_peaks,
                )
                if roi is not None:
                    # Detection ran on the cropped image: translate circle
                    # centers back into full-image coordinates.
                    roi = roi.as_rect()
                    cx += roi.left
                    cy += roi.top
                if self.get_value_of("keep_only_one") == 1:
                    # Keep the single candidate closest to the configured
                    # anchor point, weighted by accumulator confidence.
                    candidates = [[a, x, y, z] for a, x, y, z in zip(accu, cx, cy, radii)]
                    h, w = img.shape[:2]
                    roi = RectangleRegion(left=0, right=w, top=0, bottom=h)
                    roi_root = roi.point_at_position(
                        self.get_value_of("target_position"), True
                    )
                    # h * w is an upper bound on any in-image distance.
                    min_dist = h * w
                    min_idx = -1
                    min_accu = -1
                    i = 0
                    colors = ipc.build_color_steps(step_count=len(candidates))
                    max_dist_to_root = self.get_value_of(
                        "max_dist_to_root", scale_factor=wrapper.scale_factor
                    )
                    for c_accu, center_x, center_y, radius in candidates:
                        if draw_candidates:
                            cv2.circle(
                                img,
                                (center_x, center_y),
                                radius,
                                colors[i],
                                max(1, line_width // 2),
                            )
                        cur_dist = roi_root.distance_to(Point(center_x, center_y))
                        # Accept a candidate when it is closer than the best
                        # so far AND within range, unless its distance gain
                        # is not worth its accumulator-score loss.
                        if (
                            (cur_dist < min_dist)
                            and (cur_dist < max_dist_to_root)
                            and (
                                (cur_dist / min_dist > min_accu / c_accu)
                                or (min_accu == -1)
                            )
                        ):
                            min_dist = cur_dist
                            min_idx = i
                            min_accu = c_accu
                        i += 1
                    if min_idx >= 0:
                        # result entries are [center_x, center_y, radius]
                        self.result = [
                            [
                                candidates[min_idx][1],
                                candidates[min_idx][2],
                                candidates[min_idx][3],
                            ]
                        ]
                        self.result[0][2] += self.get_value_of(
                            "expand_circle", scale_factor=wrapper.scale_factor
                        )
                        if self.get_value_of("draw_boundaries") == 1:
                            cv2.circle(
                                img,
                                (roi_root.x, roi_root.y),
                                min_radius,
                                ipc.C_RED,
                                line_width + 4,
                            )
                            cv2.circle(
                                img,
                                (roi_root.x, roi_root.y),
                                max_radius,
                                ipc.C_BLUE,
                                line_width + 4,
                            )
                    else:
                        # No candidate satisfied the distance constraints.
                        self.result = None
                else:
                    self.result = [[x, y, r] for x, y, r in zip(cx, cy, radii)]
                if self.get_value_of("enable_cache") == 1:
                    # Persist circles (even None) for the next identical run.
                    with open(pkl_file, "wb") as f:
                        pickle.dump(self.result, f)
            # Common tail for both the cached and freshly-computed paths:
            # draw the detected circles (and optional inner annulus ring).
            if self.result is not None:
                colors = ipc.build_color_steps(step_count=len(self.result))
                i = 0
                annulus_size = self.get_value_of("annulus_size")
                for center_x, center_y, radius in self.result:
                    cv2.circle(img, (center_x, center_y), radius, colors[i], line_width)
                    if annulus_size > 0 and radius - annulus_size > 0:
                        cv2.circle(
                            img,
                            (center_x, center_y),
                            radius - annulus_size,
                            colors[i],
                            line_width,
                        )
                    i += 1
            wrapper.store_image(
                image=img,
                text="hough_circles",
                text_overlay=self.get_value_of("text_overlay") == 1,
            )
            self.demo_image = img
            res = True
        except Exception as e:
            # NOTE(review): `self. name` parses as `self.name` (whitespace
            # around the attribute dot is ignored) -- works, but the stray
            # space should be tidied.
            logger.exception(f'Failed to process {self. name}: "{repr(e)}"')
            res = False
        else:
            pass
        finally:
            # Swallows any exception not caught above and forces the
            # return value -- see the NOTE at the top of the try block.
            return res
def generate_roi(self, **kwargs):
wrapper = self.init_wrapper(**kwargs)
if wrapper is None:
return None
if self.process_wrapper(**kwargs):
roi_shape = self.get_value_of("roi_shape")
roi_type = self.get_value_of("roi_type")
roi_name = self.get_value_of("roi_name")
tool_target = self.get_value_of("tool_target")
circles = sorted(self.result, key=lambda circle_: circle_[2])
circle = circles[0]
if roi_shape == "rectangle":
r = CircleRegion(cx=circle[0], cy=circle[1], radius=circle[2]).as_rect()
return RectangleRegion(
left=r.left,
width=r.width,
top=r.top,
height=r.height,
name=roi_name,
tag=roi_type,
target=tool_target,
)
elif roi_shape == "circle":
annulus_size = self.get_value_of("annulus_size")
if annulus_size == 0 or (circle[2] - annulus_size <= 0):
return CircleRegion(
cx=circle[0],
cy=circle[1],
radius=circle[2],
name=roi_name,
tag=roi_type,
target=tool_target,
)
else:
return AnnulusRegion(
cx=circle[0],
cy=circle[1],
radius=circle[2],
in_radius=circle[2] - annulus_size,
name=roi_name,
tag=roi_type,
target=tool_target,
)
else:
return None
else:
return None
def apply_roy(self, **kwargs):
wrapper = self.init_wrapper(**kwargs)
if wrapper is None:
return None
if self.process_wrapper(**kwargs):
circles = sorted(self.result, key=lambda circle_: circle_[2])
circle = circles[0]
roi_name = f"roi_keep_{len(wrapper.rois_list)}"
wrapper.add_circle_roi(circle[0], circle[1], circle[2], roi_name, "keep")
target = kwargs.get("target", "source")
if target == "source":
res = wrapper.apply_rois(wrapper.current_image)
elif target == "mask":
res = wrapper.apply_rois(wrapper.mask)
else:
res = None
| |
ZZZ(self):
"""hardcoded/mock instance of the class"""
return OutboundOrderLines()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
    def Clone(self):
        # Auto-generated .NET binding stub; the real implementation lives
        # in the BOXwise assembly, so the body is intentionally empty.
        """ Clone(self: OutboundOrderLines) -> object """
        pass
    @staticmethod
    def FromIEnumerable(list):
        # Stub for the .NET factory method; `list` shadows the builtin but
        # mirrors the original .NET parameter name.
        """ FromIEnumerable(list: IEnumerable[OutboundOrderLine]) -> OutboundOrderLines """
        pass
    def GetHashCode(self):
        # Auto-generated stub mirroring System.Object.GetHashCode.
        """ GetHashCode(self: OutboundOrderLines) -> int """
        pass
    def __getitem__(self,*args):
        # Stub for indexer access on the underlying .NET collection.
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self,*args):
        # Stub constructor; the generator emitted the same sentence once
        # per .NET overload, hence the repetition below.
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self,*args):
        # Stub for IEnumerable support (iteration over order lines).
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __reduce_ex__(self,*args):
        # Stub for pickle support; no documented signature was generated.
        pass
    def __setitem__(self,*args):
        # Stub for indexer assignment on the underlying .NET collection.
        """ x.__setitem__(i,y) <==> x[i]= """
        pass
IsDisposable=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: IsDisposable(self: OutboundOrderLines) -> bool
"""
PreserveState=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: PreserveState(self: OutboundOrderLines) -> bool
"""
DisplayMember='ItemCode'
ValueMember='Id'
# Auto-generated IronPython stub for the .NET HistoryOutboundOrderLines
# collection; all bodies are empty placeholders for IDE indexing.
class HistoryOutboundOrderLines(OutboundOrderLines):
    """ HistoryOutboundOrderLines() """
    def ZZZ(self):
        """hardcoded/mock instance of the class"""
        return HistoryOutboundOrderLines()
    # NOTE(review): ZZZ() is called without the `self` argument it
    # declares -- this would raise TypeError if the module were actually
    # imported; these stubs appear intended for static analysis only.
    # Verify before executing.
    instance=ZZZ()
    """hardcoded/returns an instance of the class"""
    def __getitem__(self,*args):
        # Stub for indexer access.
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self,*args):
        # Stub constructor (generator repeats the sentence per overload).
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self,*args):
        # Stub for IEnumerable support.
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __reduce_ex__(self,*args):
        # Stub for pickle support.
        pass
    def __setitem__(self,*args):
        # Stub for indexer assignment.
        """ x.__setitem__(i,y) <==> x[i]= """
        pass
    # Default binding members used by UI list controls.
    DisplayMember='ItemCode'
    ValueMember='Id'
# Auto-generated IronPython stub for the .NET HistoryOutboundOrders
# collection; all bodies are empty placeholders for IDE indexing.
class HistoryOutboundOrders(FindableList):
    """ HistoryOutboundOrders() """
    def ZZZ(self):
        """hardcoded/mock instance of the class"""
        return HistoryOutboundOrders()
    # NOTE(review): ZZZ() is called without the `self` argument it
    # declares -- this would raise TypeError if the module were actually
    # imported; these stubs appear intended for static analysis only.
    # Verify before executing.
    instance=ZZZ()
    """hardcoded/returns an instance of the class"""
    def Add(self,*__args):
        # Stub covering both .NET overloads listed below.
        """
        Add(self: HistoryOutboundOrders,order: HistoryOutboundOrder) -> FindableList[HistoryOutboundOrder]
        Add(self: HistoryOutboundOrders,order: HistoryOutboundOrder,predicate: Predicate[HistoryOutboundOrder])
        """
        pass
    def Clone(self):
        """ Clone(self: HistoryOutboundOrders) -> object """
        pass
    @staticmethod
    def FromIEnumerable(list):
        # `list` shadows the builtin but mirrors the .NET parameter name.
        """ FromIEnumerable(list: IEnumerable[HistoryOutboundOrder]) -> HistoryOutboundOrders """
        pass
    def GetHashCode(self):
        """ GetHashCode(self: HistoryOutboundOrders) -> int """
        pass
    def __add__(self,*args):
        # Stub for concatenation support.
        """ x.__add__(y) <==> x+y """
        pass
    def __getitem__(self,*args):
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __init__(self,*args):
        # Stub constructor (generator repeats the sentence per overload).
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    def __iter__(self,*args):
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __reduce_ex__(self,*args):
        # Stub for pickle support.
        pass
    def __setitem__(self,*args):
        """ x.__setitem__(i,y) <==> x[i]= """
        pass
    # Generated property placeholder: lambdas stand in for
    # (fget, fset, fdel); values are meaningless at runtime.
    TotalRowsMatched=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """
    Get: TotalRowsMatched(self: HistoryOutboundOrders) -> Int64
    Set: TotalRowsMatched(self: HistoryOutboundOrders)=value
    """

    # Default binding members used by UI list controls.
    DisplayMember='Number'
    ValueMember='DbKey'
class OutboundOrder(DbObject):
""" """
def ZZZ(self):
"""hardcoded/mock instance of the class"""
return OutboundOrder()
instance=ZZZ()
"""hardcoded/returns an instance of the class"""
def Clone(self):
""" Clone(self: OutboundOrder) -> object """
pass
def Equals(self,obj):
""" Equals(self: OutboundOrder,obj: object) -> bool """
pass
def GetHashCode(self):
""" GetHashCode(self: OutboundOrder) -> int """
pass
def GetHashCodeOfCustomer(self):
""" GetHashCodeOfCustomer(self: OutboundOrder) -> int """
pass
def __eq__(self,*args):
""" x.__eq__(y) <==> x==y """
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
AllowPartialDelivery=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: AllowPartialDelivery(self: OutboundOrder) -> PartialDeliveryTypeEnum
Set: AllowPartialDelivery(self: OutboundOrder)=value
"""
AllowPartialDeliveryAsString=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: AllowPartialDeliveryAsString(self: OutboundOrder) -> str
"""
Backorder=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: Backorder(self: OutboundOrder) -> bool
Set: Backorder(self: OutboundOrder)=value
"""
CustomerAddressLine1=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerAddressLine1(self: OutboundOrder) -> str
Set: CustomerAddressLine1(self: OutboundOrder)=value
"""
CustomerAddressLine2=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerAddressLine2(self: OutboundOrder) -> str
Set: CustomerAddressLine2(self: OutboundOrder)=value
"""
CustomerAddressLine3=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerAddressLine3(self: OutboundOrder) -> str
Set: CustomerAddressLine3(self: OutboundOrder)=value
"""
CustomerCity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Necessary for Userinterface BOXwise Mobile
Get: CustomerCity(self: OutboundOrder) -> str
Set: CustomerCity(self: OutboundOrder)=value
"""
CustomerContact=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerContact(self: OutboundOrder) -> str
Set: CustomerContact(self: OutboundOrder)=value
"""
CustomerContactEmail=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerContactEmail(self: OutboundOrder) -> str
Set: CustomerContactEmail(self: OutboundOrder)=value
"""
CustomerCountryCode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Necessary for Userinterface BOXwise Mobile
Get: CustomerCountryCode(self: OutboundOrder) -> str
Set: CustomerCountryCode(self: OutboundOrder)=value
"""
CustomerCountryName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerCountryName(self: OutboundOrder) -> str
Set: CustomerCountryName(self: OutboundOrder)=value
"""
CustomerEoriNumber=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerEoriNumber(self: OutboundOrder) -> str
Set: CustomerEoriNumber(self: OutboundOrder)=value
"""
CustomerInvoiceAddressLine1=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceAddressLine1(self: OutboundOrder) -> str
Set: CustomerInvoiceAddressLine1(self: OutboundOrder)=value
"""
CustomerInvoiceAddressLine2=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceAddressLine2(self: OutboundOrder) -> str
Set: CustomerInvoiceAddressLine2(self: OutboundOrder)=value
"""
CustomerInvoiceAddressLine3=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceAddressLine3(self: OutboundOrder) -> str
Set: CustomerInvoiceAddressLine3(self: OutboundOrder)=value
"""
CustomerInvoiceCity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceCity(self: OutboundOrder) -> str
Set: CustomerInvoiceCity(self: OutboundOrder)=value
"""
CustomerInvoiceContact=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceContact(self: OutboundOrder) -> str
Set: CustomerInvoiceContact(self: OutboundOrder)=value
"""
CustomerInvoiceContactEmail=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceContactEmail(self: OutboundOrder) -> str
Set: CustomerInvoiceContactEmail(self: OutboundOrder)=value
"""
CustomerInvoiceCountryCode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceCountryCode(self: OutboundOrder) -> str
Set: CustomerInvoiceCountryCode(self: OutboundOrder)=value
"""
CustomerInvoiceCountryName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceCountryName(self: OutboundOrder) -> str
Set: CustomerInvoiceCountryName(self: OutboundOrder)=value
"""
CustomerInvoiceName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceName(self: OutboundOrder) -> str
Set: CustomerInvoiceName(self: OutboundOrder)=value
"""
CustomerInvoiceNumber=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceNumber(self: OutboundOrder) -> str
Set: CustomerInvoiceNumber(self: OutboundOrder)=value
"""
CustomerInvoicePhoneNumber=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoicePhoneNumber(self: OutboundOrder) -> str
Set: CustomerInvoicePhoneNumber(self: OutboundOrder)=value
"""
CustomerInvoiceState=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceState(self: OutboundOrder) -> str
Set: CustomerInvoiceState(self: OutboundOrder)=value
"""
CustomerInvoiceZipCode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerInvoiceZipCode(self: OutboundOrder) -> str
Set: CustomerInvoiceZipCode(self: OutboundOrder)=value
"""
CustomerName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Necessary for Userinterface BOXwise Mobile
Get: CustomerName(self: OutboundOrder) -> str
Set: CustomerName(self: OutboundOrder)=value
"""
CustomerNumber=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerNumber(self: OutboundOrder) -> str
Set: CustomerNumber(self: OutboundOrder)=value
"""
CustomerPackageSlipLayout=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerPackageSlipLayout(self: OutboundOrder) -> str
Set: CustomerPackageSlipLayout(self: OutboundOrder)=value
"""
CustomerPhoneNumber=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerPhoneNumber(self: OutboundOrder) -> str
Set: CustomerPhoneNumber(self: OutboundOrder)=value
"""
CustomerReference=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerReference(self: OutboundOrder) -> str
Set: CustomerReference(self: OutboundOrder)=value
"""
CustomerState=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerState(self: OutboundOrder) -> str
Set: CustomerState(self: OutboundOrder)=value
"""
CustomerZipCode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomerZipCode(self: OutboundOrder) -> str
Set: CustomerZipCode(self: OutboundOrder)=value
"""
CustomFields=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: CustomFields(self: OutboundOrder) -> SerializableDictionary[str,object]
Set: CustomFields(self: OutboundOrder)=value
"""
DateOfDelivery=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: DateOfDelivery(self: OutboundOrder) -> DateTime
Set: DateOfDelivery(self: OutboundOrder)=value
"""
DateOrdered=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: DateOrdered(self: OutboundOrder) -> DateTime
Set: DateOrdered(self: OutboundOrder)=value
"""
DeliverableOrderlines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: DeliverableOrderlines(self: OutboundOrder) -> int
Set: DeliverableOrderlines(self: OutboundOrder)=value
"""
DeliveryMethod=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Necessary for Userinterface BOXwise Mobile
Get: DeliveryMethod(self: OutboundOrder) -> str
Set: DeliveryMethod(self: OutboundOrder)=value
"""
Description=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: Description(self: OutboundOrder) -> str
Set: Description(self: OutboundOrder)=value
"""
Id=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Necessary for Userinterface BOXwise Mobile
Get: Id(self: OutboundOrder) -> int
Set: Id(self: OutboundOrder)=value
"""
LineBackColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: LineBackColor(self: OutboundOrder) -> str
Set: LineBackColor(self: OutboundOrder)=value
"""
LineForeColor=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: LineForeColor(self: OutboundOrder) -> str
Set: LineForeColor(self: OutboundOrder)=value
"""
Notes=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: Notes(self: OutboundOrder) -> str
Set: Notes(self: OutboundOrder)=value
"""
Number=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Necessary for Userinterface BOXwise Mobile
Get: Number(self: OutboundOrder) -> str
Set: Number(self: OutboundOrder)=value
"""
OrderAmountDeliverable=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: OrderAmountDeliverable(self: OutboundOrder) -> Decimal
Set: OrderAmountDeliverable(self: OutboundOrder)=value
"""
OrderAmountTotal=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: OrderAmountTotal(self: OutboundOrder) -> Decimal
Set: OrderAmountTotal(self: OutboundOrder)=value
"""
PendingItemCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: PendingItemCount(self: OutboundOrder) -> int
"""
PendingItems=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: PendingItems(self: OutboundOrder) -> Dictionary[str,Decimal]
Set: PendingItems(self: OutboundOrder)=value
"""
PendingItemUnitCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: PendingItemUnitCount(self: OutboundOrder) -> Decimal
"""
PendingOrderLines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: PendingOrderLines(self: OutboundOrder) -> int
Set: PendingOrderLines(self: OutboundOrder)=value
"""
PercentageDeliverableAmount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: PercentageDeliverableAmount(self: OutboundOrder) -> int
Set: PercentageDeliverableAmount(self: OutboundOrder)=value
"""
PercentageDeliverableLines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: PercentageDeliverableLines(self: OutboundOrder) -> int
Set: PercentageDeliverableLines(self: OutboundOrder)=value
"""
PercentDeliverable=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Percentage of the QuantityToDeliver that is deliverable
Get: PercentDeliverable(self: OutboundOrder) -> int
Set: PercentDeliverable(self: OutboundOrder)=value
"""
ProjectCode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: ProjectCode(self: OutboundOrder) -> str
Set: ProjectCode(self: OutboundOrder)=value
"""
ProjectName=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: ProjectName(self: OutboundOrder) -> str
Set: ProjectName(self: OutboundOrder)=value
"""
RoutingCode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: RoutingCode(self: OutboundOrder) -> str
Set: RoutingCode(self: OutboundOrder)=value
"""
SelectionCode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: SelectionCode(self: OutboundOrder) -> str
Set: SelectionCode(self: OutboundOrder)=value
"""
SelectionCodeDescription=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: SelectionCodeDescription(self: OutboundOrder) -> str
Set: SelectionCodeDescription(self: OutboundOrder)=value
"""
Tag=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: Tag(self: OutboundOrder) -> Tag
Set: Tag(self: OutboundOrder)=value
"""
TotalOrderlines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""
Get: TotalOrderlines(self: OutboundOrder) -> int
Set: TotalOrderlines(self: OutboundOrder)=value
"""
Type=property(lambda | |
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# Copyright 2019 The celery-dyrygent Authors. All rights reserved.
import logging
import random
import time
from datetime import (
timedelta,
)
from celery_dyrygent.celery import (
entities,
inspect,
)
from celery_dyrygent.workflows.exceptions import WorkflowException
from celery_dyrygent.workflows.node import WorkflowNode
# TO CONSIDER: perhaps the following class shouldn't be a mixin but some kind
# of celery-canvas-to-workflow transformer. For now let's leave it as a mixin
# since workflows are quite closely bound to celery.
# The mixin only improves readability of the Workflow class, as the code
# related to understanding celery canvases resides only here.
class CeleryWorkflowMixin(object):
    """
    Adds celery-canvas understanding to the Workflow class.

    Each ``add_celery_*`` method consumes one canvas kind, registers its
    signatures as workflow nodes, and returns the list of nodes that any
    subsequent canvas should depend on.
    """

    def add_celery_canvas(self, canvas, dependencies=None):
        """
        Consume any celery canvas by dispatching on its exact type.

        :param canvas: a Signature, Chord, Chain or Group instance
        :param dependencies: nodes the canvas must wait for
        :return: list of nodes to depend on after this canvas
        """
        handlers = {
            entities.Signature: self.add_celery_signature,
            entities.Chord: self.add_celery_chord,
            entities.Chain: self.add_celery_chain,
            entities.Group: self.add_celery_group,
        }
        return handlers[type(canvas)](canvas, dependencies)

    def add_celery_signature(self, signature, dependencies=None):
        """Register a single signature; freeze() guarantees a task_id."""
        signature.freeze()
        return [self.add_signature(signature, dependencies)]

    def add_celery_chain(self, chain, dependencies=None):
        """
        Consume a celery chain (ordered canvases, c1 -> c2 -> c3 -> c4).

        The first element depends on `dependencies`; the nodes returned
        for the last element are what later canvases must depend on.
        """
        # rely on celery internals to iterate over chain
        current = dependencies
        for task in inspect.get_chain_tasks(chain):
            current = self.add_celery_canvas(task, current)
        return current

    def add_celery_group(self, group, dependencies):
        """
        Consume a celery group (parallel canvases, (c1, c2, c3, c4)).

        Every member depends on `dependencies`, and every member has to
        be depended on by whatever follows the group.
        """
        collected = []
        for task in inspect.get_group_tasks(group):
            collected.extend(self.add_celery_canvas(task, dependencies))
        # fall back to the incoming dependencies when the group is empty
        return collected or dependencies

    def add_celery_chord(self, chord, dependencies):
        """
        Consume a celery chord: a parallel header followed by a barrier
        body, e.g. (c1, c2, c3, c4) -> c5.

        The header tasks depend on `dependencies`; the body depends on
        the header (or on `dependencies` when the header is empty) and
        is what later canvases must depend on.
        """
        body, header = inspect.get_chord_tasks(chord)
        header_results = []
        for task in header:
            header_results.extend(self.add_celery_canvas(task, dependencies))
        return self.add_celery_canvas(body, header_results or dependencies)
class WorkflowSignalMixin(object):
    """Signal support for Workflow: named hooks with best-effort emission."""

    # registered handlers per signal name; class-level, shared by design
    hooks = {
        'after_active_tick': [],
        'on_finish': [],
        'on_state_change': [],
    }

    @classmethod
    def connect(cls, hook_name):
        """Decorator factory registering a handler for ``hook_name``.

        The wrapped function must accept two arguments:
            workflow - instance of workflow
            payload - payload passed when signal is emitted
        """
        if hook_name not in cls.hooks:
            raise WorkflowException("'{}': invalid signal".format(hook_name))

        def register(function):
            cls.hooks[hook_name].append(function)
            return function

        return register

    def emit(self, hook_name, payload=None):
        """Invoke every handler registered for ``hook_name``."""
        assert hook_name in self.hooks
        for handler in self.hooks[hook_name]:
            # hook handlers are optional and must never break workflow
            # execution -- failures are logged and swallowed
            try:
                handler(self, payload)
            except Exception as exc:
                self.logger.warning(
                    "There was an exception during hook handler execution, "
                    "'%s' -> '%s' failed due to '%s'",
                    hook_name, handler, exc
                )
class WorkflowState(object):
    """Enumeration of the states a workflow can be in."""
    # workflow in initial state, nothing scheduled yet
    INITIAL = 'INITIAL'
    # workflow in progress
    RUNNING = 'RUNNING'
    # workflow finished properly
    SUCCESS = 'SUCCESS'
    # workflow execution failed due to failed tasks
    FAILURE = 'FAILURE'
    # unexpected error occurred
    ERROR = 'ERROR'
    # CANCEL/REVOKE is not supported here as it's not handled
    # by dyrygent. If an operation is revoked we don't get any info,
    # so such a state has to be handled elsewhere.
    # WAITING is not supported now as we don't inspect task ETA for now;
    # it might be implemented later.
class WorkflowAlreadyRunningException(WorkflowException):
    """
    Internal exception raised when multiple workflow processors
    are detected for the same workflow (another processor already
    owns the execution).
    """
    pass
class Workflow(WorkflowSignalMixin, CeleryWorkflowMixin):
    """
    Main class wrapping whole workflow, its signatures and inter-signature
    dependencies
    """
    # celery task driving this workflow; assigned elsewhere before use
    workflow_processor_task = None
    # by default the processing task will be retried for 7 days
    max_processing_time = int(timedelta(days=7).total_seconds())
    # attributes that are serialized/deserialized verbatim, with defaults
    # NOTE(review): class-level dict holds mutable defaults -- presumably
    # only read as a template during (de)serialization; confirm
    straight_serializables = {
        'running': {},
        'finished': {},
        'processing_limit_ts': None,
        'version': 1,
        'retry_policy': ['random', 3, 10],
        'id': None,
        'state': WorkflowState.INITIAL,
        'tasks_options': {},
        'workflow_options': {},
        'custom_payload': {}
    }
def __init__(self, options=None):
"""
Options are being transferred to the workflow task, as defined in
https://docs.celeryproject.org/en/stable/reference/celery.app.task.html#celery.app.task.Task.apply_async
"""
# holds state of workflow
self.state = WorkflowState.INITIAL
# dict holds all workflow nodes (wrapping signatures)
self.nodes = {}
# task is in running state if its id is in the dict
self.running = {}
# task finished successfuly or not
self.finished = {}
# holds workflow processor signature (after freeze())
self._signature = None
self.id = None
# final execution timestamp, execution reaches it then processing task
# will fail
self.processing_limit_ts = None
# version of workflow structure, might be useful to deserialize
# workflows scheduled by older version of code
self.version = 1
self.set_retry_policy('random', 10, 30)
# some stats/metrics holder
self.stats = {
'last_apply_async_tick': 0,
'ticks': 0,
'consecutive_celery_error_ticks': 0,
}
# internal var, whether tick scheduled some tasks or not
# used for smarter retries
self._active_tick = False
# internal var, determines whether there were detected
# celery erorrs within tick
self._celery_errors_within_tick = []
self.custom_payload = {}
# options for apply_async
self.tasks_options = {}
self.workflow_options = options or {}
# create instance level logger
self.__init_logger()
def add_signature(self, signature, dependencies=None):
"""
Add signature to Workflow, returns created WorkflowNode instance
"""
node = WorkflowNode(signature)
dependencies = dependencies if dependencies is not None else []
# if signature is already added then perhaps there is some bug in code
if node.id in self.nodes:
raise WorkflowException(
"Looks like signature '{}' is already added to the workflow"
.format(signature)
)
self.nodes[node.id] = node
for required_node in dependencies:
node.add_dependency(required_node)
return node
def get_tasks_state(self, task_ids):
"""
Fetch task states from celery.
Return dict {task_id: state}
State is celery.result.AsyncResult instance
"""
states = {
task_id: inspect.get_task_state(task_id)
for task_id in task_ids
}
return states
    def update_pending(self):
        """
        Move pending tasks to finished when task state is ready
        """
        # get finished tasks states
        # NOTE: self.running maps task_id -> number of extra state checks
        # performed; that counter drives the celery-bug mitigations below
        states = self.get_tasks_state(self.running)
        for task_id, result in states.items():
            if not result.is_done():
                self.logger.debug("Task %s is still running", task_id)
                continue
            # mitigation for celery bug
            # ready-but-not-final: assume celery will retry; after the 3rd
            # sighting reschedule the task ourselves
            if not result.is_final():
                self.logger.warning(
                    "Task %s detected as ready but got exception '%s', "
                    "assuming it will be retried and executed",
                    task_id, str(result.value)
                )
                self._celery_errors_within_tick.append(
                    "Problem with task {}".format(task_id)
                )
                # usually celery retries the task on its own
                # but there are some corner cases where it doesn't do it.
                # We wait for 2 ticks and reschedule the task.
                # This mechanism might interfere with evaluating task result
                # for a few times (both use the same counter) but lets
                # ignore that
                self.running[task_id] += 1
                if self.running[task_id] == 3:
                    self.reschedule_node_exec(self.nodes[task_id])
                continue
            # mitigation for celery bug
            if result.needs_reschedule():
                self.logger.warning(
                    "Task %s detected with exception '%s', "
                    "requires reschedule",
                    task_id, str(result.value)
                )
                self._celery_errors_within_tick.append(
                    "Reschedule task {}".format(task_id)
                )
                self.reschedule_node_exec(self.nodes[task_id])
                continue
            # mitigation for celery bug
            if not result.is_successful() and self.running[task_id] <= 3:
                self.running[task_id] += 1
                # Celery occasionally returns successful task as not
                # successful. Check failed state up to 3 times before
                # treating the fail as permanent
                continue
            if self.running[task_id] and self.running[task_id] > 1:
                self.logger.info(
                    "Task %s has final state after %s checks",
                    task_id, self.running[task_id]
                )
            # the state is trustworthy now: move the task from running to
            # finished and record whether it succeeded
            del self.running[task_id]
            self.finished[task_id] = result.is_successful()
            if self.finished[task_id]:
                self.logger.info("Task %s is done, success", task_id)
            else:
                self.logger.info(
                    "Task %s is done, failure, result '%s(%s)'",
                    task_id, type(result.value), result.value
                )
def get_unlocked_dependencies(self):
"""
Get set of tasks which are ready with state success
"""
return set([
task_id
for task_id, task_success in self.finished.items()
if task_success
])
def get_signatures_to_run(self):
"""
Find signatures which should be executed according to dependencies
"""
ok_tasks = self.get_unlocked_dependencies()
to_run = []
for node_id, node in self.nodes.items():
if node_id in self.running or node_id in self.finished:
continue
if set(node.dependencies).issubset(ok_tasks):
to_run.append(node)
self.logger.debug(
"Workflow node %s dependencies fulfilled",
node.id
)
return to_run
def are_scheduled(self, nodes_to_run):
"""
Check if task to be scheduled | |
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function
#in case of using json.dumps with ensure_ascii=False
import sys
# Python 2 only: reload() re-exposes setdefaultencoding (hidden by site.py);
# forcing utf-8 keeps implicit str<->unicode conversions from raising
reload(sys)
sys.setdefaultencoding('utf-8')
from pprint import pprint
from functools import wraps
import os
# urlencode/quote_plus moved to urllib.parse in Python 3
try:
    from urllib import urlencode, quote_plus
except ImportError:
    from urllib.parse import urlencode, quote_plus
# urllib2/cookielib were renamed in Python 3; alias them to a common name
try:
    import urllib2 as wxb_urllib
    from cookielib import CookieJar
except ImportError:
    import urllib.request as wxb_urllib
    from http.cookiejar import CookieJar
import re
import time
import xml.dom.minidom
import json
import math
import subprocess
import ssl
# NOTE(review): `thread` and the unconditional `import urllib2` below are
# Python 2 only and defeat the py2/py3 fallbacks above -- confirm intent
import thread
import urllib2
# the Turing robot API key is optional; without it auto-reply is disabled
try:
    from config import WX_TULING_API_KEY
except:
    print('-----------------no TULING api key--------------')
    WX_TULING_API_KEY = ''
WX_TULING_API_URL = 'http://www.tuling123.com/openapi/api?key=' + WX_TULING_API_KEY + '&info=%s'
DEBUG = False
# running on a server (serve the QR code over HTTP) vs. a local desktop
IS_SERVER = False
# whether the Turing auto-reply robot is enabled
ROBOT_ON = False
MAX_GROUP_NUM = 35  # members per chat room group
INTERFACE_CALLING_INTERVAL = 20  # seconds between API calls; too short triggers "operation too frequent" and a ~30 minute ban
MAX_PROGRESS_LEN = 50
SERVER_QR_PATH = os.path.join(os.getcwd(), 'static/qrcode.jpg')
QRImagePath = os.path.join(os.getcwd(), 'qrcode.jpg')
# WebWX session state, populated step by step during the login handshake
tip = 0
uuid = ''
base_uri = ''
redirect_uri = ''
push_uri = ''
skey = ''
wxsid = ''
wxuin = ''
pass_ticket = ''
deviceId = 'e000000000000000'
BaseRequest = {}
# contact/profile caches filled by webwxinit/webwxgetcontact
ContactList = []
My = []
SyncKey = []
MemberList = []
MemberMap = {}
MemberNickMap = {}
# alert subsystem: who to watch plus last-seen/last-replied timestamps
ALERT_MEMBER = []
ALERT_LAST_MSG_FROM = {}
ALERT_LAST_MSG_REPLY = {}
ALERT_FLAG = False
# Python 2: use the lazy xrange as range; Python 3 range is already lazy
try:
    xrange
    range = xrange
except:
    # python 3
    pass
def ex(default=0):
    """Decorator factory: swallow any exception raised by the wrapped
    function, print it, and return `default` instead.

    Fix: `except Exception, e` is Python-2-only syntax; `as e` (PEP 3110)
    works on Python 2.6+ and Python 3 alike.
    """
    def wrapper(fn):
        @wraps(fn)
        def func(*args, **kwds):
            try:
                r = fn(*args, **kwds)
            except Exception as e:
                r = default
                print('[%s][%s]' % (fn.__name__, str(e)))
                #print traceback.format_exc()
            return r
        return func
    return wrapper
def pace(fn):
    """Decorator: print the wall-clock duration of every call.

    Bug fix: the original never returned the wrapper, so any function
    decorated with @pace silently became None.
    """
    @wraps(fn)
    def func(*args, **kwds):
        t0 = time.time()
        r = fn(*args, **kwds)
        t = time.time() - t0
        print('---%s: %ss---' % (fn.__name__, t))
        return r
    return func
def show():
    """Print whether the auto-reply robot is currently enabled."""
    print('ROBOT_ON: %s' % (ROBOT_ON,))
def robot_on():
    """Enable the auto-reply robot (sets the module-wide flag)."""
    global ROBOT_ON
    ROBOT_ON = True
def robot_off():
    """Disable the auto-reply robot (clears the module-wide flag)."""
    global ROBOT_ON
    ROBOT_ON = False
# seconds a watched contact's message may stay unanswered before alerting
ALERT_TIMEOUT = 60 * 1
def check_alert():
    """Return True when a watched contact has a message older than
    ALERT_TIMEOUT with no (or only an older) reply, and no alert has
    fired yet."""
    now = time.time()
    for member, last_seen in ALERT_LAST_MSG_FROM.items():
        if (now - last_seen) <= ALERT_TIMEOUT or ALERT_FLAG:
            continue
        if member not in ALERT_LAST_MSG_REPLY:
            return True
        if ALERT_LAST_MSG_REPLY[member] < last_seen:
            return True
    return False
def init_alert():
    """Seed the alert watch list with the default contact."""
    add_alert(nickname='Tingting')
def clean_alert():
    """Reset the whole alert subsystem -- watched members, message
    bookkeeping and the fired flag -- then dump the new state."""
    global ALERT_MEMBER, ALERT_LAST_MSG_FROM
    global ALERT_LAST_MSG_REPLY, ALERT_FLAG
    ALERT_FLAG = False
    ALERT_MEMBER = []
    ALERT_LAST_MSG_FROM = {}
    ALERT_LAST_MSG_REPLY = {}
    show_alert()
def re_alert():
    """Re-arm the alert: clear message bookkeeping and the fired flag,
    but keep the watched member list intact."""
    global ALERT_FLAG
    global ALERT_LAST_MSG_FROM
    global ALERT_LAST_MSG_REPLY
    ALERT_FLAG = False
    ALERT_LAST_MSG_FROM = {}
    ALERT_LAST_MSG_REPLY = {}
    show_alert()
def show_alert():
    """Print the current state of the alert subsystem."""
    for label, value in (
            ('ALERT_FLAG: ', ALERT_FLAG),
            ('ALERT_MEMBER: ', ALERT_MEMBER),
            ('ALERT_LAST_MSG_FROM: ', ALERT_LAST_MSG_FROM),
            ('ALERT_LAST_MSG_REPLY: ', ALERT_LAST_MSG_REPLY)):
        print(label, value)
def start_alert():
    """Fire the alert: print a banner, mark the alert as fired and play
    the alert sound with the platform's opener."""
    global ALERT_FLAG
    print('*' * 20 + '大王呼叫,全体集合!' + '*' * 20)
    ALERT_FLAG = True
    if 'darwin' in sys.platform:
        subprocess.call(['open', 'alert.mp3'])
    else:
        os.startfile('alert.mp3')
def report_redbag(fr='发红包的'):
    """Announce an incoming red packet: print a banner, play a sound and
    message myself so the phone gets a notification."""
    print('#' * 20 + '红包来了,快去抢哇!' + '#' * 20)
    if 'darwin' in sys.platform:
        subprocess.call(['open', 'redbag.mp3'])
    else:
        os.startfile('redbag.mp3')
    send('[%s]发来了红包,快去抢耶!' % fr, My['NickName'])
def send_alert():
    """Message myself that the boss has arrived (no-op before login)."""
    if not My:
        return
    send('大王来啦!!!', My['NickName'])
def responseState(func, BaseResponse):
    """Evaluate a WebWX BaseResponse: print diagnostics when debugging
    or on failure, and return True iff Ret == 0."""
    ErrMsg = BaseResponse['ErrMsg']
    Ret = BaseResponse['Ret']
    if DEBUG or Ret != 0:
        print('func: %s, Ret: %d, ErrMsg: %s' % (func, Ret, ErrMsg))
    return Ret == 0
def add_alert(nickname=None):
    """Add a nickname to the alert watch list (falsy values are ignored)."""
    global ALERT_MEMBER
    if not nickname:
        return
    ALERT_MEMBER.append(nickname)
def to8(u):
    """Coerce text to a utf-8 byte string (Python 2 idiom): str passes
    through, unicode is encoded, anything else becomes ''."""
    if type(u) == str:
        return u
    return u.encode('utf8') if type(u) == unicode else ''
def toU(s):
    """Coerce a value to unicode (Python 2 idiom): unicode passes
    through, str is decoded as utf-8, anything else becomes u''."""
    if type(s) == unicode:
        return s
    return s.decode('utf8') if type(s) == str else u''
def getRequest(url, data=None):
    """Build a urllib Request, utf-8 encoding `data` when possible.

    WeChat's endpoint wants raw utf-8 bytes rather than the \\uXXXX
    escapes json.dumps produces, so unicode payloads are encoded here;
    anything that cannot be encoded (None, already-byte str) is passed
    through unchanged.

    Bug fix: the original used a bare `except:` plus a `return` inside
    `finally`, which silently swallowed *every* in-flight exception,
    including KeyboardInterrupt and SystemExit.
    """
    try:
        data = data.encode('utf-8')
    except Exception:
        # data is None or a byte string that cannot be re-encoded: send as-is
        pass
    return wxb_urllib.Request(url=url, data=data)
def getUUID():
    """Request a login UUID from the WebWX jslogin endpoint.

    Stores it in the module-global `uuid` and returns True on code 200.
    NOTE(review): `pm` is not checked for None -- an unexpected response
    body raises AttributeError here.
    """
    global uuid
    url = 'https://login.weixin.qq.com/jslogin'
    params = {
        'appid': 'wx782c26e4c19acffb',
        'fun': 'new',
        'lang': 'zh_CN',
        '_': int(time.time()),
    }
    request = getRequest(url=url, data=urlencode(params))
    response = wxb_urllib.urlopen(request)
    data = response.read().decode('utf-8', 'replace')
    # print(data)
    # window.QRLogin.code = 200; window.QRLogin.uuid = "oZwt_bFfRg==";
    regx = r'window.QRLogin.code = (\d+); window.QRLogin.uuid = "(\S+?)"'
    pm = re.search(regx, data)
    code = pm.group(1)
    uuid = pm.group(2)
    if code == '200':
        return True
    return False
def showQRImage():
    """Download the login QR code for the current uuid and present it.

    On a server it is written where a web server can serve it; on a
    desktop it is written locally and opened with the platform viewer.
    Also advances the polling `tip` to 1 (first wait).
    """
    global tip
    url = 'https://login.weixin.qq.com/qrcode/' + uuid
    params = {
        't': 'webwx',
        '_': int(time.time()),
    }
    request = getRequest(url=url, data=urlencode(params))
    response = wxb_urllib.urlopen(request)
    tip = 1
    global IS_SERVER
    # any linux host is treated as a headless server
    if sys.platform.find('linux') >= 0:
        IS_SERVER = True
    if IS_SERVER:
        with open(SERVER_QR_PATH, 'wb') as f:
            f.write(response.read())
        print('请扫码二维码登录,地址 http://alancer.ml/qrcode.jpg')
    else:
        f = open(QRImagePath, 'wb')
        f.write(response.read())
        f.close()
        if sys.platform.find('darwin') >= 0:
            subprocess.call(['open', QRImagePath])
        elif sys.platform.find('linux') >= 0:
            subprocess.call(['xdg-open', QRImagePath])
        else:
            os.startfile(QRImagePath)
        print('请使用微信扫描二维码以登录')
def waitForLogin():
    """Poll the login endpoint once and return the status code string.

    '201' - QR code scanned, waiting for confirmation on the phone
    '200' - confirmed; also resolves redirect_uri/base_uri/push_uri
    '408' - poll timed out, caller should retry
    """
    global tip, base_uri, redirect_uri, push_uri
    url = 'https://login.weixin.qq.com/cgi-bin/mmwebwx-bin/login?tip=%s&uuid=%s&_=%s' % (
        tip, uuid, int(time.time()))
    request = getRequest(url=url)
    response = wxb_urllib.urlopen(request)
    data = response.read().decode('utf-8', 'replace')
    # print(data)
    # window.code=500;
    regx = r'window.code=(\d+);'
    pm = re.search(regx, data)
    code = pm.group(1)
    if code == '201':  # scanned
        print('成功扫描,请在手机上点击确认以登录')
        tip = 0
    elif code == '200':  # logged in
        print('正在登录...')
        regx = r'window.redirect_uri="(\S+?)";'
        pm = re.search(regx, data)
        redirect_uri = pm.group(1) + '&fun=new'
        base_uri = redirect_uri[:redirect_uri.rfind('/')]
        # base_uri -> push_uri mapping (entry order matters; the service
        # really is this quirky..)
        services = [
            ('wx2.qq.com', 'webpush2.weixin.qq.com'),
            ('qq.com', 'webpush.weixin.qq.com'),
            ('web1.wechat.com', 'webpush1.wechat.com'),
            ('web2.wechat.com', 'webpush2.wechat.com'),
            ('wechat.com', 'webpush.wechat.com'),
            ('web1.wechatapp.com', 'webpush1.wechatapp.com'),
        ]
        push_uri = base_uri
        for (searchUrl, pushUrl) in services:
            if base_uri.find(searchUrl) >= 0:
                push_uri = 'https://%s/cgi-bin/mmwebwx-bin' % pushUrl
                break
        # closeQRImage
        if sys.platform.find('darwin') >= 0:  # for OSX with Preview
            os.system("osascript -e 'quit app \"Preview\"'")
    elif code == '408':  # timeout
        pass
    # elif code == '400' or code == '500':
    return code
def login():
    """Exchange the redirect ticket for session credentials.

    Parses skey/wxsid/wxuin/pass_ticket from the XML response, stores
    them in module globals and builds the global BaseRequest.
    Returns False when any credential is missing.
    """
    global skey, wxsid, wxuin, pass_ticket, BaseRequest
    request = getRequest(url=redirect_uri)
    response = wxb_urllib.urlopen(request)
    data = response.read().decode('utf-8', 'replace')
    # print(data)
    doc = xml.dom.minidom.parseString(data)
    root = doc.documentElement
    for node in root.childNodes:
        if node.nodeName == 'skey':
            skey = node.childNodes[0].data
        elif node.nodeName == 'wxsid':
            wxsid = node.childNodes[0].data
        elif node.nodeName == 'wxuin':
            wxuin = node.childNodes[0].data
        elif node.nodeName == 'pass_ticket':
            pass_ticket = node.childNodes[0].data
    # print('skey: %s, wxsid: %s, wxuin: %s, pass_ticket: %s' % (skey, wxsid,
    # wxuin, pass_ticket))
    # all four credentials are required for every subsequent API call
    if not all((skey, wxsid, wxuin, pass_ticket)):
        return False
    BaseRequest = {
        'Uin': int(wxuin),
        'Sid': wxsid,
        'Skey': skey,
        'DeviceID': deviceId,
    }
    return True
def webwxinit():
    """Initialize the WebWX session.

    Fetches my profile, the initial contact list and the first SyncKey,
    storing them in the module globals; returns the BaseResponse state.
    """
    url = base_uri + \
        '/webwxinit?pass_ticket=%s&skey=%s&r=%s' % (
            pass_ticket, skey, int(time.time()))
    params = {
        'BaseRequest': BaseRequest
    }
    request = getRequest(url=url, data=json.dumps(params))
    request.add_header('ContentType', 'application/json; charset=UTF-8')
    response = wxb_urllib.urlopen(request)
    data = response.read()
    if DEBUG:
        # dump the raw response for offline inspection
        f = open(os.path.join(os.getcwd(), 'webwxinit.json'), 'wb')
        f.write(data)
        f.close()
    data = data.decode('utf-8', 'replace')
    #print(data)
    global ContactList, My, SyncKey
    dic = json.loads(data)
    ContactList = dic['ContactList']
    My = dic['User']
    SyncKey = dic['SyncKey']
    state = responseState('webwxinit', dic['BaseResponse'])
    return state
def webwxgetcontact():
    """Download the full contact list from WebWX and return it.

    NOTE(review): `MemberList` below is a local that shadows the module
    global of the same name -- the caller must assign the return value,
    otherwise the global contact cache stays empty.
    """
    url = base_uri + '/webwxgetcontact?pass_ticket=%s&skey=%s&r=%s' % (pass_ticket, skey, int(time.time()))
    request = getRequest(url=url)
    request.add_header('ContentType', 'application/json; charset=UTF-8')
    response = wxb_urllib.urlopen(request)
    data = response.read()
    if DEBUG:
        # dump the raw response for offline inspection
        f = open(os.path.join(os.getcwd(), 'webwxgetcontact.json'), 'wb')
        f.write(data)
        f.close()
    # print(data)
    data = data.decode('utf-8', 'replace')
    dic = json.loads(data)
    MemberList = dic['MemberList']
    return MemberList
def special_user():
    """Strip non-human contacts from the global MemberList, in place.

    Drops public/service accounts, well-known special accounts, group
    chats and my own account; returns the filtered list.
    """
    SpecialUsers = ["newsapp", "fmessage", "filehelper", "weibo", "qqmail", "tmessage", "qmessage", "qqsync", "floatbottle", "lbsapp", "shakeapp", "medianote", "qqfriend", "readerapp", "blogapp", "facebookapp", "masssendapp", "meishiapp", "feedsapp", "voip", "blogappweixin", "weixin", "brandsessionholder", "weixinreminder", "wxid_novlwrv3lqwv11", "gh_22b87fa7cb3c", "officialaccounts", "notification_messages", "wxitil", "userexperience_alarm"]
    keep = []
    for Member in MemberList:
        if Member['VerifyFlag'] & 8 != 0:
            continue  # public / service account
        if Member['UserName'] in SpecialUsers:
            continue  # special built-in account
        if '@@' in Member['UserName']:
            continue  # group chat
        if Member['UserName'] == My['UserName']:
            continue  # myself
        keep.append(Member)
    # mutate in place so every holder of the global list sees the result
    MemberList[:] = keep
    return MemberList
def createChatroom(UserNames):
    """Create a chat room containing `UserNames`.

    Returns (ChatRoomName, DeletedList, BlockedList), where the two
    lists hold members that deleted me / blacklisted me.
    """
    MemberList = [{'UserName': UserName} for UserName in UserNames]
    url = base_uri + \
        '/webwxcreatechatroom?pass_ticket=%s&r=%s' % (
            pass_ticket, int(time.time()))
    params = {
        'BaseRequest': BaseRequest,
        'MemberCount': len(MemberList),
        'MemberList': MemberList,
        'Topic': '',
    }
    request = getRequest(url=url, data=json.dumps(params))
    request.add_header('ContentType', 'application/json; charset=UTF-8')
    response = wxb_urllib.urlopen(request)
    data = response.read().decode('utf-8', 'replace')
    # print(data)
    dic = json.loads(data)
    ChatRoomName = dic['ChatRoomName']
    MemberList = dic['MemberList']
    DeletedList = []
    BlockedList = []
    for Member in MemberList:
        if Member['MemberStatus'] == 4:  # I was deleted by this member
            DeletedList.append(Member['UserName'])
        elif Member['MemberStatus'] == 3:  # I was blacklisted
            BlockedList.append(Member['UserName'])
    # NOTE(review): `state` is only computed for its logging side effect
    state = responseState('createChatroom', dic['BaseResponse'])
    return ChatRoomName, DeletedList, BlockedList
def deleteMember(ChatRoomName, UserNames):
    """Remove `UserNames` from chat room `ChatRoomName`.

    Returns True when the server reports success (Ret == 0).
    """
    url = base_uri + \
        '/webwxupdatechatroom?fun=delmember&pass_ticket=%s' % (pass_ticket)
    params = {
        'BaseRequest': BaseRequest,
        'ChatRoomName': ChatRoomName,
        'DelMemberList': ','.join(UserNames),
    }
    request = getRequest(url=url, data=json.dumps(params))
    request.add_header('ContentType', 'application/json; charset=UTF-8')
    response = wxb_urllib.urlopen(request)
    data = response.read().decode('utf-8', 'replace')
    # print(data)
    dic = json.loads(data)
    state = responseState('deleteMember', dic['BaseResponse'])
    return state
def addMember(ChatRoomName, UserNames):
    """Add `UserNames` to chat room `ChatRoomName`.

    Returns (DeletedList, BlockedList) of members that deleted me /
    blacklisted me and therefore could not be added.
    """
    url = base_uri + \
        '/webwxupdatechatroom?fun=addmember&pass_ticket=%s' % (pass_ticket)
    params = {
        'BaseRequest': BaseRequest,
        'ChatRoomName': ChatRoomName,
        'AddMemberList': ','.join(UserNames),
    }
    request = getRequest(url=url, data=json.dumps(params))
    request.add_header('ContentType', 'application/json; charset=UTF-8')
    response = wxb_urllib.urlopen(request)
    data = response.read().decode('utf-8', 'replace')
    # print(data)
    dic = json.loads(data)
    MemberList = dic['MemberList']
    DeletedList = []
    BlockedList = []
    for Member in MemberList:
        if Member['MemberStatus'] == 4:  # I was deleted by this member
            DeletedList.append(Member['UserName'])
        elif Member['MemberStatus'] == 3:  # I was blacklisted
            BlockedList.append(Member['UserName'])
    # NOTE(review): `state` is only computed for its logging side effect
    state = responseState('addMember', dic['BaseResponse'])
    return DeletedList, BlockedList
def syncKey():
    """Serialize the global SyncKey into the 'key_val|key_val|...' form
    the synccheck endpoint expects."""
    parts = ['%s_%s' % (entry['Key'], entry['Val'])
             for entry in SyncKey['List']]
    return '|'.join(parts)
def syncCheck():
url = push_uri + '/synccheck?'
params = | |
# <gh_stars>0  (scraper artifact; kept as a comment so the module parses)
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module implements the models for the Timesketch core system."""
from __future__ import unicode_literals
import json
from flask import current_app
from flask import url_for
from sqlalchemy import BigInteger
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy.orm import relationship
from timesketch.models import BaseModel
from timesketch.models.acl import AccessControlMixin
from timesketch.models.annotations import LabelMixin
from timesketch.models.annotations import CommentMixin
from timesketch.models.annotations import StatusMixin
from timesketch.lib.utils import random_color
class Sketch(AccessControlMixin, LabelMixin, StatusMixin, CommentMixin,
             BaseModel):
    """Implements the Sketch model.

    A Sketch is the collaborative entity in Timesketch. It contains one or
    more timelines that can be grouped and queried on.
    """
    name = Column(Unicode(255))
    description = Column(UnicodeText())
    user_id = Column(Integer, ForeignKey('user.id'))
    timelines = relationship('Timeline', backref='sketch', lazy='select')
    views = relationship('View', backref='sketch', lazy='select')
    events = relationship('Event', backref='sketch', lazy='select')
    stories = relationship('Story', backref='sketch', lazy='select')
    aggregations = relationship('Aggregation', backref='sketch', lazy='select')
    attributes = relationship('Attribute', backref='sketch', lazy='select')
    graphs = relationship('Graph', backref='sketch', lazy='select')
    graphcaches = relationship('GraphCache', backref='sketch', lazy='select')
    aggregationgroups = relationship(
        'AggregationGroup', backref='sketch', lazy='select')
    analysis = relationship('Analysis', backref='sketch', lazy='select')
    analysissessions = relationship(
        'AnalysisSession', backref='sketch', lazy='select')

    def __init__(self, name, description, user):
        """Initialize the Sketch object.

        Args:
            name: The name of the sketch
            description: Description of the sketch
            user: A user (instance of timesketch.models.user.User)
        """
        super().__init__()
        self.name = name
        self.description = description
        self.user = user

    @property
    def get_named_aggregations(self):
        """Get named aggregations that don't belong to a group.

        Get named aggregations, i.e. only aggregations that have a name
        and are not part of a group.
        """
        return [
            agg for agg in self.aggregations
            if agg.name != '' and not agg.aggregationgroup
        ]

    @property
    def get_named_views(self):
        """Get named views.

        Get named views, i.e. only views that have a name. Views without
        names are used as user state views and should not be visible in
        the UI.
        """
        views = [
            view for view in self.views
            if view.get_status.status != 'deleted' and view.name != ''
        ]
        return views

    @property
    def external_url(self):
        """Get external URL for the sketch.

        E.g: https://localhost/sketch/42/

        Returns:
            Full URL to the sketch as string.
        """
        url_host = current_app.config.get(
            'EXTERNAL_HOST_URL', 'https://localhost')
        url_path = url_for('sketch_views.overview', sketch_id=self.id)
        return url_host + url_path

    def get_view_urls(self):
        """Get external URL for all views in the sketch.

        Returns:
            Dictionary with url as key and view name as value.
        """
        views = {}
        # the host part never changes: resolve it once outside the loop
        url_host = current_app.config.get(
            'EXTERNAL_HOST_URL', 'https://localhost')
        for view in self.get_named_views:
            url_path = url_for(
                'sketch_views.explore', sketch_id=self.id, view_id=view.id)
            views[url_host + url_path] = view.name
        return views

    @property
    def active_timelines(self):
        """List timelines that are ready for analysis.

        Returns:
            List of instances of timesketch.models.sketch.Timeline
        """
        inactive = ('processing', 'fail', 'archived')
        _timelines = []
        for timeline in self.timelines:
            timeline_status = timeline.get_status.status
            index_status = timeline.searchindex.get_status.status
            # Bug fix: this used to read `(timeline_status or index_status)
            # in (...)`, which only ever tested the first truthy status --
            # a timeline whose search index had failed or was archived was
            # still reported as active.
            if timeline_status in inactive or index_status in inactive:
                continue
            _timelines.append(timeline)
        return _timelines

    def get_active_analysis_sessions(self):
        """List active analysis sessions.

        Returns:
            List of instances of timesketch.models.sketch.AnalysisSession
        """
        active_sessions = []
        for session in self.analysissessions:
            for analysis in session.analyses:
                if analysis.get_status.status in ('PENDING', 'STARTED'):
                    active_sessions.append(session)
                    # Break early on first running analysis as this is
                    # enough to mark the session as active.
                    break
        return active_sessions

    @property
    def get_search_templates(self):
        """Get search templates."""
        return SearchTemplate.query.all()

    def get_user_view(self, user):
        """Get view for user, i.e. view with the state for the user/sketch.

        Args:
            user: User (instance of timesketch.models.user.User)

        Returns:
            view: Instance of timesketch.models.sketch.View
        """
        view = View.query.filter(View.user == user, View.name == '',
                                 View.sketch_id == self.id).order_by(
                                     View.created_at.desc()).first()
        return view
class Timeline(LabelMixin, StatusMixin, CommentMixin, BaseModel):
    """Implements the Timeline model."""
    name = Column(Unicode(255))
    description = Column(UnicodeText())
    # color shown for the timeline in the UI, HEX digits without '#'
    color = Column(Unicode(6))
    user_id = Column(Integer, ForeignKey('user.id'))
    searchindex_id = Column(Integer, ForeignKey('searchindex.id'))
    sketch_id = Column(Integer, ForeignKey('sketch.id'))
    analysis = relationship('Analysis', backref='timeline', lazy='select')
    # NOTE(review): backref 'sketch' on a Timeline-owned relationship looks
    # suspicious (DataSource.sketch would point at this Timeline) --
    # confirm it should not be backref='timeline'
    datasources = relationship('DataSource', backref='sketch', lazy='select')
    def __init__(self,
                 name,
                 user,
                 sketch,
                 searchindex,
                 color=None,
                 description=None):
        """Initialize the Timeline object.
        Args:
            name: The name of the timeline
            user: A user (instance of timesketch.models.user.User)
            sketch: A sketch (instance of timesketch.models.sketch.Sketch)
            searchindex: A searchindex
                (instance of timesketch.models.sketch.SearchIndex)
            color: Color for the timeline in HEX as string (e.g. F1F1F1F1)
            description: The description for the timeline
        """
        super().__init__()
        self.name = name
        self.description = description
        # fall back to a randomly picked color when none was supplied
        if not color:
            color = random_color()
        self.color = color
        self.user = user
        self.sketch = sketch
        self.searchindex = searchindex
class SearchIndex(AccessControlMixin, LabelMixin, StatusMixin, CommentMixin,
                  BaseModel):
    """Implements the SearchIndex model."""
    name = Column(Unicode(255))
    description = Column(UnicodeText())
    # name of the index in the backing datastore
    index_name = Column(Unicode(255))
    user_id = Column(Integer, ForeignKey('user.id'))
    timelines = relationship(
        'Timeline', backref='searchindex', lazy='dynamic')
    events = relationship('Event', backref='searchindex', lazy='dynamic')
    def __init__(self, name, description, index_name, user):
        """Initialize the SearchIndex object.
        Args:
            name: The name of the timeline
            description: The description for the timeline
            index_name: The name of the searchindex
            user: A user (instance of timesketch.models.user.User)
        """
        super().__init__()
        self.name = name
        self.description = description
        self.index_name = index_name
        self.user = user
class View(AccessControlMixin, LabelMixin, StatusMixin, CommentMixin,
           BaseModel):
    """Implements the View model."""
    name = Column(Unicode(255))
    description = Column(UnicodeText())
    query_string = Column(UnicodeText())
    # JSON-serialized filter dictionary, see validate_filter()
    query_filter = Column(UnicodeText())
    query_dsl = Column(UnicodeText())
    user_id = Column(Integer, ForeignKey('user.id'))
    sketch_id = Column(Integer, ForeignKey('sketch.id'))
    searchtemplate_id = Column(Integer, ForeignKey('searchtemplate.id'))
    aggregations = relationship('Aggregation', backref='view', lazy='select')
    aggregationgroups = relationship(
        'AggregationGroup', backref='view', lazy='select')
    def __init__(self,
                 name,
                 sketch,
                 user,
                 description=None,
                 searchtemplate=None,
                 query_string=None,
                 query_filter=None,
                 query_dsl=None):
        """Initialize the View object.
        Args:
            name: The name of the timeline
            sketch: A sketch (instance of timesketch.models.sketch.Sketch)
            user: A user (instance of timesketch.models.user.User)
            description (str): Description of the view
            searchtemplate: Instance of timesketch.models.sketch.SearchTemplate
            query_string: The query string
            query_filter: The filter to apply (JSON format as string)
            query_dsl: A query DSL document (JSON format as string)
        """
        super().__init__()
        self.name = name
        self.sketch = sketch
        self.user = user
        self.description = description
        self.searchtemplate = searchtemplate
        self.query_string = query_string
        self.query_filter = query_filter
        self.query_dsl = query_dsl
    def validate_filter(self, query_filter=None):
        """Validate the Query Filter.
        Make sure that we have all expected attributes in the query filter
        json string. The filter dictionary evolves over time and this function
        is used to update all filters.
        Args:
            query_filter: The query filter (JSON format or dictionary)
        Returns:
            query_filter: Query filter dictionary serialized to JSON
        """
        DEFAULT_FROM = 0
        DEFAULT_SIZE = 40  # Number of resulting documents to return
        DEFAULT_LIMIT = DEFAULT_SIZE  # Number of resulting documents to return
        DEFAULT_VALUES = {
            'from': DEFAULT_FROM,
            'size': DEFAULT_SIZE,
            'terminate_after': DEFAULT_LIMIT,
            'indices': [],
            'exclude': [],
            'order': 'asc',
            'chips': []
        }
        # If not provided, get the saved filter from the view
        if not query_filter:
            query_filter = self.query_filter
        # Make sure we have the filter as a dictionary
        if not isinstance(query_filter, dict):
            filter_dict = json.loads(query_filter)
        else:
            filter_dict = query_filter
        # Get all missing attributes and set them to their default value
        missing_attributes = list(
            set(DEFAULT_VALUES.keys()) - set(filter_dict.keys()))
        for key in missing_attributes:
            filter_dict[key] = DEFAULT_VALUES[key]
        return json.dumps(filter_dict, ensure_ascii=False)
class SearchTemplate(AccessControlMixin, LabelMixin, StatusMixin, CommentMixin,
                     BaseModel):
    """Implements the Search Template model."""
    name = Column(Unicode(255))
    description = Column(UnicodeText())
    query_string = Column(UnicodeText())
    query_filter = Column(UnicodeText())
    query_dsl = Column(UnicodeText())
    user_id = Column(Integer, ForeignKey('user.id'))
    views = relationship('View', backref='searchtemplate', lazy='select')
    def __init__(self,
                 name,
                 user,
                 description=None,
                 query_string=None,
                 query_filter=None,
                 query_dsl=None):
        """Initialize the Search Template object.
        Args:
            name: The name of the timeline
            user: A user (instance of timesketch.models.user.User)
            description (str): Description of the search template
            query_string: The query string
            query_filter: The filter to apply (JSON format as string)
            query_dsl: A query DSL document (JSON format as string)
        """
        super().__init__()
        self.name = name
        self.user = user
        self.description = description
        self.query_string = query_string
        # build a minimal default filter when none was supplied
        if not query_filter:
            filter_template = {
                'exclude': [],
                'indices': '_all',
                'terminate_after': 40,
                'order': 'asc',
                'size': '40'  # NOTE(review): string here, while View.validate_filter defaults size to int 40 -- confirm
            }
            query_filter = json.dumps(filter_template, ensure_ascii=False)
        self.query_filter = query_filter
        self.query_dsl = query_dsl
class Event(LabelMixin, StatusMixin, CommentMixin, BaseModel):
"""Implements the Event model."""
sketch_id = Column(Integer, ForeignKey('sketch.id'))
searchindex_id = Column(Integer, ForeignKey('searchindex.id'))
document_id = Column(Unicode(255))
def __init__(self, sketch, searchindex, document_id):
"""Initialize the Event object.
Args:
sketch: A sketch (instance of timesketch.models.sketch.Sketch)
searchindex: A searchindex
(instance of timesketch.models.sketch.SearchIndex)
document_id = String with | |
0, 0, 0, 0],
[1764, 10.4855, 0, 9999, -9999, 1.0, 100, 1, 21.994769, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1765, 31.191337, 0, 9999, -9999, 1.0, 100, 1, 112.249863, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1766, 32.779309, 0, 9999, -9999, 1.0, 100, 1, 99.811208, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1767, 47.378355, 0, 9999, -9999, 1.0, 100, 1, 95.5909, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1768, 99.282432, 0, 9999, -9999, 1.0, 100, 1, 159.818572, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1769, 147.831228, 0, 9999, -9999, 1.0, 100, 1, 235.581664, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1770, 304.79978, 0, 9999, -9999, 1.0, 100, 1, 479.248156, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1771, 65.829812, 0, 9999, -9999, 1.0, 100, 1, 276.640075, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1772, 26.535914, 0, 9999, -9999, 1.0, 100, 1, 272.215345, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1773, 344.519489, 0, 9999, -9999, 1.0, 100, 1, 533.823159, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1774, 1.433941, 0, 9999, -9999, 1.0, 100, 1, 88.57714, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1775, 13.942291, 0, 9999, -9999, 1.0, 100, 1, 197.787397, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1776, 38.596164, 0, 9999, -9999, 1.0, 100, 1, 111.203656, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1777, 1.462073, 0, 9999, -9999, 1.0, 100, 1, 199.457983, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1778, 27.08531, 0, 9999, -9999, 1.0, 100, 1, 80.070627, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1779, 24.436918, 0, 9999, -9999, 1.0, 100, 1, 78.485044, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1780, 57.435871, 0, 9999, -9999, 1.0, 100, 1, 97.872974, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1781, 0.077992, 0, 9999, -9999, 1.0, 100, 1, 7.067063, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1782, 0.17755, 0, 9999, -9999, 1.0, 100, 1, 9.94901, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1783, 0.222187, 0, 9999, -9999, 1.0, 100, 1, 10.739092, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1784, 113.329767, 0, 9999, -9999, 1.0, 100, 1, 240.920274, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1785, 75.539881, 0, 9999, -9999, 1.0, 100, 1, 275.41262, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1786, 111.534647, 0, 9999, -9999, 1.0, 100, 1, 195.868213, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1787, 76.816864, 0, 9999, -9999, 1.0, 100, 1, 123.060646, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1788, 0.294637, 0, 9999, -9999, 1.0, 100, 1, 9.486282, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1789, 0.73622, 0, 9999, -9999, 1.0, 100, 1, 24.05804, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1790, 0.034988, 0, 9999, -9999, 1.0, 100, 1, 1.412167, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1791, 0.027252, 0, 9999, -9999, 1.0, 100, 1, 1.171034, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1792, 0.116895, 0, 9999, -9999, 1.0, 100, 1, 8.914306, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1793, 0.331412, 0, 9999, -9999, 1.0, 100, 1, 41.722817, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1794, 0.347348, 0, 9999, -9999, 1.0, 100, 1, 6.617641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1795, 0.092707, 0, 9999, -9999, 1.0, 100, 1, 3.33586, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1796, 0.11041, 0, 9999, -9999, 1.0, 100, 1, 10.434523, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1797, 2.91855, 0, 9999, -9999, 1.0, 100, 1, 63.411765, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1798, 0.704229, 0, 9999, -9999, 1.0, 100, 1, 14.835758, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1799, 2.658677, 0, 9999, -9999, 1.0, 100, 1, 51.10225, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1800, 32.453095, 0, 9999, -9999, 1.0, 100, 1, 79.286766, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1801, 0.313154, 0, 9999, -9999, 1.0, 100, 1, 21.006749, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1802, 0.140553, 0, 9999, -9999, 1.0, 100, 1, 11.305192, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1803, 0.143182, 0, 9999, -9999, 1.0, 100, 1, 15.182571, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1804, 250.433828, 0, 9999, -9999, 1.0, 100, 1, 399.133201, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1805, 7.643743, 0, 9999, -9999, 1.0, 100, 1, 23.20491, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1806, 0.949212, 0, 9999, -9999, 1.0, 100, 1, 21.469357, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1807, 1.570516, 0, 9999, -9999, 1.0, 100, 1, 28.156483, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1808, 12.529593, 0, 9999, -9999, 1.0, 100, 1, 118.262712, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1809, 1.804181, 0, 9999, -9999, 1.0, 100, 1, 33.031228, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1810, 3.352529, 0, 9999, -9999, 1.0, 100, 1, 74.139408, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1811, 9.627622, 0, 9999, -9999, 1.0, 100, 1, 53.408299, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1812, 1.070103, 0, 9999, -9999, 1.0, 100, 1, 47.34526, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1813, 9.006468, 0, 9999, -9999, 1.0, 100, 1, 180.894957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1814, 1.932899, 0, 9999, -9999, 1.0, 100, 1, 62.572642, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1815, 7.162069, 0, 9999, -9999, 1.0, 100, 1, 61.953143, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1816, 3.07405, 0, 9999, -9999, 1.0, 100, 1, 30.445169, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1817, 8.908725, 0, 9999, -9999, 1.0, 100, 1, 280.614897, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1818, 6.89181, 0, 9999, -9999, 1.0, 100, 1, 173.515675, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1819, 0.077826, 0, 9999, -9999, 1.0, 100, 1, 1.538348, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1820, 23.896406, 0, 9999, -9999, 1.0, 100, 1, 79.71358, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1821, 25.662621, 0, 9999, -9999, 1.0, 100, 1, 196.67938, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1822, 101.811728, 0, 9999, -9999, 1.0, 100, 1, 170.831584, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1823, 51.944775, 0, 9999, -9999, 1.0, 100, 1, 131.456153, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1824, 0.441474, 0, 9999, -9999, 1.0, 100, 1, 56.565054, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1825, 40.830872, 0, 9999, -9999, 1.0, 100, 1, 81.59195, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1826, 3.268268, 0, 9999, -9999, 1.0, 100, 1, 74.101252, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1827, 5.6733, 0, 9999, -9999, 1.0, 100, 1, 30.303552, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1828, 20.476816, 0, 9999, -9999, 1.0, 100, 1, 43.298921, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1829, 31.591544, 0, 9999, -9999, 1.0, 100, 1, 69.263255, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1830, 0.642651, 0, 9999, -9999, 1.0, 100, 1, 27.724768, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1831, 34.412999, 0, 9999, -9999, 1.0, 100, 1, 69.89001, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1832, 1.138187, 0, 9999, -9999, 1.0, 100, 1, 26.560625, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1833, 21.42128, 0, 9999, -9999, 1.0, 100, 1, 81.361962, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1834, 1.426142, 0, 9999, -9999, 1.0, 100, 1, 102.529569, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1835, 34.902506, 0, 9999, -9999, 1.0, 100, 1, 109.80979, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1836, 2.422712, 0, 9999, -9999, 1.0, 100, 1, 6.417969, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1837, 5.152147, 0, 9999, -9999, 1.0, 100, 1, 12.629331, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1838, 0.360196, 0, 9999, -9999, 1.0, 100, 1, 25.580913, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1839, 101.272918, 0, 9999, -9999, 1.0, 100, 1, 183.749133, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1840, 81.128784, 0, 9999, -9999, 1.0, 100, 1, 132.975197, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1841, 0.414931, 0, 9999, -9999, 1.0, 100, 1, 22.982632, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1842, 0.503001, 0, 9999, -9999, 1.0, 100, 1, 7.468633, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1843, 0.840712, 0, 9999, -9999, 1.0, 100, 1, 19.264686, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1844, 0.32928, 0, 9999, -9999, 1.0, 100, 1, 32.384294, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1845, 0.28889, 0, 9999, -9999, 1.0, 100, 1, 31.436002, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1846, 0.155499, 0, 9999, -9999, 1.0, 100, 1, 3.74984, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1847, 48.936735, 0, 9999, -9999, 1.0, 100, 1, 120.215574, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1848, 1.811188, 0, 9999, -9999, 1.0, 100, 1, 9.514696, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1849, 12.439626, 0, 9999, -9999, 1.0, 100, 1, 37.619097, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1850, 12.461036, 0, 9999, -9999, 1.0, 100, 1, 48.54058, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1851, 5.208163, 0, 9999, -9999, 1.0, 100, 1, 7.956444, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1852, 25.347233, 0, 9999, -9999, 1.0, 100, 1, 37.606916, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1853, 20.651565, 0, 9999, -9999, 1.0, 100, 1, 30.116711, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1854, 0.043822, 0, 9999, -9999, 1.0, 100, 1, 2.241167, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1855, 4.217729, 0, 9999, -9999, 1.0, 100, 1, 121.687485, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1856, 12.633077, 0, 9999, -9999, 1.0, 100, 1, 63.654358, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1857, 1.364182, 0, 9999, -9999, 1.0, 100, 1, 41.229597, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1858, 0.958978, 0, 9999, -9999, 1.0, 100, 1, 27.374415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1859, 10.587897, 0, 9999, -9999, 1.0, 100, 1, 25.394374, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1860, 39.399432, 0, 9999, -9999, 1.0, 100, 1, 84.163604, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1861, 14.041237, 0, 9999, -9999, 1.0, 100, 1, 26.861144, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1862, 13.884853, 0, 9999, -9999, 1.0, 100, 1, 32.512826, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1863, 12.909042, 0, 9999, -9999, 1.0, 100, 1, 30.063729, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1864, 50.066394, 0, 9999, -9999, 1.0, 100, 1, 138.236316, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1865, 0.409612, 0, 9999, -9999, 1.0, 100, 1, 68.097772, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1866, 0.725366, 0, 9999, -9999, 1.0, 100, 1, 98.289141, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1867, 0.073895, 0, 9999, -9999, 1.0, 100, 1, 2.041288, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1868, 0.32648, 0, 9999, -9999, 1.0, 100, 1, 6.453374, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1869, 0.110449, 0, 9999, -9999, 1.0, 100, 1, 2.759448, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1870, 1.381036, 0, 9999, -9999, 1.0, 100, 1, 54.564665, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1871, 1.43154, 0, 9999, -9999, 1.0, 100, 1, 52.648444, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1872, 0.075555, 0, 9999, -9999, 1.0, 100, 1, 1.683854, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1873, 2.30156, 0, 9999, -9999, 1.0, 100, 1, 9.025283, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1874, 0.995556, 0, 9999, -9999, 1.0, 100, 1, 3.554415, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1875, 2.339423, 0, 9999, -9999, 1.0, 100, 1, 7.837576, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1876, 0.249749, 0, 9999, -9999, 1.0, 100, 1, 4.936672, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1877, 0.057457, 0, 9999, -9999, 1.0, 100, 1, 1.135717, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1878, 2.499638, 0, 9999, -9999, 1.0, 100, 1, 8.374329, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1879, 0.20187, 0, 9999, -9999, 1.0, 100, 1, 1.752881, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1880, 12.467959, 0, 9999, -9999, 1.0, 100, 1, 38.46747, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1881, 0.103805, 0, 9999, -9999, 1.0, 100, 1, 4.535799, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1882, 0.128327, 0, 9999, -9999, 1.0, 100, 1, 5.120641, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1883, 0.143763, 0, 9999, -9999, 1.0, 100, 1, 6.940957, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1884, 0.120329, 0, 9999, -9999, 1.0, 100, 1, 5.865468, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1885, 17.246135, 0, 9999, -9999, 1.0, 100, 1, 47.510175, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1886, 1.939145, 0, 9999, -9999, 1.0, 100, 1, 5.255398, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1887, 1.145333, 0, 9999, -9999, 1.0, 100, 1, 16.937671, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1888, 0.093829, 0, 9999, -9999, 1.0, 100, 1, 4.141211, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1889, 21.859223, 0, 9999, -9999, 1.0, 100, 1, 91.335184, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1890, 4.64049, 0, 9999, -9999, 1.0, 100, 1, 24.842697, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1891, 11.630679, 0, 9999, -9999, 1.0, 100, 1, 30.836318, 0.0, 0, 0, 0, 0, 0, 0, 0, | |
= {
"result": [
{
"acquisition_method": "",
"asset_tag": "P1000807",
"assigned": "2018-08-07 07:00:00",
"assigned_to": {
"link": "demisto.com",
"value": "admin"
},
"beneficiary": "",
"checked_in": "",
"checked_out": "",
"ci": {
"link": "demisto.com",
"value": "admin"
},
"comments": "",
"company": {
"link": "demisto.com",
"value": "admin"
},
"cost": "2499.99",
"cost_center": {
"link": "demisto.com",
"value": "admin"
},
"delivery_date": "2018-03-14 08:00:00",
"department": {
"link": "demisto.com",
"value": "admin"
},
"depreciated_amount": "934.59",
"depreciation": {
"link": "demisto.com",
"value": "admin"
},
"depreciation_date": "2018-05-27 07:00:00",
"display_name": "P1000807 - Apple MacBook Pro 17\"",
"disposal_reason": "",
"due": "",
"due_in": "",
"expenditure_type": "",
"gl_account": "",
"install_date": "2018-05-26 07:00:00",
"install_status": "1",
"invoice_number": "",
"justification": "",
"lease_id": "",
"location": {
"link": "demisto.com",
"value": "admin"
},
"managed_by": "",
"model": {
"link": "demisto.com",
"value": "admin"
},
"model_category": {
"link": "demisto.com",
"value": "admin"
},
"old_status": "",
"old_substatus": "",
"order_date": "2018-02-22 08:00:00",
"owned_by": "",
"parent": "",
"po_number": "PO100008",
"pre_allocated": "false",
"purchase_date": "2018-03-09",
"quantity": "1",
"request_line": "",
"resale_price": "0",
"reserved_for": "",
"residual": "1565.4",
"residual_date": "2020-04-09",
"retired": "",
"retirement_date": "",
"salvage_value": "0",
"serial_number": "IKS-131-F44462-HL",
"skip_sync": "false",
"stockroom": "",
"substatus": "",
"support_group": "",
"supported_by": "",
"sys_class_name": "alm_hardware",
"sys_created_by": "admin",
"sys_created_on": "2019-02-23 08:14:09",
"sys_domain": {
"link": "demisto.com",
"value": "global"
},
"sys_domain_path": "/",
"sys_id": "sys_id2",
"sys_mod_count": "20",
"sys_tags": "",
"sys_updated_by": "system",
"sys_updated_on": "2020-04-09 06:20:19",
"vendor": {
"link": "demisto.com",
"value": "admin"
},
"warranty_expiration": "2021-05-25",
"work_notes": ""
},
{
"acquisition_method": "",
"asset_tag": "P1000637",
"assigned": "2019-07-03 07:00:00",
"assigned_to": {
"link": "demisto.com",
"value": "admin"
},
"beneficiary": "",
"checked_in": "",
"checked_out": "",
"ci": {
"link": "demisto.com",
"value": "admin"
},
"comments": "",
"company": {
"link": "demisto.com",
"value": "admin"
},
"cost": "1599.99",
"cost_center": {
"link": "demisto.com",
"value": "admin"
},
"delivery_date": "2018-12-19 08:00:00",
"department": {
"link": "demisto.com",
"value": "admin"
},
"depreciated_amount": "389.71",
"depreciation": {
"link": "admin",
"value": "demisto.com"
},
"depreciation_date": "2019-01-19 08:00:00",
"display_name": "P1000637 - Apple MacBook Air 13\"",
"disposal_reason": "",
"due": "",
"due_in": "",
"expenditure_type": "",
"gl_account": "",
"install_date": "2019-01-18 08:00:00",
"install_status": "1",
"invoice_number": "",
"justification": "",
"lease_id": "",
"location": {
"link": "demisto.com",
"value": "admin"
},
"managed_by": "",
"model": {
"link": "demisto.com",
"value": "admin"
},
"model_category": {
"link": "demisto.com",
"value": "admin"
},
"old_status": "",
"old_substatus": "",
"order_date": "2018-11-24 08:00:00",
"owned_by": "",
"parent": "",
"po_number": "PO100011",
"pre_allocated": "false",
"purchase_date": "2018-12-09",
"quantity": "1",
"request_line": "",
"resale_price": "0",
"reserved_for": "",
"residual": "1210.28",
"residual_date": "2020-04-09",
"retired": "",
"retirement_date": "",
"salvage_value": "0",
"serial_number": "BXV-671-O15099-HI",
"skip_sync": "false",
"stockroom": "",
"substatus": "",
"support_group": "",
"supported_by": "",
"sys_class_name": "alm_hardware",
"sys_created_by": "admin",
"sys_created_on": "2019-02-23 08:13:36",
"sys_domain": {
"link": "demisto.com",
"value": "global"
},
"sys_domain_path": "/",
"sys_id": "sys_id3",
"sys_mod_count": "20",
"sys_tags": "",
"sys_updated_by": "system",
"sys_updated_on": "2020-04-09 06:20:19",
"vendor": {
"link": "demisto.com",
"value": "admin"
},
"warranty_expiration": "2022-01-17",
"work_notes": ""
},
{
"acquisition_method": "",
"asset_tag": "P1000412",
"assigned": "2017-08-17 07:00:00",
"assigned_to": {
"link": "demisto.com",
"value": "admin"
},
"beneficiary": "",
"checked_in": "",
"checked_out": "",
"ci": {
"link": "demisto.com",
"value": "admin"
},
"comments": "",
"company": {
"link": "demisto.com",
"value": "admin"
},
"cost": "2499.99",
"cost_center": {
"link": "demisto.com",
"value": "admin"
},
"delivery_date": "2017-01-20 08:00:00",
"department": {
"link": "demisto.com",
"value": "admin"
},
"depreciated_amount": "1564.03",
"depreciation": {
"link": "demisto.com",
"value": "admin"
},
"depreciation_date": "2017-02-20 08:00:00",
"display_name": "P1000412 - Apple MacBook Pro 17\"",
"disposal_reason": "",
"due": "",
"due_in": "",
"expenditure_type": "",
"gl_account": "",
"install_date": "2017-02-19 08:00:00",
"install_status": "1",
"invoice_number": "",
"justification": "",
"lease_id": "",
"location": {
"link": "demisto.com",
"value": "admin"
},
"managed_by": "",
"model": {
"link": "demisto.com",
"value": "admin"
},
"model_category": {
"link": "demisto.com",
"value": "admin"
},
"old_status": "",
"old_substatus": "",
"order_date": "2016-12-28 08:00:00",
"owned_by": "",
"parent": "",
"po_number": "PO100002",
"pre_allocated": "false",
"purchase_date": "2017-01-09",
"quantity": "1",
"request_line": "",
"resale_price": "0",
"reserved_for": "",
"residual": "935.96",
"residual_date": "2020-04-09",
"retired": "",
"retirement_date": "",
"salvage_value": "0",
"serial_number": "FQC-294-U60540-FN",
"skip_sync": "false",
"stockroom": "",
"substatus": "",
"support_group": "",
"supported_by": "",
"sys_class_name": "alm_hardware",
"sys_created_by": "admin",
"sys_created_on": "2019-02-23 08:13:40",
"sys_domain": {
"link": "demisto.com",
"value": "global"
},
"sys_domain_path": "/",
"sys_id": "sys_id4",
"sys_mod_count": "21",
"sys_tags": "",
"sys_updated_by": "system",
"sys_updated_on": "2020-04-09 06:20:20",
"vendor": {
"link": "demisto.com",
"value": "admin"
},
"warranty_expiration": "2020-02-19",
"work_notes": ""
}
]
}
RESPONSE_QUERY_TABLE_SYS_PARAMS = {
"result": [
{
"active": {
"display_value": "true",
"value": "true"
},
"activity_due": {
"display_value": "UNKNOWN",
"value": ""
},
"additional_assignee_list": {
"display_value": "",
"value": ""
},
"approval": {
"display_value": "Not Yet Requested",
"value": "not requested"
},
"approval_history": {
"display_value": "",
"value": ""
},
"approval_set": {
"display_value": "",
"value": ""
},
"assigned_to": {
"display_value": "",
"value": ""
},
"assignment_group": {
"display_value": "Procurement",
"value": "1234"
},
"business_duration": {
"display_value": "",
"value": ""
},
"business_service": {
"display_value": "",
"value": ""
},
"calendar_duration": {
"display_value": "",
"value": ""
},
"calendar_stc": {
"display_value": "",
"value": ""
},
"close_notes": {
"display_value": "",
"value": ""
},
"closed_at": {
"display_value": "",
"value": ""
},
"closed_by": {
"display_value": "",
"value": ""
},
"cmdb_ci": {
"display_value": "",
"value": ""
},
"comments": {
"display_value": "",
"value": ""
},
"comments_and_work_notes": {
"display_value": "",
"value": ""
},
"company": {
"display_value": "",
"value": ""
},
"contact_type": {
"display_value": None,
"value": ""
},
"contract": {
"display_value": "",
"value": ""
},
"correlation_display": {
"display_value": "",
"value": ""
},
"correlation_id": {
"display_value": "",
"value": ""
},
"delivery_plan": {
"display_value": "",
"value": ""
},
"delivery_task": {
"display_value": "",
"value": ""
},
"description": {
"display_value": "Order from vendor or move from in-stock inventory\n\t\t",
"value": "Order from vendor or move from in-stock inventory\n\t\t"
},
"due_date": {
"display_value": "2020-04-20 13:58:46",
"value": "2020-04-20 20:58:46"
},
"escalation": {
"display_value": "Normal",
"value": "0"
},
"expected_start": {
"display_value": "2020-04-20 13:58:46",
"value": "2020-04-20 20:58:46"
},
"follow_up": {
"display_value": "",
"value": ""
},
"group_list": {
"display_value": "",
"value": ""
},
"impact": {
"display_value": "3 - Low",
"value": "3"
},
"knowledge": {
"display_value": "false",
"value": "false"
},
"location": {
"display_value": "",
"value": ""
},
"made_sla": {
"display_value": "true",
"value": "true"
},
"number": {
"display_value": "TASK0000001",
"value": "TASK0000001"
},
"opened_at": {
"display_value": "2020-04-20 13:58:46",
"value": "2020-04-20 20:58:46"
},
"opened_by": {
"display_value": "System Administrator",
"value": "1234"
},
"order": {
"display_value": "",
"value": ""
},
"parent": {
"display_value": "RITM0000001",
"value": "aeed229047801200e0ef563dbb9a71c2"
},
"priority": {
"display_value": "4 - Low",
"value": "4"
},
"reassignment_count": {
"display_value": "0",
"value": "0"
},
"request": {
"display_value": "REQ0000001",
"value": "1234"
},
"request_item": {
"display_value": "RITM0000001",
"value": "1234"
},
"sc_catalog": {
"display_value": "",
"value": ""
},
"service_offering": {
"display_value": "",
"value": ""
},
"short_description": {
"display_value": "Order from vendor or move from in-stock inventory\n\t\t",
"value": "Order from vendor or move from in-stock inventory\n\t\t"
},
"skills": {
"display_value": "",
"value": ""
},
"sla_due": {
"display_value": "UNKNOWN",
"value": ""
},
"state": {
"display_value": "Open",
"value": "1"
},
"sys_class_name": {
"display_value": "Catalog Task",
"value": "sc_task"
},
"sys_created_by": {
"display_value": "admin",
"value": "admin"
},
"sys_created_on": {
"display_value": "2020-04-20 13:58:46",
"value": "2020-04-20 20:58:46"
},
"sys_domain": {
"display_value": "global",
"value": "global"
},
"sys_domain_path": {
"display_value": "/",
"value": "/"
},
"sys_id": {
"display_value": "1234",
"value": "1234"
},
"sys_mod_count": {
"display_value": "0",
"value": "0"
},
"sys_tags": {
"display_value": "",
"value": ""
},
"sys_updated_by": {
"display_value": "admin",
"value": "admin"
},
"sys_updated_on": {
"display_value": "2020-04-20 13:58:46",
"value": "2020-04-20 20:58:46"
},
"time_worked": {
"display_value": "",
"value": ""
},
"upon_approval": {
"display_value": "Proceed to Next Task",
"value": "proceed"
},
"upon_reject": {
"display_value": "Cancel all future Tasks",
"value": "cancel"
},
"urgency": {
"display_value": "3 - Low",
"value": "3"
},
"user_input": {
"display_value": "",
"value": ""
},
"watch_list": {
"display_value": "",
"value": ""
},
"work_end": {
"display_value": "",
"value": ""
},
"work_notes": {
"display_value": "",
"value": ""
},
"work_notes_list": {
"display_value": "",
"value": ""
},
"work_start": {
"display_value": "",
"value": ""
}
}
]
}
RESPONSE_LIST_TABLE_FIELDS = {
"result": [
{
"acquisition_method": "",
"asset_tag": "P1000479",
"assigned": "2017-10-31 07:00:00",
"assigned_to": {
"link": "demisto.com",
"value": "admin"
},
"beneficiary": "",
"checked_in": "",
"checked_out": "",
"ci": {
"link": "demisto.com",
"value": "admin"
},
"comments": "",
"company": {
"link": "demisto.com",
"value": "admin"
},
"cost": "1799.99",
"cost_center": {
"link": "demisto.com",
"value": "admin"
},
"delivery_date": "2017-04-20 07:00:00",
"department": {
"link": "demisto.com",
"value": "admin"
},
"depreciated_amount": "1025.61",
"depreciation": {
"link": "demisto.com",
"value": "admin"
},
"depreciation_date": "2017-06-03 07:00:00",
"display_name": "P1000479 - Apple MacBook Pro 15\"",
"disposal_reason": "",
"due": "",
"due_in": "",
"expenditure_type": "",
"gl_account": "",
"install_date": "2017-06-02 07:00:00",
"install_status": "1",
"invoice_number": "",
"justification": "",
"lease_id": "",
"location": {
"link": "demisto.com",
"value": "admin"
},
"managed_by": | |
- 59.07*m.x885 - 56.58*m.x900 - 56.58*m.x909
- 56.58*m.x919 - 56.58*m.x937 - 11.22*m.x947 - 11.22*m.x956 - 11.22*m.x974 - 28.61*m.x992
- 28.61*m.x1001 - 28.61*m.x1010 - 61.54*m.x1033 - 61.54*m.x1043 - 61.54*m.x1055 - 1.36*m.x1086
- 57.5*m.x1105 - 15.24*m.x1117 - 30.23*m.x1170 + 5.28*m.x1204 - 56.58*m.x1221 - 11.22*m.x1228
<= 0)
m.c392 = Constraint(expr= 24.35*m.x111 + 24.35*m.x118 + 24.35*m.x127 + 24.35*m.x139 + 4.14*m.x149 + 4.14*m.x158
+ 4.14*m.x167 - 1.06*m.x199 - 1.06*m.x213 - 1.06*m.x222 - 1.06*m.x240
- 0.300000000000004*m.x250 - 0.300000000000004*m.x264 - 0.300000000000004*m.x282
- 0.300000000000004*m.x300 - 21.97*m.x318 - 21.97*m.x334 - 7.95*m.x357 - 7.95*m.x366
- 7.95*m.x384 - 7.95*m.x402 - 52.67*m.x420 - 52.67*m.x434 - 52.67*m.x444 - 52.67*m.x456
- 42.07*m.x474 - 42.07*m.x490 - 42.07*m.x499 - 42.07*m.x517 - 42.07*m.x529 - 21.85*m.x539
- 21.85*m.x553 - 21.85*m.x562 - 21.85*m.x572 - 21.85*m.x590 - 20.49*m.x600 - 20.49*m.x616
- 20.49*m.x623 - 20.49*m.x632 - 20.49*m.x650 - 20.49*m.x662 - 46*m.x679 - 46*m.x686
- 46*m.x696 - 46*m.x708 + 3.54*m.x724 + 3.54*m.x731 + 3.54*m.x740 - 1.31*m.x766 - 1.31*m.x782
- 46.18*m.x808 - 46.18*m.x815 - 46.18*m.x824 - 46.18*m.x834 - 46.18*m.x846 - 9.18*m.x858
+ 13.16*m.x876 + 13.16*m.x885 - 45.79*m.x900 - 45.79*m.x909 - 45.79*m.x919 - 45.79*m.x937
- 50.67*m.x947 - 50.67*m.x956 - 50.67*m.x974 + 5.65*m.x992 + 5.65*m.x1001 + 5.65*m.x1010
- 14.59*m.x1033 - 14.59*m.x1043 - 14.59*m.x1055 - 1.06*m.x1086 - 21.97*m.x1105 - 7.95*m.x1117
- 46*m.x1170 - 9.18*m.x1204 - 45.79*m.x1221 - 50.67*m.x1228 <= 0)
m.c393 = Constraint(expr= - 49.87*m.x111 - 49.87*m.x118 - 49.87*m.x127 - 49.87*m.x139 + 4.24*m.x149 + 4.24*m.x158
+ 4.24*m.x167 + 8.58000000000001*m.x199 + 8.58000000000001*m.x213 + 8.58000000000001*m.x222
+ 8.58000000000001*m.x240 - 47.92*m.x250 - 47.92*m.x264 - 47.92*m.x282 - 47.92*m.x300
- 25.21*m.x318 - 25.21*m.x334 - 1.1*m.x357 - 1.1*m.x366 - 1.1*m.x384 - 1.1*m.x402
- 12.53*m.x420 - 12.53*m.x434 - 12.53*m.x444 - 12.53*m.x456 - 1.54*m.x474 - 1.54*m.x490
- 1.54*m.x499 - 1.54*m.x517 - 1.54*m.x529 - 46.89*m.x539 - 46.89*m.x553 - 46.89*m.x562
- 46.89*m.x572 - 46.89*m.x590 + 15.78*m.x600 + 15.78*m.x616 + 15.78*m.x623 + 15.78*m.x632
+ 15.78*m.x650 + 15.78*m.x662 + 1.37*m.x679 + 1.37*m.x686 + 1.37*m.x696 + 1.37*m.x708
- 32.18*m.x724 - 32.18*m.x731 - 32.18*m.x740 - 13.57*m.x766 - 13.57*m.x782 - 28.23*m.x808
- 28.23*m.x815 - 28.23*m.x824 - 28.23*m.x834 - 28.23*m.x846 - 16.8*m.x858 - 19.23*m.x876
- 19.23*m.x885 - 26.86*m.x900 - 26.86*m.x909 - 26.86*m.x919 - 26.86*m.x937 - 49.19*m.x947
- 49.19*m.x956 - 49.19*m.x974 + 17.15*m.x992 + 17.15*m.x1001 + 17.15*m.x1010 - 15.68*m.x1033
- 15.68*m.x1043 - 15.68*m.x1055 + 8.58000000000001*m.x1086 - 25.21*m.x1105 - 1.1*m.x1117
+ 1.37*m.x1170 - 16.8*m.x1204 - 26.86*m.x1221 - 49.19*m.x1228 <= 0)
m.c394 = Constraint(expr= - 48.97*m.x111 - 48.97*m.x118 - 48.97*m.x127 - 48.97*m.x139 - 19.46*m.x149 - 19.46*m.x158
- 19.46*m.x167 - 36.67*m.x199 - 36.67*m.x213 - 36.67*m.x222 - 36.67*m.x240 + 1.03*m.x250
+ 1.03*m.x264 + 1.03*m.x282 + 1.03*m.x300 - 55.91*m.x318 - 55.91*m.x334
+ 7.90000000000001*m.x357 + 7.90000000000001*m.x366 + 7.90000000000001*m.x384
+ 7.90000000000001*m.x402 + 9.99000000000001*m.x420 + 9.99000000000001*m.x434
+ 9.99000000000001*m.x444 + 9.99000000000001*m.x456 + 9.05*m.x474 + 9.05*m.x490 + 9.05*m.x499
+ 9.05*m.x517 + 9.05*m.x529 - 64.38*m.x539 - 64.38*m.x553 - 64.38*m.x562 - 64.38*m.x572
- 64.38*m.x590 - 16.81*m.x600 - 16.81*m.x616 - 16.81*m.x623 - 16.81*m.x632 - 16.81*m.x650
- 16.81*m.x662 + 10.2*m.x679 + 10.2*m.x686 + 10.2*m.x696 + 10.2*m.x708
- 4.09999999999999*m.x724 - 4.09999999999999*m.x731 - 4.09999999999999*m.x740 - 44.95*m.x766
- 44.95*m.x782 - 55.14*m.x808 - 55.14*m.x815 - 55.14*m.x824 - 55.14*m.x834 - 55.14*m.x846
- 1.45*m.x858 - 23.64*m.x876 - 23.64*m.x885 - 47.26*m.x900 - 47.26*m.x909 - 47.26*m.x919
- 47.26*m.x937 + 6.27000000000001*m.x947 + 6.27000000000001*m.x956 + 6.27000000000001*m.x974
- 9.36*m.x992 - 9.36*m.x1001 - 9.36*m.x1010 - 2.4*m.x1033 - 2.4*m.x1043 - 2.4*m.x1055
- 36.67*m.x1086 - 55.91*m.x1105 + 7.90000000000001*m.x1117 + 10.2*m.x1170 - 1.45*m.x1204
- 47.26*m.x1221 + 6.27000000000001*m.x1228 <= 0)
m.c395 = Constraint(expr= - 74.05*m.x111 - 74.05*m.x118 - 74.05*m.x127 - 74.05*m.x139 - 73.63*m.x149 - 73.63*m.x158
- 73.63*m.x167 - 68.21*m.x199 - 68.21*m.x213 - 68.21*m.x222 - 68.21*m.x240 - 40.49*m.x250
- 40.49*m.x264 - 40.49*m.x282 - 40.49*m.x300 - 36.75*m.x318 - 36.75*m.x334 - 24.52*m.x357
- 24.52*m.x366 - 24.52*m.x384 - 24.52*m.x402 - 75.57*m.x420 - 75.57*m.x434 - 75.57*m.x444
- 75.57*m.x456 - 28.14*m.x474 - 28.14*m.x490 - 28.14*m.x499 - 28.14*m.x517 - 28.14*m.x529
- 62.71*m.x539 - 62.71*m.x553 - 62.71*m.x562 - 62.71*m.x572 - 62.71*m.x590 - 23.71*m.x600
- 23.71*m.x616 - 23.71*m.x623 - 23.71*m.x632 - 23.71*m.x650 - 23.71*m.x662 - 53.98*m.x679
- 53.98*m.x686 - 53.98*m.x696 - 53.98*m.x708 - 19.75*m.x724 - 19.75*m.x731 - 19.75*m.x740
- 9.01*m.x766 - 9.01*m.x782 - 21.55*m.x808 - 21.55*m.x815 - 21.55*m.x824 - 21.55*m.x834
- 21.55*m.x846 - 11.32*m.x858 - 43.29*m.x876 - 43.29*m.x885 - 2.83*m.x900 - 2.83*m.x909
- 2.83*m.x919 - 2.83*m.x937 - 55.2*m.x947 - 55.2*m.x956 - 55.2*m.x974 - 68.52*m.x992
- 68.52*m.x1001 - 68.52*m.x1010 - 72.64*m.x1033 - 72.64*m.x1043 - 72.64*m.x1055
- 68.21*m.x1086 - 36.75*m.x1105 - 24.52*m.x1117 - 53.98*m.x1170 - 11.32*m.x1204 - 2.83*m.x1221
- 55.2*m.x1228 <= 0)
m.c396 = Constraint(expr= 9.39*m.x111 + 9.39*m.x118 + 9.39*m.x127 + 9.39*m.x139 - 29.63*m.x149 - 29.63*m.x158
- 29.63*m.x167 - 35.54*m.x199 - 35.54*m.x213 - 35.54*m.x222 - 35.54*m.x240 - 44.56*m.x250
- 44.56*m.x264 - 44.56*m.x282 - 44.56*m.x300 - 53.12*m.x318 - 53.12*m.x334 - 37.29*m.x357
- 37.29*m.x366 - 37.29*m.x384 - 37.29*m.x402 - 25.67*m.x420 - 25.67*m.x434 - 25.67*m.x444
- 25.67*m.x456 - 3.51*m.x474 - 3.51*m.x490 - 3.51*m.x499 - 3.51*m.x517 - 3.51*m.x529
- 10.26*m.x539 - 10.26*m.x553 - 10.26*m.x562 - 10.26*m.x572 - 10.26*m.x590 + 13.09*m.x600
+ 13.09*m.x616 + 13.09*m.x623 + 13.09*m.x632 + 13.09*m.x650 + 13.09*m.x662 + 3.55*m.x679
+ 3.55*m.x686 + 3.55*m.x696 + 3.55*m.x708 - 6.73*m.x724 - 6.73*m.x731 - 6.73*m.x740
- 19.8*m.x766 - 19.8*m.x782 - 49.79*m.x808 - 49.79*m.x815 - 49.79*m.x824 - 49.79*m.x834
- 49.79*m.x846 - 49.5*m.x858 - 45.78*m.x876 - 45.78*m.x885 - 50.97*m.x900 - 50.97*m.x909
- 50.97*m.x919 - 50.97*m.x937 + 0.0700000000000003*m.x947 + 0.0700000000000003*m.x956
+ 0.0700000000000003*m.x974 - 32.99*m.x992 - 32.99*m.x1001 - 32.99*m.x1010 - 0.75*m.x1033
- 0.75*m.x1043 - 0.75*m.x1055 - 35.54*m.x1086 - 53.12*m.x1105 - 37.29*m.x1117 + 3.55*m.x1170
- 49.5*m.x1204 - 50.97*m.x1221 + 0.0700000000000003*m.x1228 <= 0)
m.c397 = Constraint(expr= - 20.5*m.x111 - 20.5*m.x118 - 20.5*m.x127 - 20.5*m.x139 - 19.25*m.x149 - 19.25*m.x158
- 19.25*m.x167 - 7.14*m.x199 - 7.14*m.x213 - 7.14*m.x222 - 7.14*m.x240 - 61.93*m.x250
- 61.93*m.x264 - 61.93*m.x282 - 61.93*m.x300 - 14.2*m.x318 - 14.2*m.x334 - 5.9*m.x357
- 5.9*m.x366 - 5.9*m.x384 - 5.9*m.x402 - 63.55*m.x420 - 63.55*m.x434 - 63.55*m.x444
- 63.55*m.x456 - 42.42*m.x474 - 42.42*m.x490 - 42.42*m.x499 - 42.42*m.x517 - 42.42*m.x529
- 19.97*m.x539 - 19.97*m.x553 - 19.97*m.x562 - 19.97*m.x572 - 19.97*m.x590 + 2.83*m.x600
+ 2.83*m.x616 + 2.83*m.x623 + 2.83*m.x632 + 2.83*m.x650 + 2.83*m.x662 - 26.25*m.x679
- 26.25*m.x686 - 26.25*m.x696 - 26.25*m.x708 - 51.97*m.x724 - 51.97*m.x731 - 51.97*m.x740
- 50.56*m.x766 - 50.56*m.x782 - 61.29*m.x808 - 61.29*m.x815 - 61.29*m.x824 - 61.29*m.x834
- 61.29*m.x846 - 8.55*m.x858 - 30.23*m.x876 - 30.23*m.x885 - 15.26*m.x900 - 15.26*m.x909
- 15.26*m.x919 - 15.26*m.x937 + 7.7*m.x947 + 7.7*m.x956 + 7.7*m.x974 + 7.28*m.x992
+ 7.28*m.x1001 + 7.28*m.x1010 - 14.29*m.x1033 - 14.29*m.x1043 - 14.29*m.x1055 - 7.14*m.x1086
- 14.2*m.x1105 - 5.9*m.x1117 - 26.25*m.x1170 - 8.55*m.x1204 - 15.26*m.x1221 + 7.7*m.x1228
<= 0)
m.c398 = Constraint(expr= - 61.32*m.x111 - 61.32*m.x118 - 61.32*m.x127 - 61.32*m.x139 - 9.4*m.x149 - 9.4*m.x158
- 9.4*m.x167 - 46.62*m.x199 - 46.62*m.x213 - 46.62*m.x222 - 46.62*m.x240 - 18.5*m.x250
- 18.5*m.x264 - 18.5*m.x282 - 18.5*m.x300 - 28.79*m.x318 - 28.79*m.x334 - 62.58*m.x357
- 62.58*m.x366 - 62.58*m.x384 - 62.58*m.x402 - 10.7*m.x420 - 10.7*m.x434 - 10.7*m.x444
- 10.7*m.x456 - 61.88*m.x474 - 61.88*m.x490 - 61.88*m.x499 - 61.88*m.x517 - 61.88*m.x529
- 44.09*m.x539 - 44.09*m.x553 - 44.09*m.x562 - 44.09*m.x572 - 44.09*m.x590 - 65.34*m.x600
- 65.34*m.x616 - 65.34*m.x623 - 65.34*m.x632 - 65.34*m.x650 - 65.34*m.x662 - 16.33*m.x679
- 16.33*m.x686 - 16.33*m.x696 - 16.33*m.x708 + 1.92*m.x724 + 1.92*m.x731 + 1.92*m.x740
- 64.72*m.x766 - 64.72*m.x782 - 37.31*m.x808 - 37.31*m.x815 - 37.31*m.x824 - 37.31*m.x834
- 37.31*m.x846 - 54.12*m.x858 - 76.41*m.x876 - 76.41*m.x885 + 1.4*m.x900 + 1.4*m.x909
+ 1.4*m.x919 + 1.4*m.x937 - 25.88*m.x947 - 25.88*m.x956 - 25.88*m.x974 - 60.63*m.x992
- 60.63*m.x1001 - 60.63*m.x1010 - 56.17*m.x1033 - 56.17*m.x1043 - 56.17*m.x1055
- 46.62*m.x1086 - 28.79*m.x1105 - 62.58*m.x1117 - 16.33*m.x1170 - 54.12*m.x1204 + 1.4*m.x1221
- 25.88*m.x1228 <= 0)
m.c399 = Constraint(expr= - 16.9*m.x111 - 16.9*m.x118 - 16.9*m.x127 - 16.9*m.x139 - 51.75*m.x149 - 51.75*m.x158
- 51.75*m.x167 - 64.09*m.x199 - 64.09*m.x213 - 64.09*m.x222 - 64.09*m.x240 - 58.67*m.x250
- 58.67*m.x264 - 58.67*m.x282 - 58.67*m.x300 - 34.73*m.x318 - 34.73*m.x334 + 4.04*m.x357
+ 4.04*m.x366 + 4.04*m.x384 + 4.04*m.x402 - 57.41*m.x420 - 57.41*m.x434 - 57.41*m.x444
- 57.41*m.x456 - 40.13*m.x474 - 40.13*m.x490 - 40.13*m.x499 - 40.13*m.x517 - 40.13*m.x529
- 55.76*m.x539 - 55.76*m.x553 - 55.76*m.x562 - 55.76*m.x572 - 55.76*m.x590 - 9.32*m.x600
- 9.32*m.x616 - 9.32*m.x623 - 9.32*m.x632 - 9.32*m.x650 - 9.32*m.x662 - 15.72*m.x679
- 15.72*m.x686 - 15.72*m.x696 - 15.72*m.x708 - 39.9*m.x724 - 39.9*m.x731 - 39.9*m.x740
- 15.4*m.x766 - 15.4*m.x782 - 14.96*m.x808 - 14.96*m.x815 - 14.96*m.x824 - 14.96*m.x834
- 14.96*m.x846 - 61.26*m.x858 + 4.43*m.x876 + 4.43*m.x885 | |
documentation
while True:
message, attributes, children = self._sendrequest(
"GetPPUser",
{"seq": self._get_sequence(), "uid": start_uid, "maxRecords": MAX_RECORDS
})
if children is None or "user" not in children or not children["user"]:
break
if not isinstance(children['user'], list):
children['user'] = [children['user']]
for child in children['user']:
user = PPUser(self, child)
yield user
if len(children['user']) == MAX_RECORDS:
# response was as large as it could be, so maybe there are more records
start_uid = int(children['user'][-1]['uid'])+1
else:
break
def find_users(self, search_attrs, start_uid=0):
""" get user data records that match a given set of attributes
Obtain all user profiles that have all attributes (exact keys and values) in `search_attrs`.
Args:
search_attrs (dict): one or multiple attributes that the user records need to match
start_uid (int): (optional) the lowest user profile id to fetch (fetches next higher one if the given is does not exist)
Returns:
A generator that yields user records, one at a time.
"""
for user in self.get_users(start_uid):
matched = True
for attr in search_attrs:
if user.__getattr__(attr) != search_attrs[attr]:
matched = False
if matched:
yield user
def find_user(self, search_attrs, start_uid=0):
""" get the first user data record that matches a given set of attributes
Obtains the first user profiles that has all attributes (exact keys and values) in `search_attrs`.
Args:
search_attrs (dict): one or multiple attributes that the user record needs to match
start_uid (int): (optional) the lowest user profile id to fetch (fetches next higher one if the given is does not exist)
Returns:
A user record (dict) if a match was found, None otherwise.
"""
return next(self.find_users(search_attrs, start_uid), None)
def get_user(self, uid):
""" get user configuration data
Obtain the user profiles configuration data like sip-login ect. using the user id.
Args:
uid (int): user profile id
Returns:
Will return the users profile if the request ist successful.
If it fails None will be returned.
"""
message, attributes, children = self._sendrequest("GetPPUser", {"seq": self._get_sequence(), "uid": uid})
if children is not None and "user" in children and children["user"] is not None \
and children["user"]["uid"] == str(uid):
user = PPUser(self, children["user"])
return user
else:
return None
def get_last_pp_dev_action(self, ppn):
""" get last action of PP device
Obtain information about the last contact between the OMM and the given PP: What action was performed, and when.
The trType may be the string "None" if the OMM has not had contact with the PP since the last reboot.
Args:
ppn (int): id of the PP
Returns:
Will return a LastPPAction object if the AXI query was successful, None otherwise.
"""
message, attributes, children = self._sendrequest("GetLastPPDevAction", {"seq": self._get_sequence(), "ppn": ppn})
if children is not None and "pp" in children and children["pp"]:
action = LastPPAction(self, children["pp"])
return action
else:
return None
def set_user_relation_dynamic(self, uid):
""" Convert a fixed device-user relation into a dynamic one
After converting the relation from fixed to dynamic users are able to
logout the profile from a device using the DECT feature code.
Args:
uid (int): user profile id
Returns:
Will return the user profile's attributes if successful
False will be returned if the request failed.
"""
messagedata = {
"seq": self._get_sequence(),
"uid": uid,
"relType": "Dynamic"
}
message, attributes, children = self._sendrequest("SetPPUserDevRelation", messagedata)
if attributes is not None:
return attributes
else:
return False
def set_user_relation_fixed(self, uid):
""" Convert a user-device relation into fixed type
.. note::
Prior to this operation the user profile must be bound to a device.
When a user profile is already logged in (bound) to a device using dynamic
relationship this method can be used to fix the binding. After that no
login and logout method can be performed using the DECT mechanisms.
Args:
uid (int): user profile id
Returns:
If successful it will return a dict containing all information about the user profile.
Will return False if the request didn't succeed properly.
"""
messagedata = {
"seq": self._get_sequence(),
"uid": uid,
"relType": "Fixed"
}
message, attributes, children = self._sendrequest("SetPPUserDevRelation", messagedata)
if attributes is not None:
return attributes
else:
return False
def detach_user_device(self, uid, ppn):
""" detaches an user profile from an existing device
This only works if the OMM login has been performed in OMM sync mode
(ommsync=True for this module's login() function), otherwise it will
fail and return False.
.. note::
You have to obtain the device id also named ppn and the users id named uid.
Can be used to logout a user profile from a device entry.
The user can be logged in to another device after that.
The device can be used to login another user.
Args:
uid (int): user profile id
ppn (int): registered device id
Returns:
True if the operation was successful. False if it failed.
"""
if (type(uid) is not int or type(ppn) is not int) or (ppn <= 0 or uid <= 0):
return False
messagedata = {
"pp": {
"uid": 0,
"relType": "Unbound",
"ppn": ppn
},
"user": {
"uid": uid,
"relType": "Unbound",
"ppn": 0
}
}
message, attributes, children = self._sendrequest("SetPP", {"seq": self._get_sequence()}, messagedata)
if children is not None and "pp" in children and children["pp"]["uid"] == str(uid):
return True
else:
return False
def attach_user_device(self, uid, ppn):
""" Connects an existing user profile to an existing subscribed device
This only works if the OMM login has been performed in OMM sync mode
(ommsync=True for this module's login() function), otherwise it will
fail and return False.
Args:
uid (int): user profile id
ppn (int): registered device id
Returns:
True if the operation was successful. False if it failed.
"""
if (type(uid) is not int or type(ppn) is not int) or (ppn <= 0 or uid <= 0):
return False
messagedata = {
"pp": {
"uid": uid,
"relType": "Dynamic",
"ppn": ppn
},
"user": {
"uid": uid,
"relType": "Dynamic",
"ppn": ppn
}
}
message, attributes, children = self._sendrequest("SetPP", {"seq": self._get_sequence()}, messagedata)
if children is not None and "pp" in children and children["pp"]["uid"] == str(uid):
return True
else:
return False
    def ping(self):
        """ Pings OMM and awaits response

        Sends an AXI "Ping" request after making sure the session is logged
        in. Returns nothing; transport errors surface from _sendrequest.
        """
        self._ensure_login()
        self._sendrequest("Ping", {})
def create_user(self, name, number, desc1=None, desc2=None, login=None, pin="", sip_user=None, sip_password=None):
""" Creates new user
This function will create a new user profile without a device relation ship in dynamic mode.
It can be used to loing from a device using the feature access code with login and PIN specified.
The Feature access code can be configured using OMM. Could be someting like (*1)(4711)(3333).
Within the example *1 stands for general feature access prefix 4711 is the code for user login.
And 3333 is the extension for which login is requested. User will be prompted for a PIN.
.. note:: If no sip user name and sip password is specified number will be used
:param name: Name for the user profile (Shown as Name in OMP)
:type name: str
:param number: number for the user profile (Shown as Number/SIP user name in OMM)
:type number: str
:param desc1: Description 1 for the new user profile. Can by any string.
:type desc1: str
:param desc2: Description 2 for the new user profile. Can by any string.
:type desc2: str
:param login: Login for the use to be used for profile login from DECT or additional ID.
:type login: str
:param pin: PIN for profile login via DECT. Any non numeric value doesn't make sense.
:type pin: str
:param sip_user: Username for OMM to register the profile against the configured sip registrar
:type sip_user: str
:param sip_password: Password for sip register against registrar configured
:type sip_password: str
:rtype: dict
:return: Will return a dict containing data of the new user object if successful. Will return None if it failed.
"""
children = {
"user": {
"name": name,
"num": number
}
}
if desc1:
children["user"]["hierarchy1"] = desc1
if desc2:
children["user"]["hierarchy2"] = desc2
if login:
children["user"]["addId"] = login
if pin:
children["user"]["pin"] = encrypt_pin(pin, self._modulus, self._exponent)
if sip_user:
children["user"]["sipAuthId"] = sip_user
if sip_password:
children["user"]["sipPw"] = | |
dest):
'''
Resets the index and working tree to HEAD.
Discards any changes to tracked files in working
tree since that commit.
'''
cmd = "%s reset --hard HEAD" % (git_path,)
return module.run_command(cmd, check_rc=True, cwd=dest)
def get_remote_head(git_path, module, dest, version, remote, bare):
    """Return the sha1 the remote reports for `version` (branch, tag or HEAD).

    When `remote` equals the repo URL we are about to clone, ls-remote runs
    without a working directory; otherwise it runs inside `dest`.  A
    `version` that is neither HEAD, a remote branch, nor a remote tag is
    assumed to be a sha1 and returned unchanged, since a specific sha1
    cannot be queried on a remote.  Fails the module if the remote returns
    no revision.
    """
    cloning = False
    cwd = None
    # renamed from `tag` -- the original flag was shadowed by the
    # loop variable of the annotated-tag scan below
    is_tag = False
    if remote == module.params['repo']:
        cloning = True
    else:
        cwd = dest
    if version == 'HEAD':
        if cloning:
            # cloning the repo, just get the remote's HEAD version
            cmd = '%s ls-remote %s -h HEAD' % (git_path, remote)
        else:
            head_branch = get_head_branch(git_path, module, dest, remote, bare)
            cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, head_branch)
    elif is_remote_branch(git_path, module, dest, remote, version):
        cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, version)
    elif is_remote_tag(git_path, module, dest, remote, version):
        is_tag = True
        # trailing * also matches the dereferenced "<tag>^{}" line that
        # ls-remote prints for annotated tags
        cmd = '%s ls-remote %s -t refs/tags/%s*' % (git_path, remote, version)
    else:
        # appears to be a sha1. return as-is since it appears
        # cannot check for a specific sha1 on remote
        return version
    (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=cwd)
    if len(out) < 1:
        module.fail_json(msg="Could not determine remote revision for %s" % version)
    if is_tag:
        # Prefer the dereferenced commit ("<tag>^{}") over the tag object
        # itself when this is an annotated tag.
        for line in out.split('\n'):
            if line.endswith(version + '^{}'):
                out = line
                break
            elif line.endswith(version):
                out = line
    rev = out.split()[0]
    return rev
def is_remote_tag(git_path, module, dest, remote, version):
    """True when the remote knows `version` as a tag."""
    cmd = '%s ls-remote %s -t refs/tags/%s' % (git_path, remote, version)
    rc, out, err = module.run_command(cmd, check_rc=True, cwd=dest)
    # a matching ls-remote line contains the tag name
    return version in out
def get_branches(git_path, module, dest):
    """Return every line of `git branch -a`, stripped (local and remote)."""
    cmd = '%s branch -a' % (git_path,)
    rc, out, err = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Could not determine branch data - received %s" % out)
    # stripped lines, including any trailing empty line, same as before
    return [line.strip() for line in out.split('\n')]
def get_tags(git_path, module, dest):
    """Return every line of `git tag`, stripped."""
    cmd = '%s tag' % (git_path,)
    rc, out, err = module.run_command(cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Could not determine tag data - received %s" % out)
    # stripped lines, including any trailing empty line, same as before
    return [line.strip() for line in out.split('\n')]
def is_remote_branch(git_path, module, dest, remote, version):
    """True when the remote knows `version` as a branch head."""
    cmd = '%s ls-remote %s -h refs/heads/%s' % (git_path, remote, version)
    rc, out, err = module.run_command(cmd, check_rc=True, cwd=dest)
    # a matching ls-remote line contains the branch name
    return version in out
def is_local_branch(git_path, module, dest, branch):
    """True when `branch` exists locally (checked out or not)."""
    branches = get_branches(git_path, module, dest)
    # the currently checked-out branch is listed with a "* " prefix
    return branch in branches or '* %s' % branch in branches
def is_not_a_branch(git_path, module, dest):
    """True when HEAD is detached (git lists "* (no branch)")."""
    return any(b.startswith('* ') and 'no branch' in b
               for b in get_branches(git_path, module, dest))
def get_head_branch(git_path, module, dest, remote, bare=False):
    '''
    Determine what branch HEAD is associated with.  This is partly
    taken from lib/ansible/utils/__init__.py.  It finds the correct
    path to .git/HEAD and reads from that file the branch that HEAD is
    associated with.  In the case of a detached HEAD, this will look
    up the branch in .git/refs/remotes/<remote>/HEAD.

    Returns '' when the submodule-style .git file cannot be parsed.
    '''
    if bare:
        repo_path = dest
    else:
        repo_path = os.path.join(dest, '.git')
    # Check if the .git is a file. If it is a file, it means that we are in a submodule structure.
    if os.path.isfile(repo_path):
        try:
            # with-statement closes the handle even if parsing raises
            with open(repo_path) as gitfile:
                gitdir = yaml.safe_load(gitfile).get('gitdir')
            # There is a possibility for the .git file to have an absolute path.
            if os.path.isabs(gitdir):
                repo_path = gitdir
            else:
                repo_path = os.path.join(repo_path.split('.git')[0], gitdir)
        except (IOError, AttributeError):
            return ''
    # Read .git/HEAD for the name of the branch.
    # If we're in a detached HEAD state, look up the branch associated with
    # the remote HEAD in .git/refs/remotes/<remote>/HEAD.
    # (The original opened .git/HEAD unconditionally and leaked that handle
    # in the detached case; open only the file we actually read.)
    if is_not_a_branch(git_path, module, dest):
        head_path = os.path.join(repo_path, 'refs', 'remotes', remote, 'HEAD')
    else:
        head_path = os.path.join(repo_path, "HEAD")
    with open(head_path) as f:
        branch = f.readline().split('/')[-1].rstrip("\n")
    return branch
def fetch(git_path, module, repo, dest, version, remote, bare, refspec):
    ''' updates repo from remote sources '''
    tag_refspec = '+refs/tags/*:refs/tags/*'
    extra = [refspec] if refspec else []
    fetch_str = 'download remote objects and refs'
    # every entry is (failure label, argv list); executed in order below
    commands = [("set a new url %s for %s" % (repo, remote),
                 [git_path, 'remote', 'set-url', remote, repo])]
    if bare:
        commands.append((fetch_str,
                         [git_path, 'fetch', remote,
                          '+refs/heads/*:refs/heads/*', tag_refspec] + extra))
    else:
        # unlike in bare mode, there's no way to combine the
        # additional refspec with the default git fetch behavior,
        # so use two commands
        commands.append((fetch_str, [git_path, 'fetch', remote]))
        commands.append((fetch_str,
                         [git_path, 'fetch', remote, tag_refspec] + extra))
    for label, command in commands:
        rc, out, err = module.run_command(command, cwd=dest)
        if rc != 0:
            module.fail_json(msg="Failed to %s: %s %s" % (label, out, err))
def submodules_fetch(git_path, module, remote, track_submodules, dest):
    """Detect whether any submodule needs an update; fetch their remotes.

    Scans .gitmodules for submodules that are not yet checked out and adds
    SSH host keys for each submodule url.  If everything is already checked
    out, fetches each submodule and compares revisions to decide whether an
    update is pending.  Returns True when an update is needed.
    """
    changed = False
    if not os.path.exists(os.path.join(dest, '.gitmodules')):
        # no submodules
        return changed
    # NOTE(review): handle is never closed explicitly; relies on GC -- confirm
    gitmodules_file = open(os.path.join(dest, '.gitmodules'), 'r')
    for line in gitmodules_file:
        # Check for new submodules
        if not changed and line.strip().startswith('path'):
            path = line.split('=', 1)[1].strip()
            # Check that dest/path/.git exists
            if not os.path.exists(os.path.join(dest, path, '.git')):
                changed = True
        # add the submodule repo's hostkey
        if line.strip().startswith('url'):
            repo = line.split('=', 1)[1].strip()
            # skip hostkey handling only when the user explicitly disabled
            # strict host key checking via ssh_opts
            if module.params['ssh_opts'] is not None:
                if not "-o StrictHostKeyChecking=no" in module.params['ssh_opts']:
                    add_git_host_key(module, repo, accept_hostkey=module.params['accept_hostkey'])
            else:
                add_git_host_key(module, repo, accept_hostkey=module.params['accept_hostkey'])
    # Check for updates to existing modules
    if not changed:
        # Fetch updates
        begin = get_submodule_versions(git_path, module, dest)
        cmd = [git_path, 'submodule', 'foreach', git_path, 'fetch']
        (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
        if rc != 0:
            module.fail_json(msg="Failed to fetch submodules: %s" % out + err)
        if track_submodules:
            # Compare against submodule HEAD
            ### FIXME: determine this from .gitmodules
            version = 'master'
            after = get_submodule_versions(git_path, module, dest, '%s/%s'
                    % (remote, version))
            if begin != after:
                changed = True
        else:
            # Compare against the superproject's expectation
            cmd = [git_path, 'submodule', 'status']
            (rc, out, err) = module.run_command(cmd, check_rc=True, cwd=dest)
            if rc != 0:
                module.fail_json(msg='Failed to retrieve submodule status: %s' % out + err)
            # a non-space status prefix ("+", "-", "U") marks an out-of-sync submodule
            for line in out.splitlines():
                if line[0] != ' ':
                    changed = True
                    break
    return changed
def submodule_update(git_path, module, dest, track_submodules):
    ''' init and update any submodules '''
    # get the valid submodule params
    params = get_submodule_update_params(module, git_path, dest)
    # skip submodule commands if .gitmodules is not present
    if not os.path.exists(os.path.join(dest, '.gitmodules')):
        return (0, '', '')
    rc, out, err = module.run_command([git_path, 'submodule', 'sync'],
                                      check_rc=True, cwd=dest)
    update_cmd = [git_path, 'submodule', 'update', '--init', '--recursive']
    # --remote only when this git supports it and tracking was requested
    if 'remote' in params and track_submodules:
        update_cmd.append('--remote')
    rc, out, err = module.run_command(update_cmd, cwd=dest)
    if rc != 0:
        module.fail_json(msg="Failed to init/update submodules: %s" % out + err)
    return (rc, out, err)
def switch_version(git_path, module, dest, remote, version):
    """Check out `version` (branch, tag or sha1) in `dest`.

    For a remote branch, creates or resets the local tracking branch; for
    HEAD, resets the current head branch to the remote.  Fails the module
    on any checkout error; returns (rc, stdout, stderr) of the final git
    command.
    """
    cmd = ''
    if version != 'HEAD':
        if is_remote_branch(git_path, module, dest, remote, version):
            if not is_local_branch(git_path, module, dest, version):
                # no local branch yet: create one tracking the remote branch
                cmd = "%s checkout --track -b %s %s/%s" % (git_path, version, remote, version)
            else:
                # local branch exists: check it out, then hard-reset to remote
                (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, version), cwd=dest)
                if rc != 0:
                    module.fail_json(msg="Failed to checkout branch %s" % version)
                cmd = "%s reset --hard %s/%s" % (git_path, remote, version)
        else:
            # tag or sha1: plain forced checkout
            cmd = "%s checkout --force %s" % (git_path, version)
    else:
        # HEAD: figure out the head branch and reset it to the remote
        branch = get_head_branch(git_path, module, dest, remote)
        (rc, out, err) = module.run_command("%s checkout --force %s" % (git_path, branch), cwd=dest)
        if rc != 0:
            module.fail_json(msg="Failed to checkout branch %s" % branch)
        cmd = "%s reset --hard %s" % (git_path, remote)
    (rc, out1, err1) = module.run_command(cmd, cwd=dest)
    if rc != 0:
        if version != 'HEAD':
            module.fail_json(msg="Failed to checkout %s" % (version))
        else:
            # `branch` is defined here because version == 'HEAD' took the else path above
            module.fail_json(msg="Failed to checkout branch %s" % (branch))
    return (rc, out1, err1)
# ===========================================
def main():
module = AnsibleModule(
argument_spec = dict(
dest=dict(),
repo=dict(required=True, aliases=['name']),
version=dict(default='HEAD'),
remote=dict(default='origin'),
refspec=dict(default=None),
reference=dict(default=None),
force=dict(default='no', type='bool'),
depth=dict(default=None, type='int'),
clone=dict(default='yes', type='bool'),
update=dict(default='yes', type='bool'),
accept_hostkey=dict(default='no', type='bool'),
key_file=dict(default=None, required=False),
ssh_opts=dict(default=None, required=False),
executable=dict(default=None),
bare=dict(default='no', type='bool'),
recursive=dict(default='yes', type='bool'),
track_submodules=dict(default='no', type='bool'),
),
supports_check_mode=True
)
dest = module.params['dest']
repo = module.params['repo']
version = module.params['version']
remote = module.params['remote']
refspec = module.params['refspec']
force = module.params['force']
depth = module.params['depth']
update = module.params['update']
allow_clone = module.params['clone']
bare = module.params['bare']
reference = module.params['reference']
git_path = module.params['executable'] or module.get_bin_path('git', True)
key_file = module.params['key_file']
ssh_opts = module.params['ssh_opts']
# We screenscrape a huge amount of git commands so use C locale anytime we
# call run_command()
module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
gitconfig = | |
<gh_stars>1-10
#!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2016-2020 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# <NAME> - http://herve.niderb.fr
import os
import yaml
from pathlib import Path
import warnings
import pandas as pd
from glob import glob
from pyannote_xxx.core import Segment, Timeline, Annotation
class PyannoteDatabaseException(Exception):
    """Base exception raised by the pyannote database helpers."""
    pass
class FileFinder(object):
    """Database file finder

    Resolves a protocol file (its 'uri', 'database', ... keys) to one path
    on disk, using path templates read from a YAML configuration file.

    Parameters
    ----------
    config_yml : str, optional
        Path to database configuration file in YAML format.
        See "Configuration file" section for examples.
        Defaults to the content of PYANNOTE_DATABASE_CONFIG environment
        variable if defined and to "~/.pyannote/database.yml" otherwise.

    Configuration file
    ------------------
    Here are a few examples of what is expected in the configuration file.

    # all files are in the same directory
    /path/to/files/{uri}.wav

    # support for {database} placeholder
    /path/to/{database}/files/{uri}.wav

    # support for multiple databases
    database1: /path/to/files/{uri}.wav
    database2: /path/to/other/files/{uri}.wav

    # files are spread over multiple directories
    database3:
      - /path/to/files/1/{uri}.wav
      - /path/to/files/2/{uri}.wav

    # supports * globbing
    database4: /path/to/files/*/{uri}.wav

    See also
    --------
    glob
    """
    def __init__(self, config_yml=None):
        """Load the 'Databases' section of the YAML configuration.

        A missing configuration file is not an error: the finder simply
        starts with an empty configuration.
        """
        super(FileFinder, self).__init__()
        if config_yml is None:
            config_yml = os.environ.get("PYANNOTE_DATABASE_CONFIG",
                                        "~/.pyannote/database.yml")
        config_yml = Path(config_yml).expanduser()
        try:
            with open(config_yml, 'r') as fp:
                config = yaml.load(fp, Loader=yaml.SafeLoader)
        except FileNotFoundError:
            config = dict()
        # only the 'Databases' mapping is relevant to path resolution
        self.config = config.get('Databases', dict())
    def _find(self, config, uri=None, database=None, **kwargs):
        """Recursively expand `config` (template, list, or dict) and
        return every existing path matching the formatted template(s)."""
        found = []
        # list of path templates
        if isinstance(config, list):
            for path_template in config:
                path = path_template.format(uri=uri, database=database,
                                            **kwargs)
                found_ = glob(path)
                found.extend(found_)
        # database-indexed dictionary
        elif isinstance(config, dict):
            # if database identifier is not provided
            # or does not exist in configuration file
            # look into all databases...
            if database is None or database not in config:
                databases = list(config)
            # if database identifier is provided AND exists
            # only look into this very database
            else:
                databases = [database]
            # iteratively look into selected databases
            for database in databases:
                found_ = self._find(config[database], uri=uri,
                                    database=database, **kwargs)
                found.extend(found_)
        else:
            # base case: a single path template string
            path_template = config
            path = path_template.format(uri=uri, database=database, **kwargs)
            found_ = glob(path)
            found.extend(found_)
        return found
    @classmethod
    def protocol_file_iter(cls, protocol):
        """Iterate over all files in `protocol`

        Deprecated stub: always raises NotImplementedError.

        Parameters
        ----------
        protocol : Protocol
        """
        msg = (
            'FileFinder.protocol_file_iter is deprecated. '
            'Use Protocol.files instead.')
        raise NotImplementedError(msg)
    @classmethod
    def current_file_iter(cls, current_file: 'ProtocolFile'):
        # Deprecated stub: always raises NotImplementedError.
        msg = (
            'FileFinder.current_file_iter is deprecated. '
            'Use ProtocolFile.files instead.')
        raise NotImplementedError(msg)
    def __call__(self, current_file):
        """Find files

        Parameters
        ----------
        current_file : ProtocolFile
            Dictionary as generated by pyannote.database plugins.

        Returns
        -------
        path : str
            The unique path matching `current_file`.

        Raises
        ------
        ValueError
            When zero or more than one path matches.
        """
        # NOTE(review): abs(current_file) presumably resolves the lazy
        # ProtocolFile into a plain dict of keyword arguments -- confirm
        # against pyannote's ProtocolFile implementation.
        found_files = self._find(self.config, **abs(current_file))
        n_found_files = len(found_files)
        if n_found_files == 1:
            return found_files[0]
        elif n_found_files == 0:
            uri = current_file['uri']
            msg = 'Could not find file "{uri}".'
            raise ValueError(msg.format(uri=uri))
        else:
            uri = current_file['uri']
            msg = 'Found {n} matches for file "{uri}"'
            raise ValueError(msg.format(uri=uri, n=n_found_files))
def get_unique_identifier(item):
    """Return unique item identifier

    The complete format is {database}/{uri}_{channel}:
    * prefixed by "{database}/" only when `item` has a 'database' key.
    * suffixed by "_{channel}" only when `item` has a 'channel' key.

    Parameters
    ----------
    item : dict
        Item as yielded by pyannote.database protocols

    Returns
    -------
    identifier : str
        Unique item identifier
    """
    # assemble the format template piece by piece, then fill it in one go
    parts = []
    if item.get('database', None) is not None:
        parts.append("{database}/")
    parts.append("{uri}")
    if item.get('channel', None) is not None:
        parts.append("_{channel:d}")
    return "".join(parts).format(**item)
def get_annotated(current_file):
    """Get part of the file that is annotated.

    Parameters
    ----------
    current_file : `dict`
        File generated by a `pyannote.database` protocol.

    Returns
    -------
    annotated : `pyannote.core.Timeline`
        Part of the file that is annotated. Defaults to
        `current_file["annotated"]`. When it does not exist, try to use the
        full audio extent. When that fails, use "annotation" extent.
    """
    # if protocol provides 'annotated' key, use it
    if 'annotated' in current_file:
        annotated = current_file['annotated']
        return annotated
    # if it does not, but does provide 'audio' key
    # try and use wav duration
    if 'duration' in current_file:
        try:
            duration = current_file['duration']
        except ImportError as e:
            # NOTE(review): a plain dict lookup cannot raise ImportError;
            # this presumably guards a lazy ProtocolFile preprocessor that
            # imports an optional audio backend on access -- confirm.
            pass
        else:
            annotated = Timeline([Segment(0, duration)])
            msg = f'"annotated" was approximated by [0, audio duration].'
            warnings.warn(msg)
            return annotated
    # last resort: the extent of the reference annotation itself
    extent = current_file['annotation'].get_timeline().extent()
    annotated = Timeline([extent])
    msg = (f'"annotated" was approximated by "annotation" extent. '
           f'Please provide "annotated" directly, or at the very '
           f'least, use a "duration" preprocessor.')
    warnings.warn(msg)
    return annotated
def get_label_identifier(label, current_file):
    """Return unique label identifier

    Parameters
    ----------
    label : str
        Database-internal label
    current_file
        Yielded by pyannote.database protocols

    Returns
    -------
    unique_label : str
        Global label, formatted as "{database}|{label}"
    """
    # TODO. when the "true" name of a person is used,
    # do not preprend database name.
    return '{}|{}'.format(current_file['database'], label)
def load_rttm(file_rttm):
    """Load RTTM file

    Parameter
    ---------
    file_rttm : `str`
        Path to RTTM file.

    Returns
    -------
    annotations : `dict`
        Speaker diarization as a {uri: pyannote.core.Annotation} dictionary.
    """
    names = ['NA1', 'uri', 'NA2', 'start', 'duration',
             'NA3', 'NA4', 'speaker', 'NA5', 'NA6']
    dtype = {'uri': str, 'start': float, 'duration': float, 'speaker': str}
    # sep=r'\s+' is the supported equivalent of delim_whitespace=True,
    # which is deprecated since pandas 2.2 and removed in pandas 3.0.
    data = pd.read_csv(file_rttm, names=names, dtype=dtype,
                       sep=r'\s+',
                       keep_default_na=False)
    annotations = dict()
    for uri, turns in data.groupby('uri'):
        annotation = Annotation(uri=uri)
        for i, turn in turns.iterrows():
            segment = Segment(turn.start, turn.start + turn.duration)
            annotation[segment, i] = turn.speaker
        annotations[uri] = annotation
    return annotations
class RTTMLoader(object):
    """RTTM loader for use as pyannote.database preprocessor

    Parameters
    ----------
    train : `Path`, optional
        Path to RTTM file for training set
    development : `Path`, optional
        Path to RTTM file for development set
    test : `Path`, optional
        Path to RTTM file for test set
    """
    def __init__(self, train=None, development=None, test=None):
        super().__init__()
        # eagerly parse every provided RTTM file, keyed by subset name
        self.hypotheses_ = {}
        subsets = (('train', train), ('development', development),
                   ('test', test))
        for subset, path in subsets:
            if path is not None:
                self.hypotheses_[subset] = load_rttm(path)
    def __call__(self, current_file):
        """Return RTTM content for current file

        Parameter
        ---------
        current_file : `dict`
            Current file as provided by a `pyannote.database.Protocol`

        Returns
        -------
        annotation : `pyannote.core.Annotation`
            Annotation
        """
        uri = current_file['uri']
        # collect the hypothesis from every subset that knows this uri
        found = [hypotheses[uri]
                 for hypotheses in self.hypotheses_.values()
                 if uri in hypotheses]
        if len(found) == 1:
            return found[0]
        if not found:
            raise ValueError(f'Could not find any hypothesis for "{uri}".')
        raise ValueError(f'Found {len(found)} hypotheses for "{uri}".')
def load_mdtm(file_mdtm):
    """Load MDTM file

    Parameter
    ---------
    file_mdtm : `str`
        Path to MDTM file.

    Returns
    -------
    annotations : `dict`
        Speaker diarization as a {uri: pyannote.core.Annotation} dictionary.
    """
    names = ['uri', 'NA1', 'start', 'duration', 'NA2', 'NA3', 'NA4', 'speaker']
    dtype = {'uri': str, 'start': float, 'duration': float, 'speaker': str}
    # sep=r'\s+' is the supported equivalent of delim_whitespace=True,
    # which is deprecated since pandas 2.2 and removed in pandas 3.0.
    data = pd.read_csv(file_mdtm, names=names, dtype=dtype,
                       sep=r'\s+',
                       keep_default_na=False)
    annotations = dict()
    for uri, turns in data.groupby('uri'):
        annotation = Annotation(uri=uri)
        for i, turn in turns.iterrows():
            segment = Segment(turn.start, turn.start + turn.duration)
            annotation[segment, i] = turn.speaker
        annotations[uri] = annotation
    return annotations
def load_uem(file_uem):
"""Load UEM file
Parameter
---------
file_uem : `str`
Path to UEM file.
Returns
-------
timelines : `dict`
Evaluation map as a {uri: pyannote.core.Timeline} dictionary.
"""
names = ['uri', 'NA1', 'start', 'end']
dtype = {'uri': str, 'start': float, 'end': float}
data = pd.read_csv(file_uem, names=names, dtype=dtype,
delim_whitespace=True)
timelines = dict()
for uri, parts in data.groupby('uri'):
segments = [Segment(part.start, part.end)
for i, part in parts.iterrows()]
timelines[uri] = |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.